repository_name
stringlengths 5
67
| func_path_in_repository
stringlengths 4
234
| func_name
stringlengths 0
314
| whole_func_string
stringlengths 52
3.87M
| language
stringclasses 6
values | func_code_string
stringlengths 52
3.87M
| func_documentation_string
stringlengths 1
47.2k
| func_code_url
stringlengths 85
339
|
---|---|---|---|---|---|---|---|
bitshares/uptick | uptick/witness.py | disapprovewitness | def disapprovewitness(ctx, witnesses, account):
""" Disapprove witness(es)
"""
print_tx(ctx.bitshares.disapprovewitness(witnesses, account=account)) | python | def disapprovewitness(ctx, witnesses, account):
""" Disapprove witness(es)
"""
print_tx(ctx.bitshares.disapprovewitness(witnesses, account=account)) | Disapprove witness(es) | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/witness.py#L37-L40 |
bitshares/uptick | uptick/witness.py | witnesses | def witnesses(ctx):
""" List witnesses and relevant information
"""
t = [
[
"weight",
"account",
"signing_key",
"vote_id",
"url",
"total_missed",
"last_confirmed_block_num",
]
]
for witness in sorted(Witnesses(), key=lambda x: x.weight, reverse=True):
witness.refresh()
t.append(
[
"{:.2f}%".format(witness.weight * 100),
witness.account["name"],
witness["signing_key"],
witness["vote_id"],
witness["url"],
witness["total_missed"],
witness["last_confirmed_block_num"],
]
)
print_table(t) | python | def witnesses(ctx):
""" List witnesses and relevant information
"""
t = [
[
"weight",
"account",
"signing_key",
"vote_id",
"url",
"total_missed",
"last_confirmed_block_num",
]
]
for witness in sorted(Witnesses(), key=lambda x: x.weight, reverse=True):
witness.refresh()
t.append(
[
"{:.2f}%".format(witness.weight * 100),
witness.account["name"],
witness["signing_key"],
witness["vote_id"],
witness["url"],
witness["total_missed"],
witness["last_confirmed_block_num"],
]
)
print_table(t) | List witnesses and relevant information | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/witness.py#L46-L73 |
bitshares/uptick | uptick/vesting.py | vesting | def vesting(ctx, account):
""" List accounts vesting balances
"""
account = Account(account, full=True)
t = [["vesting_id", "claimable"]]
for vest in account["vesting_balances"]:
vesting = Vesting(vest)
t.append([vesting["id"], str(vesting.claimable)])
print_table(t) | python | def vesting(ctx, account):
""" List accounts vesting balances
"""
account = Account(account, full=True)
t = [["vesting_id", "claimable"]]
for vest in account["vesting_balances"]:
vesting = Vesting(vest)
t.append([vesting["id"], str(vesting.claimable)])
print_table(t) | List accounts vesting balances | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/vesting.py#L16-L24 |
bitshares/uptick | uptick/vesting.py | claim | def claim(ctx, vestingid, account, amount):
""" Claim funds from the vesting balance
"""
vesting = Vesting(vestingid)
if amount:
amount = Amount(float(amount), "BTS")
else:
amount = vesting.claimable
print_tx(
ctx.bitshares.vesting_balance_withdraw(
vesting["id"], amount=amount, account=vesting["owner"]
)
) | python | def claim(ctx, vestingid, account, amount):
""" Claim funds from the vesting balance
"""
vesting = Vesting(vestingid)
if amount:
amount = Amount(float(amount), "BTS")
else:
amount = vesting.claimable
print_tx(
ctx.bitshares.vesting_balance_withdraw(
vesting["id"], amount=amount, account=vesting["owner"]
)
) | Claim funds from the vesting balance | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/vesting.py#L34-L46 |
bitshares/uptick | uptick/vesting.py | reserve | def reserve(ctx, amount, symbol, account):
""" Reserve/Burn tokens
"""
print_tx(
ctx.bitshares.reserve(
Amount(amount, symbol, bitshares_instance=ctx.bitshares), account=account
)
) | python | def reserve(ctx, amount, symbol, account):
""" Reserve/Burn tokens
"""
print_tx(
ctx.bitshares.reserve(
Amount(amount, symbol, bitshares_instance=ctx.bitshares), account=account
)
) | Reserve/Burn tokens | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/vesting.py#L56-L63 |
bitshares/uptick | uptick/apis/poloniex.py | run | def run(context, port):
""" Run the Webserver/SocketIO and app
"""
global ctx
ctx = context
app.run(port=port) | python | def run(context, port):
""" Run the Webserver/SocketIO and app
"""
global ctx
ctx = context
app.run(port=port) | Run the Webserver/SocketIO and app | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/apis/poloniex.py#L109-L114 |
bitshares/uptick | uptick/decorators.py | offline | def offline(f):
""" This decorator allows you to access ``ctx.bitshares`` which is
an instance of BitShares with ``offline=True``.
"""
@click.pass_context
@verbose
def new_func(ctx, *args, **kwargs):
ctx.obj["offline"] = True
ctx.bitshares = BitShares(**ctx.obj)
ctx.blockchain = ctx.bitshares
ctx.bitshares.set_shared_instance()
return ctx.invoke(f, *args, **kwargs)
return update_wrapper(new_func, f) | python | def offline(f):
""" This decorator allows you to access ``ctx.bitshares`` which is
an instance of BitShares with ``offline=True``.
"""
@click.pass_context
@verbose
def new_func(ctx, *args, **kwargs):
ctx.obj["offline"] = True
ctx.bitshares = BitShares(**ctx.obj)
ctx.blockchain = ctx.bitshares
ctx.bitshares.set_shared_instance()
return ctx.invoke(f, *args, **kwargs)
return update_wrapper(new_func, f) | This decorator allows you to access ``ctx.bitshares`` which is
an instance of BitShares with ``offline=True``. | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/decorators.py#L55-L69 |
bitshares/uptick | uptick/decorators.py | customchain | def customchain(**kwargsChain):
""" This decorator allows you to access ``ctx.bitshares`` which is
an instance of BitShares. But in contrast to @chain, this is a
decorator that expects parameters that are directed right to
``BitShares()``.
... code-block::python
@main.command()
@click.option("--worker", default=None)
@click.pass_context
@customchain(foo="bar")
@unlock
def list(ctx, worker):
print(ctx.obj)
"""
def wrap(f):
@click.pass_context
@verbose
def new_func(ctx, *args, **kwargs):
newoptions = ctx.obj
newoptions.update(kwargsChain)
ctx.bitshares = BitShares(**newoptions)
ctx.blockchain = ctx.bitshares
set_shared_bitshares_instance(ctx.bitshares)
return ctx.invoke(f, *args, **kwargs)
return update_wrapper(new_func, f)
return wrap | python | def customchain(**kwargsChain):
""" This decorator allows you to access ``ctx.bitshares`` which is
an instance of BitShares. But in contrast to @chain, this is a
decorator that expects parameters that are directed right to
``BitShares()``.
... code-block::python
@main.command()
@click.option("--worker", default=None)
@click.pass_context
@customchain(foo="bar")
@unlock
def list(ctx, worker):
print(ctx.obj)
"""
def wrap(f):
@click.pass_context
@verbose
def new_func(ctx, *args, **kwargs):
newoptions = ctx.obj
newoptions.update(kwargsChain)
ctx.bitshares = BitShares(**newoptions)
ctx.blockchain = ctx.bitshares
set_shared_bitshares_instance(ctx.bitshares)
return ctx.invoke(f, *args, **kwargs)
return update_wrapper(new_func, f)
return wrap | This decorator allows you to access ``ctx.bitshares`` which is
an instance of BitShares. But in contrast to @chain, this is a
decorator that expects parameters that are directed right to
``BitShares()``.
... code-block::python
@main.command()
@click.option("--worker", default=None)
@click.pass_context
@customchain(foo="bar")
@unlock
def list(ctx, worker):
print(ctx.obj) | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/decorators.py#L72-L103 |
bitshares/uptick | uptick/proposal.py | disapproveproposal | def disapproveproposal(ctx, proposal, account):
""" Disapprove a proposal
"""
print_tx(ctx.bitshares.disapproveproposal(proposal, account=account)) | python | def disapproveproposal(ctx, proposal, account):
""" Disapprove a proposal
"""
print_tx(ctx.bitshares.disapproveproposal(proposal, account=account)) | Disapprove a proposal | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/proposal.py#L21-L24 |
bitshares/uptick | uptick/proposal.py | approveproposal | def approveproposal(ctx, proposal, account):
""" Approve a proposal
"""
print_tx(ctx.bitshares.approveproposal(proposal, account=account)) | python | def approveproposal(ctx, proposal, account):
""" Approve a proposal
"""
print_tx(ctx.bitshares.approveproposal(proposal, account=account)) | Approve a proposal | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/proposal.py#L38-L41 |
bitshares/uptick | uptick/proposal.py | proposals | def proposals(ctx, account):
""" List proposals
"""
proposals = Proposals(account)
t = [
[
"id",
"expiration",
"proposer",
"required approvals",
"available approvals",
"review period time",
"proposal",
]
]
for proposal in proposals:
t.append(
[
proposal["id"],
proposal["expiration_time"],
Account(proposal.proposer)["name"],
[
Account(x)["name"]
for x in (
proposal["required_active_approvals"]
+ proposal["required_owner_approvals"]
)
],
json.dumps(
[Account(x)["name"] for x in proposal["available_active_approvals"]]
+ proposal["available_key_approvals"]
+ proposal["available_owner_approvals"],
indent=1,
),
proposal.get("review_period_time", None),
format_dict(proposal["proposed_transaction"]),
]
)
print_table(t) | python | def proposals(ctx, account):
""" List proposals
"""
proposals = Proposals(account)
t = [
[
"id",
"expiration",
"proposer",
"required approvals",
"available approvals",
"review period time",
"proposal",
]
]
for proposal in proposals:
t.append(
[
proposal["id"],
proposal["expiration_time"],
Account(proposal.proposer)["name"],
[
Account(x)["name"]
for x in (
proposal["required_active_approvals"]
+ proposal["required_owner_approvals"]
)
],
json.dumps(
[Account(x)["name"] for x in proposal["available_active_approvals"]]
+ proposal["available_key_approvals"]
+ proposal["available_owner_approvals"],
indent=1,
),
proposal.get("review_period_time", None),
format_dict(proposal["proposed_transaction"]),
]
)
print_table(t) | List proposals | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/proposal.py#L48-L87 |
bitshares/uptick | uptick/message.py | sign | def sign(ctx, file, account):
""" Sign a message with an account
"""
if not file:
print_message("Prompting for message. Terminate with CTRL-D", "info")
file = click.get_text_stream("stdin")
m = Message(file.read(), bitshares_instance=ctx.bitshares)
print_message(m.sign(account), "info") | python | def sign(ctx, file, account):
""" Sign a message with an account
"""
if not file:
print_message("Prompting for message. Terminate with CTRL-D", "info")
file = click.get_text_stream("stdin")
m = Message(file.read(), bitshares_instance=ctx.bitshares)
print_message(m.sign(account), "info") | Sign a message with an account | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/message.py#L24-L31 |
bitshares/uptick | uptick/message.py | verify | def verify(ctx, file, account):
""" Verify a signed message
"""
if not file:
print_message("Prompting for message. Terminate with CTRL-D", "info")
file = click.get_text_stream("stdin")
m = Message(file.read(), bitshares_instance=ctx.bitshares)
try:
if m.verify():
print_message("Verified", "success")
else:
print_message("not verified", "error")
except InvalidMessageSignature:
print_message("Signature INVALID!", "error") | python | def verify(ctx, file, account):
""" Verify a signed message
"""
if not file:
print_message("Prompting for message. Terminate with CTRL-D", "info")
file = click.get_text_stream("stdin")
m = Message(file.read(), bitshares_instance=ctx.bitshares)
try:
if m.verify():
print_message("Verified", "success")
else:
print_message("not verified", "error")
except InvalidMessageSignature:
print_message("Signature INVALID!", "error") | Verify a signed message | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/message.py#L41-L54 |
bitshares/uptick | uptick/account.py | allow | def allow(ctx, foreign_account, permission, weight, threshold, account):
""" Add a key/account to an account's permission
"""
if not foreign_account:
from bitsharesbase.account import PasswordKey
pwd = click.prompt(
"Password for Key Derivation", hide_input=True, confirmation_prompt=True
)
foreign_account = format(
PasswordKey(account, pwd, permission).get_public(), "BTS"
)
print_tx(
ctx.bitshares.allow(
foreign_account,
weight=weight,
account=account,
permission=permission,
threshold=threshold,
)
) | python | def allow(ctx, foreign_account, permission, weight, threshold, account):
""" Add a key/account to an account's permission
"""
if not foreign_account:
from bitsharesbase.account import PasswordKey
pwd = click.prompt(
"Password for Key Derivation", hide_input=True, confirmation_prompt=True
)
foreign_account = format(
PasswordKey(account, pwd, permission).get_public(), "BTS"
)
print_tx(
ctx.bitshares.allow(
foreign_account,
weight=weight,
account=account,
permission=permission,
threshold=threshold,
)
) | Add a key/account to an account's permission | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/account.py#L28-L48 |
bitshares/uptick | uptick/account.py | disallow | def disallow(ctx, foreign_account, permission, threshold, account):
""" Remove a key/account from an account's permission
"""
print_tx(
ctx.bitshares.disallow(
foreign_account, account=account, permission=permission, threshold=threshold
)
) | python | def disallow(ctx, foreign_account, permission, threshold, account):
""" Remove a key/account from an account's permission
"""
print_tx(
ctx.bitshares.disallow(
foreign_account, account=account, permission=permission, threshold=threshold
)
) | Remove a key/account from an account's permission | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/account.py#L66-L73 |
bitshares/uptick | uptick/account.py | history | def history(ctx, account, limit, type, csv, exclude, raw):
""" Show history of an account
"""
from bitsharesbase.operations import getOperationNameForId
t = [["#", "time (block)", "operation", "details"]]
for a in account:
account = Account(a, bitshares_instance=ctx.bitshares)
for b in account.history(limit=limit, only_ops=type, exclude_ops=exclude):
block = BlockHeader(b["block_num"])
row = [
b["id"],
"%s (%s)" % (block.time(), b["block_num"]),
"{} ({})".format(getOperationNameForId(b["op"][0]), b["op"][0]),
pprintOperation(b) if not raw else json.dumps(b, indent=4),
]
t.append(row)
print_table(t) | python | def history(ctx, account, limit, type, csv, exclude, raw):
""" Show history of an account
"""
from bitsharesbase.operations import getOperationNameForId
t = [["#", "time (block)", "operation", "details"]]
for a in account:
account = Account(a, bitshares_instance=ctx.bitshares)
for b in account.history(limit=limit, only_ops=type, exclude_ops=exclude):
block = BlockHeader(b["block_num"])
row = [
b["id"],
"%s (%s)" % (block.time(), b["block_num"]),
"{} ({})".format(getOperationNameForId(b["op"][0]), b["op"][0]),
pprintOperation(b) if not raw else json.dumps(b, indent=4),
]
t.append(row)
print_table(t) | Show history of an account | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/account.py#L87-L105 |
bitshares/uptick | uptick/account.py | transfer | def transfer(ctx, to, amount, asset, memo, account):
""" Transfer assets
"""
print_tx(ctx.bitshares.transfer(to, amount, asset, memo=memo, account=account)) | python | def transfer(ctx, to, amount, asset, memo, account):
""" Transfer assets
"""
print_tx(ctx.bitshares.transfer(to, amount, asset, memo=memo, account=account)) | Transfer assets | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/account.py#L119-L122 |
bitshares/uptick | uptick/account.py | balance | def balance(ctx, accounts):
""" Show Account balances
"""
t = [["Account", "Amount"]]
for a in accounts:
account = Account(a, bitshares_instance=ctx.bitshares)
for b in account.balances:
t.append([str(a), str(b)])
print_table(t) | python | def balance(ctx, accounts):
""" Show Account balances
"""
t = [["Account", "Amount"]]
for a in accounts:
account = Account(a, bitshares_instance=ctx.bitshares)
for b in account.balances:
t.append([str(a), str(b)])
print_table(t) | Show Account balances | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/account.py#L129-L137 |
bitshares/uptick | uptick/account.py | newaccount | def newaccount(ctx, accountname, account, password):
""" Create a new account
"""
print_tx(
ctx.bitshares.create_account(accountname, registrar=account, password=password)
) | python | def newaccount(ctx, accountname, account, password):
""" Create a new account
"""
print_tx(
ctx.bitshares.create_account(accountname, registrar=account, password=password)
) | Create a new account | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/account.py#L167-L172 |
bitshares/uptick | uptick/account.py | cloneaccount | def cloneaccount(ctx, account_name, account):
""" Clone an account
This copies the owner and active permissions as well as the
options (e.g. votes, memo key)
"""
from bitsharesbase import transactions, operations
account = Account(account)
op = {
"fee": {"amount": 0, "asset_id": "1.3.0"},
"registrar": account["id"],
"referrer": account["id"],
"referrer_percent": 100,
"name": account_name,
"owner": account["owner"],
"active": account["active"],
"options": account["options"],
"extensions": {},
"prefix": ctx.bitshares.rpc.chain_params["prefix"],
}
op = operations.Account_create(**op)
print_tx(ctx.bitshares.finalizeOp(op, account, "active")) | python | def cloneaccount(ctx, account_name, account):
""" Clone an account
This copies the owner and active permissions as well as the
options (e.g. votes, memo key)
"""
from bitsharesbase import transactions, operations
account = Account(account)
op = {
"fee": {"amount": 0, "asset_id": "1.3.0"},
"registrar": account["id"],
"referrer": account["id"],
"referrer_percent": 100,
"name": account_name,
"owner": account["owner"],
"active": account["active"],
"options": account["options"],
"extensions": {},
"prefix": ctx.bitshares.rpc.chain_params["prefix"],
}
op = operations.Account_create(**op)
print_tx(ctx.bitshares.finalizeOp(op, account, "active")) | Clone an account
This copies the owner and active permissions as well as the
options (e.g. votes, memo key) | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/account.py#L198-L220 |
bitshares/uptick | uptick/account.py | changememokey | def changememokey(ctx, key, account):
""" Change the memo key of an account
"""
print_tx(ctx.bitshares.update_memo_key(key, account=account)) | python | def changememokey(ctx, key, account):
""" Change the memo key of an account
"""
print_tx(ctx.bitshares.update_memo_key(key, account=account)) | Change the memo key of an account | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/account.py#L234-L237 |
bitshares/uptick | uptick/account.py | whitelist | def whitelist(ctx, whitelist_account, account):
""" Add an account to a whitelist
"""
account = Account(account, blockchain_instance=ctx.blockchain)
print_tx(account.whitelist(whitelist_account)) | python | def whitelist(ctx, whitelist_account, account):
""" Add an account to a whitelist
"""
account = Account(account, blockchain_instance=ctx.blockchain)
print_tx(account.whitelist(whitelist_account)) | Add an account to a whitelist | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/account.py#L251-L255 |
bitshares/uptick | uptick/account.py | blacklist | def blacklist(ctx, blacklist_account, account):
""" Add an account to a blacklist
"""
account = Account(account, blockchain_instance=ctx.blockchain)
print_tx(account.blacklist(blacklist_account)) | python | def blacklist(ctx, blacklist_account, account):
""" Add an account to a blacklist
"""
account = Account(account, blockchain_instance=ctx.blockchain)
print_tx(account.blacklist(blacklist_account)) | Add an account to a blacklist | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/account.py#L269-L273 |
bitshares/uptick | uptick/account.py | unlist | def unlist(ctx, unlist_account, account):
""" Remove an account from any list
"""
account = Account(account, blockchain_instance=ctx.blockchain)
print_tx(account.nolist(unlist_account)) | python | def unlist(ctx, unlist_account, account):
""" Remove an account from any list
"""
account = Account(account, blockchain_instance=ctx.blockchain)
print_tx(account.nolist(unlist_account)) | Remove an account from any list | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/account.py#L287-L291 |
bitshares/uptick | uptick/account.py | setproxy | def setproxy(ctx, proxy_account, account):
""" Set the proxy account for an account
"""
print_tx(ctx.bitshares.set_proxy(proxy_account, account=account)) | python | def setproxy(ctx, proxy_account, account):
""" Set the proxy account for an account
"""
print_tx(ctx.bitshares.set_proxy(proxy_account, account=account)) | Set the proxy account for an account | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/account.py#L305-L308 |
bitshares/uptick | uptick/callorders.py | calls | def calls(ctx, obj, limit):
""" List call/short positions of an account or an asset
"""
if obj.upper() == obj:
# Asset
from bitshares.asset import Asset
asset = Asset(obj, full=True)
calls = asset.get_call_orders(limit)
t = [["acount", "debt", "collateral", "call price", "ratio"]]
for call in calls:
t.append(
[
str(call["account"]["name"]),
str(call["debt"]),
str(call["collateral"]),
str(call["call_price"]),
"%.2f" % (call["ratio"]),
]
)
print_table(t)
else:
# Account
from bitshares.dex import Dex
dex = Dex(bitshares_instance=ctx.bitshares)
calls = dex.list_debt_positions(account=obj)
t = [["debt", "collateral", "call price", "ratio"]]
for symbol in calls:
t.append(
[
str(calls[symbol]["debt"]),
str(calls[symbol]["collateral"]),
str(calls[symbol]["call_price"]),
"%.2f" % (calls[symbol]["ratio"]),
]
)
print_table(t) | python | def calls(ctx, obj, limit):
""" List call/short positions of an account or an asset
"""
if obj.upper() == obj:
# Asset
from bitshares.asset import Asset
asset = Asset(obj, full=True)
calls = asset.get_call_orders(limit)
t = [["acount", "debt", "collateral", "call price", "ratio"]]
for call in calls:
t.append(
[
str(call["account"]["name"]),
str(call["debt"]),
str(call["collateral"]),
str(call["call_price"]),
"%.2f" % (call["ratio"]),
]
)
print_table(t)
else:
# Account
from bitshares.dex import Dex
dex = Dex(bitshares_instance=ctx.bitshares)
calls = dex.list_debt_positions(account=obj)
t = [["debt", "collateral", "call price", "ratio"]]
for symbol in calls:
t.append(
[
str(calls[symbol]["debt"]),
str(calls[symbol]["collateral"]),
str(calls[symbol]["call_price"]),
"%.2f" % (calls[symbol]["ratio"]),
]
)
print_table(t) | List call/short positions of an account or an asset | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/callorders.py#L20-L57 |
bitshares/uptick | uptick/callorders.py | settlements | def settlements(ctx, asset, limit):
""" Show pending settlement orders of a bitasset
"""
from bitshares.asset import Asset
asset = Asset(asset, full=True)
if not asset.is_bitasset:
print_message("{} is not a bitasset.".format(asset["symbol"]), "warning")
sys.exit(1)
calls = asset.get_settle_orders(limit)
t = [["acount", "amount", "date"]]
for call in calls:
t.append([str(call["account"]["name"]), str(call["amount"]), str(call["date"])])
print_table(t) | python | def settlements(ctx, asset, limit):
""" Show pending settlement orders of a bitasset
"""
from bitshares.asset import Asset
asset = Asset(asset, full=True)
if not asset.is_bitasset:
print_message("{} is not a bitasset.".format(asset["symbol"]), "warning")
sys.exit(1)
calls = asset.get_settle_orders(limit)
t = [["acount", "amount", "date"]]
for call in calls:
t.append([str(call["account"]["name"]), str(call["amount"]), str(call["date"])])
print_table(t) | Show pending settlement orders of a bitasset | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/callorders.py#L65-L78 |
bitshares/uptick | uptick/workers.py | approveworker | def approveworker(ctx, workers, account):
""" Approve worker(es)
"""
print_tx(ctx.bitshares.approveworker(workers, account=account)) | python | def approveworker(ctx, workers, account):
""" Approve worker(es)
"""
print_tx(ctx.bitshares.approveworker(workers, account=account)) | Approve worker(es) | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/workers.py#L23-L26 |
bitshares/uptick | uptick/workers.py | disapproveworker | def disapproveworker(ctx, workers, account):
""" Disapprove worker(es)
"""
print_tx(ctx.bitshares.disapproveworker(workers, account=account)) | python | def disapproveworker(ctx, workers, account):
""" Disapprove worker(es)
"""
print_tx(ctx.bitshares.disapproveworker(workers, account=account)) | Disapprove worker(es) | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/workers.py#L40-L43 |
bitshares/uptick | uptick/workers.py | workers | def workers(ctx, account, top):
""" List all workers (of an account)
"""
workers = Workers(account)
t = [["id", "name/url", "daily_pay", "votes", "time", "account"]]
workers_sorted = sorted(
workers, key=lambda x: int(x["total_votes_for"]), reverse=True
)
if top:
workers_sorted = workers_sorted[: top + 1]
for worker in workers_sorted:
if worker["work_end_date"] < datetime.datetime.utcnow():
continue
votes = Amount({"amount": worker["total_votes_for"], "asset_id": "1.3.0"})
amount = Amount({"amount": worker["daily_pay"], "asset_id": "1.3.0"})
t.append(
[
worker["id"],
"{name}\n{url}".format(**worker),
str(amount),
str(votes),
"{work_begin_date:%Y-%m-%d}\n-\n{work_end_date:%Y-%m-%d}".format(
**worker
),
str(Account(worker["worker_account"])["name"]),
]
)
print_table(t) | python | def workers(ctx, account, top):
""" List all workers (of an account)
"""
workers = Workers(account)
t = [["id", "name/url", "daily_pay", "votes", "time", "account"]]
workers_sorted = sorted(
workers, key=lambda x: int(x["total_votes_for"]), reverse=True
)
if top:
workers_sorted = workers_sorted[: top + 1]
for worker in workers_sorted:
if worker["work_end_date"] < datetime.datetime.utcnow():
continue
votes = Amount({"amount": worker["total_votes_for"], "asset_id": "1.3.0"})
amount = Amount({"amount": worker["daily_pay"], "asset_id": "1.3.0"})
t.append(
[
worker["id"],
"{name}\n{url}".format(**worker),
str(amount),
str(votes),
"{work_begin_date:%Y-%m-%d}\n-\n{work_end_date:%Y-%m-%d}".format(
**worker
),
str(Account(worker["worker_account"])["name"]),
]
)
print_table(t) | List all workers (of an account) | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/workers.py#L51-L78 |
bitshares/uptick | uptick/wallet.py | addkey | def addkey(ctx, key):
""" Add a private key to the wallet
"""
if not key:
while True:
key = click.prompt(
"Private Key (wif) [Enter to quit]",
hide_input=True,
show_default=False,
default="exit",
)
if not key or key == "exit":
break
try:
ctx.bitshares.wallet.addPrivateKey(key)
except Exception as e:
click.echo(str(e))
continue
else:
for k in key:
try:
ctx.bitshares.wallet.addPrivateKey(k)
except Exception as e:
click.echo(str(e))
installedKeys = ctx.bitshares.wallet.getPublicKeys()
if len(installedKeys) == 1:
name = ctx.bitshares.wallet.getAccountFromPublicKey(installedKeys[0])
if name: # only if a name to the key was found
account = Account(name, bitshares_instance=ctx.bitshares)
click.echo("=" * 30)
click.echo("Setting new default user: %s" % account["name"])
click.echo()
click.echo("You can change these settings with:")
click.echo(" uptick set default_account <account>")
click.echo("=" * 30)
config["default_account"] = account["name"] | python | def addkey(ctx, key):
""" Add a private key to the wallet
"""
if not key:
while True:
key = click.prompt(
"Private Key (wif) [Enter to quit]",
hide_input=True,
show_default=False,
default="exit",
)
if not key or key == "exit":
break
try:
ctx.bitshares.wallet.addPrivateKey(key)
except Exception as e:
click.echo(str(e))
continue
else:
for k in key:
try:
ctx.bitshares.wallet.addPrivateKey(k)
except Exception as e:
click.echo(str(e))
installedKeys = ctx.bitshares.wallet.getPublicKeys()
if len(installedKeys) == 1:
name = ctx.bitshares.wallet.getAccountFromPublicKey(installedKeys[0])
if name: # only if a name to the key was found
account = Account(name, bitshares_instance=ctx.bitshares)
click.echo("=" * 30)
click.echo("Setting new default user: %s" % account["name"])
click.echo()
click.echo("You can change these settings with:")
click.echo(" uptick set default_account <account>")
click.echo("=" * 30)
config["default_account"] = account["name"] | Add a private key to the wallet | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/wallet.py#L47-L83 |
bitshares/uptick | uptick/wallet.py | delkey | def delkey(ctx, pubkeys):
""" Delete a private key from the wallet
"""
if not pubkeys:
pubkeys = click.prompt("Public Keys").split(" ")
if click.confirm(
"Are you sure you want to delete keys from your wallet?\n"
"This step is IRREVERSIBLE! If you don't have a backup, "
"You may lose access to your account!"
):
for pub in pubkeys:
ctx.bitshares.wallet.removePrivateKeyFromPublicKey(pub) | python | def delkey(ctx, pubkeys):
""" Delete a private key from the wallet
"""
if not pubkeys:
pubkeys = click.prompt("Public Keys").split(" ")
if click.confirm(
"Are you sure you want to delete keys from your wallet?\n"
"This step is IRREVERSIBLE! If you don't have a backup, "
"You may lose access to your account!"
):
for pub in pubkeys:
ctx.bitshares.wallet.removePrivateKeyFromPublicKey(pub) | Delete a private key from the wallet | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/wallet.py#L90-L101 |
bitshares/uptick | uptick/wallet.py | getkey | def getkey(ctx, pubkey):
""" Obtain private key in WIF format
"""
click.echo(ctx.bitshares.wallet.getPrivateKeyForPublicKey(pubkey)) | python | def getkey(ctx, pubkey):
""" Obtain private key in WIF format
"""
click.echo(ctx.bitshares.wallet.getPrivateKeyForPublicKey(pubkey)) | Obtain private key in WIF format | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/wallet.py#L109-L112 |
bitshares/uptick | uptick/wallet.py | listkeys | def listkeys(ctx):
""" List all keys (for all networks)
"""
t = [["Available Key"]]
for key in ctx.bitshares.wallet.getPublicKeys():
t.append([key])
print_table(t) | python | def listkeys(ctx):
""" List all keys (for all networks)
"""
t = [["Available Key"]]
for key in ctx.bitshares.wallet.getPublicKeys():
t.append([key])
print_table(t) | List all keys (for all networks) | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/wallet.py#L118-L124 |
bitshares/uptick | uptick/wallet.py | listaccounts | def listaccounts(ctx):
""" List accounts (for the connected network)
"""
t = [["Name", "Key", "Owner", "Active", "Memo"]]
for key in tqdm(ctx.bitshares.wallet.getPublicKeys(True)):
for account in ctx.bitshares.wallet.getAccountsFromPublicKey(key):
account = Account(account)
is_owner = key in [x[0] for x in account["owner"]["key_auths"]]
is_active = key in [x[0] for x in account["active"]["key_auths"]]
is_memo = key == account["options"]["memo_key"]
t.append([
account["name"],
key,
"x" if is_owner else "",
"x" if is_active else "",
"x" if is_memo else "",
])
print_table(t) | python | def listaccounts(ctx):
""" List accounts (for the connected network)
"""
t = [["Name", "Key", "Owner", "Active", "Memo"]]
for key in tqdm(ctx.bitshares.wallet.getPublicKeys(True)):
for account in ctx.bitshares.wallet.getAccountsFromPublicKey(key):
account = Account(account)
is_owner = key in [x[0] for x in account["owner"]["key_auths"]]
is_active = key in [x[0] for x in account["active"]["key_auths"]]
is_memo = key == account["options"]["memo_key"]
t.append([
account["name"],
key,
"x" if is_owner else "",
"x" if is_active else "",
"x" if is_memo else "",
])
print_table(t) | List accounts (for the connected network) | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/wallet.py#L130-L147 |
bitshares/uptick | uptick/wallet.py | importaccount | def importaccount(ctx, account, role):
""" Import an account using an account password
"""
from bitsharesbase.account import PasswordKey
password = click.prompt("Account Passphrase", hide_input=True)
account = Account(account, bitshares_instance=ctx.bitshares)
imported = False
if role == "owner":
owner_key = PasswordKey(account["name"], password, role="owner")
owner_pubkey = format(
owner_key.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]
)
if owner_pubkey in [x[0] for x in account["owner"]["key_auths"]]:
print_message("Importing owner key!")
owner_privkey = owner_key.get_private_key()
ctx.bitshares.wallet.addPrivateKey(owner_privkey)
imported = True
if role == "active":
active_key = PasswordKey(account["name"], password, role="active")
active_pubkey = format(
active_key.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]
)
if active_pubkey in [x[0] for x in account["active"]["key_auths"]]:
print_message("Importing active key!")
active_privkey = active_key.get_private_key()
ctx.bitshares.wallet.addPrivateKey(active_privkey)
imported = True
if role == "memo":
memo_key = PasswordKey(account["name"], password, role=role)
memo_pubkey = format(
memo_key.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]
)
if memo_pubkey == account["memo_key"]:
print_message("Importing memo key!")
memo_privkey = memo_key.get_private_key()
ctx.bitshares.wallet.addPrivateKey(memo_privkey)
imported = True
if not imported:
print_message("No matching key(s) found. Password correct?", "error") | python | def importaccount(ctx, account, role):
""" Import an account using an account password
"""
from bitsharesbase.account import PasswordKey
password = click.prompt("Account Passphrase", hide_input=True)
account = Account(account, bitshares_instance=ctx.bitshares)
imported = False
if role == "owner":
owner_key = PasswordKey(account["name"], password, role="owner")
owner_pubkey = format(
owner_key.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]
)
if owner_pubkey in [x[0] for x in account["owner"]["key_auths"]]:
print_message("Importing owner key!")
owner_privkey = owner_key.get_private_key()
ctx.bitshares.wallet.addPrivateKey(owner_privkey)
imported = True
if role == "active":
active_key = PasswordKey(account["name"], password, role="active")
active_pubkey = format(
active_key.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]
)
if active_pubkey in [x[0] for x in account["active"]["key_auths"]]:
print_message("Importing active key!")
active_privkey = active_key.get_private_key()
ctx.bitshares.wallet.addPrivateKey(active_privkey)
imported = True
if role == "memo":
memo_key = PasswordKey(account["name"], password, role=role)
memo_pubkey = format(
memo_key.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]
)
if memo_pubkey == account["memo_key"]:
print_message("Importing memo key!")
memo_privkey = memo_key.get_private_key()
ctx.bitshares.wallet.addPrivateKey(memo_privkey)
imported = True
if not imported:
print_message("No matching key(s) found. Password correct?", "error") | Import an account using an account password | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/wallet.py#L158-L201 |
bitshares/uptick | uptick/api.py | create | def create(ctx):
""" Create default config file
"""
import shutil
this_dir, this_filename = os.path.split(__file__)
default_config_file = os.path.join(this_dir, "apis/example-config.yaml")
config_file = ctx.obj["configfile"]
shutil.copyfile(default_config_file, config_file)
print_message("Config file created: {}".format(config_file)) | python | def create(ctx):
""" Create default config file
"""
import shutil
this_dir, this_filename = os.path.split(__file__)
default_config_file = os.path.join(this_dir, "apis/example-config.yaml")
config_file = ctx.obj["configfile"]
shutil.copyfile(default_config_file, config_file)
print_message("Config file created: {}".format(config_file)) | Create default config file | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/api.py#L28-L37 |
bitshares/uptick | uptick/api.py | start | def start(ctx):
""" Start the API according to the config file
"""
module = ctx.config.get("api", "poloniex")
# unlockWallet
if module == "poloniex":
from .apis import poloniex
poloniex.run(ctx, port=5000)
else:
print_message("Unkown 'api'!", "error") | python | def start(ctx):
""" Start the API according to the config file
"""
module = ctx.config.get("api", "poloniex")
# unlockWallet
if module == "poloniex":
from .apis import poloniex
poloniex.run(ctx, port=5000)
else:
print_message("Unkown 'api'!", "error") | Start the API according to the config file | https://github.com/bitshares/uptick/blob/66c102200fdbf96cef4fd55cc69d00e690f62001/uptick/api.py#L43-L53 |
rmax/scrapydo | scrapydo/api.py | fetch | def fetch(url, **kwargs):
"""Fetches an URL and returns the response.
Parameters
----------
url : str
An URL to crawl.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler instance.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items is collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
timeout : int, (default: DEFAULT_TIMEOUT)
Result wait timeout.
Returns
-------
out : Response or None
Returns a ``Response`` instance if the crawler is able to retrieve a
response, otherwise it returns ``None``.
Raises
------
crochet.TimeoutError
"""
timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT)
kwargs['return_crawler'] = True
crawler = wait_for(timeout, _fetch_in_reactor, url, **kwargs)
if hasattr(crawler.spider, 'response'):
return crawler.spider.response | python | def fetch(url, **kwargs):
"""Fetches an URL and returns the response.
Parameters
----------
url : str
An URL to crawl.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler instance.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items is collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
timeout : int, (default: DEFAULT_TIMEOUT)
Result wait timeout.
Returns
-------
out : Response or None
Returns a ``Response`` instance if the crawler is able to retrieve a
response, otherwise it returns ``None``.
Raises
------
crochet.TimeoutError
"""
timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT)
kwargs['return_crawler'] = True
crawler = wait_for(timeout, _fetch_in_reactor, url, **kwargs)
if hasattr(crawler.spider, 'response'):
return crawler.spider.response | Fetches an URL and returns the response.
Parameters
----------
url : str
An URL to crawl.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler instance.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items is collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
timeout : int, (default: DEFAULT_TIMEOUT)
Result wait timeout.
Returns
-------
out : Response or None
Returns a ``Response`` instance if the crawler is able to retrieve a
response, otherwise it returns ``None``.
Raises
------
crochet.TimeoutError | https://github.com/rmax/scrapydo/blob/b0f9e6d50a5ea9d2ba8335bffa877003109c3af5/scrapydo/api.py#L25-L59 |
rmax/scrapydo | scrapydo/api.py | crawl | def crawl(url, callback, **kwargs):
"""Crawls an URL with given callback.
Parameters
----------
url : str
An URL to crawl.
callback : callable
A function to be used as spider callback for the given URL.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler instance.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items is collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
timeout : int, (default: DEFAULT_TIMEOUT)
Result wait timeout.
Returns
-------
out
By default, the scraped items. If ``return_crawler`` is ``True``,
returns the crawler instance.
Raises
------
crochet.TimeoutError
"""
timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT)
return wait_for(timeout, _crawl_in_reactor, url, callback, **kwargs) | python | def crawl(url, callback, **kwargs):
"""Crawls an URL with given callback.
Parameters
----------
url : str
An URL to crawl.
callback : callable
A function to be used as spider callback for the given URL.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler instance.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items is collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
timeout : int, (default: DEFAULT_TIMEOUT)
Result wait timeout.
Returns
-------
out
By default, the scraped items. If ``return_crawler`` is ``True``,
returns the crawler instance.
Raises
------
crochet.TimeoutError
"""
timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT)
return wait_for(timeout, _crawl_in_reactor, url, callback, **kwargs) | Crawls an URL with given callback.
Parameters
----------
url : str
An URL to crawl.
callback : callable
A function to be used as spider callback for the given URL.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler instance.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items is collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
timeout : int, (default: DEFAULT_TIMEOUT)
Result wait timeout.
Returns
-------
out
By default, the scraped items. If ``return_crawler`` is ``True``,
returns the crawler instance.
Raises
------
crochet.TimeoutError | https://github.com/rmax/scrapydo/blob/b0f9e6d50a5ea9d2ba8335bffa877003109c3af5/scrapydo/api.py#L62-L95 |
rmax/scrapydo | scrapydo/api.py | run_spider | def run_spider(spider_cls, **kwargs):
"""Runs a spider and returns the scraped items (by default).
Parameters
----------
spider_cls : scrapy.Spider
A spider class to run.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items is collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
timeout : int, (default: DEFAULT_TIMEOUT)
Result wait timeout.
Returns
-------
out : list or scrapy.crawler.Crawler instance
The scraped items by default or the crawler instance if
``return_crawler`` is ``True``.
Raises
------
crochet.TimeoutError
"""
timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT)
return wait_for(timeout, _run_spider_in_reactor, spider_cls, **kwargs) | python | def run_spider(spider_cls, **kwargs):
"""Runs a spider and returns the scraped items (by default).
Parameters
----------
spider_cls : scrapy.Spider
A spider class to run.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items is collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
timeout : int, (default: DEFAULT_TIMEOUT)
Result wait timeout.
Returns
-------
out : list or scrapy.crawler.Crawler instance
The scraped items by default or the crawler instance if
``return_crawler`` is ``True``.
Raises
------
crochet.TimeoutError
"""
timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT)
return wait_for(timeout, _run_spider_in_reactor, spider_cls, **kwargs) | Runs a spider and returns the scraped items (by default).
Parameters
----------
spider_cls : scrapy.Spider
A spider class to run.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items is collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
timeout : int, (default: DEFAULT_TIMEOUT)
Result wait timeout.
Returns
-------
out : list or scrapy.crawler.Crawler instance
The scraped items by default or the crawler instance if
``return_crawler`` is ``True``.
Raises
------
crochet.TimeoutError | https://github.com/rmax/scrapydo/blob/b0f9e6d50a5ea9d2ba8335bffa877003109c3af5/scrapydo/api.py#L98-L127 |
rmax/scrapydo | scrapydo/api.py | _fetch_in_reactor | def _fetch_in_reactor(url, spider_cls=DefaultSpider, **kwargs):
"""Fetches an URL and returns the response.
Parameters
----------
url : str
An URL to fetch.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler.
kwargs : dict, optional
Additional arguments to be passed to ``_run_spider_in_reactor``.
Returns
-------
crochet.EventualResult
"""
def parse(self, response):
self.response = response
req = Request(url) if isinstance(url, six.string_types) else url
req.dont_filter = True
req.meta['handle_httpstatus_all'] = True
spider_cls = override_start_requests(spider_cls, [req], parse=parse)
return _run_spider_in_reactor(spider_cls, **kwargs) | python | def _fetch_in_reactor(url, spider_cls=DefaultSpider, **kwargs):
"""Fetches an URL and returns the response.
Parameters
----------
url : str
An URL to fetch.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler.
kwargs : dict, optional
Additional arguments to be passed to ``_run_spider_in_reactor``.
Returns
-------
crochet.EventualResult
"""
def parse(self, response):
self.response = response
req = Request(url) if isinstance(url, six.string_types) else url
req.dont_filter = True
req.meta['handle_httpstatus_all'] = True
spider_cls = override_start_requests(spider_cls, [req], parse=parse)
return _run_spider_in_reactor(spider_cls, **kwargs) | Fetches an URL and returns the response.
Parameters
----------
url : str
An URL to fetch.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler.
kwargs : dict, optional
Additional arguments to be passed to ``_run_spider_in_reactor``.
Returns
-------
crochet.EventualResult | https://github.com/rmax/scrapydo/blob/b0f9e6d50a5ea9d2ba8335bffa877003109c3af5/scrapydo/api.py#L130-L153 |
rmax/scrapydo | scrapydo/api.py | _crawl_in_reactor | def _crawl_in_reactor(url, callback, spider_cls=DefaultSpider, **kwargs):
"""Crawls given URL with given callback.
Parameters
----------
url : str
The URL to crawl.
callback : callable
Function to be used as callback for the request.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler instance.
kwargs : dict, optional
Extra arguments to be passed to ``_run_spider_in_reactor``.
Returns
-------
crochet.EventualResult
"""
spider_cls = override_start_requests(spider_cls, [url], callback)
return _run_spider_in_reactor(spider_cls, **kwargs) | python | def _crawl_in_reactor(url, callback, spider_cls=DefaultSpider, **kwargs):
"""Crawls given URL with given callback.
Parameters
----------
url : str
The URL to crawl.
callback : callable
Function to be used as callback for the request.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler instance.
kwargs : dict, optional
Extra arguments to be passed to ``_run_spider_in_reactor``.
Returns
-------
crochet.EventualResult
"""
spider_cls = override_start_requests(spider_cls, [url], callback)
return _run_spider_in_reactor(spider_cls, **kwargs) | Crawls given URL with given callback.
Parameters
----------
url : str
The URL to crawl.
callback : callable
Function to be used as callback for the request.
spider_cls : scrapy.Spider (default: DefaultSpider)
A spider class to be used in the crawler instance.
kwargs : dict, optional
Extra arguments to be passed to ``_run_spider_in_reactor``.
Returns
-------
crochet.EventualResult | https://github.com/rmax/scrapydo/blob/b0f9e6d50a5ea9d2ba8335bffa877003109c3af5/scrapydo/api.py#L156-L176 |
rmax/scrapydo | scrapydo/api.py | _run_spider_in_reactor | def _run_spider_in_reactor(spider_cls, capture_items=True, return_crawler=False,
settings=None, **kwargs):
"""Runs given spider inside the twisted reactdor.
Parameters
----------
spider_cls : scrapy.Spider
Spider to run.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items is collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
Returns
-------
out : crochet.EventualResult
If ``capture_items`` is ``True``, returns scraped items. If
``return_crawler`` is ``True``, returns the crawler instance.
"""
settings = settings or {}
crawler_settings = get_project_settings().copy()
crawler_settings.setdict(default_settings)
crawler_settings.setdict(settings)
log_scrapy_info(crawler_settings)
crawler = Crawler(spider_cls, crawler_settings)
d = crawler.crawl(**kwargs)
if capture_items:
crawler.items = _OutputItems()
crawler.signals.connect(crawler.items.append, signal=signals.item_scraped)
d.addCallback(lambda _: crawler.items)
if return_crawler:
d.addCallback(lambda _: crawler)
return d | python | def _run_spider_in_reactor(spider_cls, capture_items=True, return_crawler=False,
settings=None, **kwargs):
"""Runs given spider inside the twisted reactdor.
Parameters
----------
spider_cls : scrapy.Spider
Spider to run.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items is collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
Returns
-------
out : crochet.EventualResult
If ``capture_items`` is ``True``, returns scraped items. If
``return_crawler`` is ``True``, returns the crawler instance.
"""
settings = settings or {}
crawler_settings = get_project_settings().copy()
crawler_settings.setdict(default_settings)
crawler_settings.setdict(settings)
log_scrapy_info(crawler_settings)
crawler = Crawler(spider_cls, crawler_settings)
d = crawler.crawl(**kwargs)
if capture_items:
crawler.items = _OutputItems()
crawler.signals.connect(crawler.items.append, signal=signals.item_scraped)
d.addCallback(lambda _: crawler.items)
if return_crawler:
d.addCallback(lambda _: crawler)
return d | Runs given spider inside the twisted reactdor.
Parameters
----------
spider_cls : scrapy.Spider
Spider to run.
capture_items : bool (default: True)
If enabled, the scraped items are captured and returned.
return_crawler : bool (default: False)
If enabled, the crawler instance is returned. If ``capture_items`` is
enabled, the scraped items is collected in ``crawler.items``.
settings : dict, optional
Custom crawler settings.
Returns
-------
out : crochet.EventualResult
If ``capture_items`` is ``True``, returns scraped items. If
``return_crawler`` is ``True``, returns the crawler instance. | https://github.com/rmax/scrapydo/blob/b0f9e6d50a5ea9d2ba8335bffa877003109c3af5/scrapydo/api.py#L180-L216 |
rmax/scrapydo | scrapydo/api.py | override_start_requests | def override_start_requests(spider_cls, start_urls, callback=None, **attrs):
"""Returns a new spider class overriding the ``start_requests``.
This function is useful to replace the start requests of an existing spider
class on runtime.
Parameters
----------
spider_cls : scrapy.Spider
Spider class to be used as base class.
start_urls : iterable
Iterable of URLs or ``Request`` objects.
callback : callable, optional
Callback for the start URLs.
attrs : dict, optional
Additional class attributes.
Returns
-------
out : class
A subclass of ``spider_cls`` with overrided ``start_requests`` method.
"""
def start_requests():
for url in start_urls:
req = Request(url, dont_filter=True) if isinstance(url, six.string_types) else url
if callback is not None:
req.callback = callback
yield req
attrs['start_requests'] = staticmethod(start_requests)
return type(spider_cls.__name__, (spider_cls, ), attrs) | python | def override_start_requests(spider_cls, start_urls, callback=None, **attrs):
"""Returns a new spider class overriding the ``start_requests``.
This function is useful to replace the start requests of an existing spider
class on runtime.
Parameters
----------
spider_cls : scrapy.Spider
Spider class to be used as base class.
start_urls : iterable
Iterable of URLs or ``Request`` objects.
callback : callable, optional
Callback for the start URLs.
attrs : dict, optional
Additional class attributes.
Returns
-------
out : class
A subclass of ``spider_cls`` with overrided ``start_requests`` method.
"""
def start_requests():
for url in start_urls:
req = Request(url, dont_filter=True) if isinstance(url, six.string_types) else url
if callback is not None:
req.callback = callback
yield req
attrs['start_requests'] = staticmethod(start_requests)
return type(spider_cls.__name__, (spider_cls, ), attrs) | Returns a new spider class overriding the ``start_requests``.
This function is useful to replace the start requests of an existing spider
class on runtime.
Parameters
----------
spider_cls : scrapy.Spider
Spider class to be used as base class.
start_urls : iterable
Iterable of URLs or ``Request`` objects.
callback : callable, optional
Callback for the start URLs.
attrs : dict, optional
Additional class attributes.
Returns
-------
out : class
A subclass of ``spider_cls`` with overrided ``start_requests`` method. | https://github.com/rmax/scrapydo/blob/b0f9e6d50a5ea9d2ba8335bffa877003109c3af5/scrapydo/api.py#L225-L255 |
rmax/scrapydo | scrapydo/api.py | wait_for | def wait_for(timeout, func, *args, **kwargs):
"""Waits for a eventual result.
Parameters
----------
timeout : int
How much time to wait, in seconds.
func : callable
A function that returns ``crochet.EventualResult``.
args : tuple, optional
Arguments for ``func``.
kwargs : dict, optional
Keyword arguments for ``func``.
Returns
-------
out
Given ``func`` result.
Raises
------
corchet.TimeoutError
"""
result = func(*args, **kwargs)
try:
return result.wait(timeout)
except crochet.TimeoutError:
result.cancel()
raise | python | def wait_for(timeout, func, *args, **kwargs):
"""Waits for a eventual result.
Parameters
----------
timeout : int
How much time to wait, in seconds.
func : callable
A function that returns ``crochet.EventualResult``.
args : tuple, optional
Arguments for ``func``.
kwargs : dict, optional
Keyword arguments for ``func``.
Returns
-------
out
Given ``func`` result.
Raises
------
corchet.TimeoutError
"""
result = func(*args, **kwargs)
try:
return result.wait(timeout)
except crochet.TimeoutError:
result.cancel()
raise | Waits for a eventual result.
Parameters
----------
timeout : int
How much time to wait, in seconds.
func : callable
A function that returns ``crochet.EventualResult``.
args : tuple, optional
Arguments for ``func``.
kwargs : dict, optional
Keyword arguments for ``func``.
Returns
-------
out
Given ``func`` result.
Raises
------
corchet.TimeoutError | https://github.com/rmax/scrapydo/blob/b0f9e6d50a5ea9d2ba8335bffa877003109c3af5/scrapydo/api.py#L258-L287 |
rmax/scrapydo | scrapydo/utils.py | highlight | def highlight(code, lexer='html', formatter='html', output_wrapper=None):
"""Highlights given code using pygments."""
if not pygments:
raise TypeError("pygments module required")
if not isinstance(code, six.string_types):
code = pprint.pformat(code)
if isinstance(lexer, six.string_types):
lexer = pygments.lexers.get_lexer_by_name(lexer)
if isinstance(formatter, six.string_types):
formatter = pygments.formatters.get_formatter_by_name(formatter)
if formatter.name.lower() == 'html':
formatter.full = True
formatter.cssclass = "pygments-%s" % uuid.uuid4()
if output_wrapper is None:
output_wrapper = HTML
return output_wrapper(pygments.highlight(code, lexer, formatter)) | python | def highlight(code, lexer='html', formatter='html', output_wrapper=None):
"""Highlights given code using pygments."""
if not pygments:
raise TypeError("pygments module required")
if not isinstance(code, six.string_types):
code = pprint.pformat(code)
if isinstance(lexer, six.string_types):
lexer = pygments.lexers.get_lexer_by_name(lexer)
if isinstance(formatter, six.string_types):
formatter = pygments.formatters.get_formatter_by_name(formatter)
if formatter.name.lower() == 'html':
formatter.full = True
formatter.cssclass = "pygments-%s" % uuid.uuid4()
if output_wrapper is None:
output_wrapper = HTML
return output_wrapper(pygments.highlight(code, lexer, formatter)) | Highlights given code using pygments. | https://github.com/rmax/scrapydo/blob/b0f9e6d50a5ea9d2ba8335bffa877003109c3af5/scrapydo/utils.py#L16-L31 |
williballenthin/ida-netnode | netnode/netnode.py | Netnode._get_next_slot | def _get_next_slot(self, tag):
'''
get the first unused supval table key, or 0 if the
table is empty.
useful for filling the supval table sequentially.
'''
slot = self._n.suplast(tag)
if slot is None or slot == idaapi.BADNODE:
return 0
else:
return slot + 1 | python | def _get_next_slot(self, tag):
'''
get the first unused supval table key, or 0 if the
table is empty.
useful for filling the supval table sequentially.
'''
slot = self._n.suplast(tag)
if slot is None or slot == idaapi.BADNODE:
return 0
else:
return slot + 1 | get the first unused supval table key, or 0 if the
table is empty.
useful for filling the supval table sequentially. | https://github.com/williballenthin/ida-netnode/blob/6ca60ceddaa2e3283207217b832accfcf8dd01dc/netnode/netnode.py#L104-L114 |
mattboyer/sqbrite | version.py | GitRunner.run_git | def run_git(self, args, git_env=None):
'''
Runs the git executable with the arguments given and returns a list of
lines produced on its standard output.
'''
popen_kwargs = {
'stdout': subprocess.PIPE,
'stderr': subprocess.PIPE,
}
if git_env:
popen_kwargs['env'] = git_env
if self._git_toplevel:
popen_kwargs['cwd'] = self._git_toplevel
git_process = subprocess.Popen(
[GitRunner._git_executable] + args,
**popen_kwargs
)
try:
out, err = git_process.communicate()
git_process.wait()
except Exception as e:
raise GitError("Couldn't run 'git {args}':{newline}{ex}".format(
args=' '.join(args),
newline=os.linesep,
ex=str(e)
))
if (0 != git_process.returncode) or err:
if err:
err = err.decode('utf_8')
raise GitError("'git {args}' failed with:{newline}{err}".format(
args=' '.join(args),
newline=os.linesep,
err=err
))
if not out:
raise ValueError("No output")
return out.decode('utf_8').splitlines() | python | def run_git(self, args, git_env=None):
'''
Runs the git executable with the arguments given and returns a list of
lines produced on its standard output.
'''
popen_kwargs = {
'stdout': subprocess.PIPE,
'stderr': subprocess.PIPE,
}
if git_env:
popen_kwargs['env'] = git_env
if self._git_toplevel:
popen_kwargs['cwd'] = self._git_toplevel
git_process = subprocess.Popen(
[GitRunner._git_executable] + args,
**popen_kwargs
)
try:
out, err = git_process.communicate()
git_process.wait()
except Exception as e:
raise GitError("Couldn't run 'git {args}':{newline}{ex}".format(
args=' '.join(args),
newline=os.linesep,
ex=str(e)
))
if (0 != git_process.returncode) or err:
if err:
err = err.decode('utf_8')
raise GitError("'git {args}' failed with:{newline}{err}".format(
args=' '.join(args),
newline=os.linesep,
err=err
))
if not out:
raise ValueError("No output")
return out.decode('utf_8').splitlines() | Runs the git executable with the arguments given and returns a list of
lines produced on its standard output. | https://github.com/mattboyer/sqbrite/blob/22d8049f8c03e52ed5232726f883517813fe7e8c/version.py#L131-L175 |
mozilla/build-mar | src/mardor/writer.py | add_signature_block | def add_signature_block(src_fileobj, dest_fileobj, signing_algorithm, signature=None):
"""Add a signature block to marfile, a MarReader object.
Productversion and channel are preserved, but any existing signatures are overwritten.
Args:
src_fileobj (file object): The input MAR file to add a signature to
dest_fileobj (file object): File object to write new MAR file to. Must be open in w+b mode.
signing_algorithm (str): One of 'sha1', or 'sha384'
signature (bytes): Signature to write, or None to use a dummy signature
"""
algo_id = {'sha1': 1, 'sha384': 2}[signing_algorithm]
if not signature:
signature = make_dummy_signature(algo_id)
src_fileobj.seek(0)
mardata = mar.parse_stream(src_fileobj)
# Header
header = mardata.header
dest_fileobj.write(mar_header.build(header))
# Signature block
sig = dict(algorithm_id=algo_id,
size=len(signature),
signature=signature,
)
# This will be fixed up later
filesize = 0
sigs_offset = dest_fileobj.tell()
sigs = sigs_header.build(dict(
filesize=filesize,
count=1,
sigs=[sig],
))
dest_fileobj.write(sigs)
# Write the additional section
dest_fileobj.write(extras_header.build(mardata.additional))
# Write the data
data_offset = dest_fileobj.tell()
src_fileobj.seek(mardata.data_offset)
write_to_file(takeexactly(src_fileobj, mardata.data_length), dest_fileobj)
# Write the index
index_offset = dest_fileobj.tell()
index = mardata.index
# Adjust the offsets
data_offset_delta = data_offset - mardata.data_offset
for e in index.entries:
e.offset += data_offset_delta
dest_fileobj.write(index_header.build(index))
filesize = dest_fileobj.tell()
# Go back and update the index offset and filesize
dest_fileobj.seek(0)
header.index_offset = index_offset
dest_fileobj.write(mar_header.build(header))
dest_fileobj.seek(sigs_offset)
sigs = sigs_header.build(dict(
filesize=filesize,
count=1,
sigs=[sig],
))
dest_fileobj.write(sigs) | python | def add_signature_block(src_fileobj, dest_fileobj, signing_algorithm, signature=None):
"""Add a signature block to marfile, a MarReader object.
Productversion and channel are preserved, but any existing signatures are overwritten.
Args:
src_fileobj (file object): The input MAR file to add a signature to
dest_fileobj (file object): File object to write new MAR file to. Must be open in w+b mode.
signing_algorithm (str): One of 'sha1', or 'sha384'
signature (bytes): Signature to write, or None to use a dummy signature
"""
algo_id = {'sha1': 1, 'sha384': 2}[signing_algorithm]
if not signature:
signature = make_dummy_signature(algo_id)
src_fileobj.seek(0)
mardata = mar.parse_stream(src_fileobj)
# Header
header = mardata.header
dest_fileobj.write(mar_header.build(header))
# Signature block
sig = dict(algorithm_id=algo_id,
size=len(signature),
signature=signature,
)
# This will be fixed up later
filesize = 0
sigs_offset = dest_fileobj.tell()
sigs = sigs_header.build(dict(
filesize=filesize,
count=1,
sigs=[sig],
))
dest_fileobj.write(sigs)
# Write the additional section
dest_fileobj.write(extras_header.build(mardata.additional))
# Write the data
data_offset = dest_fileobj.tell()
src_fileobj.seek(mardata.data_offset)
write_to_file(takeexactly(src_fileobj, mardata.data_length), dest_fileobj)
# Write the index
index_offset = dest_fileobj.tell()
index = mardata.index
# Adjust the offsets
data_offset_delta = data_offset - mardata.data_offset
for e in index.entries:
e.offset += data_offset_delta
dest_fileobj.write(index_header.build(index))
filesize = dest_fileobj.tell()
# Go back and update the index offset and filesize
dest_fileobj.seek(0)
header.index_offset = index_offset
dest_fileobj.write(mar_header.build(header))
dest_fileobj.seek(sigs_offset)
sigs = sigs_header.build(dict(
filesize=filesize,
count=1,
sigs=[sig],
))
dest_fileobj.write(sigs) | Add a signature block to marfile, a MarReader object.
Productversion and channel are preserved, but any existing signatures are overwritten.
Args:
src_fileobj (file object): The input MAR file to add a signature to
dest_fileobj (file object): File object to write new MAR file to. Must be open in w+b mode.
signing_algorithm (str): One of 'sha1', or 'sha384'
signature (bytes): Signature to write, or None to use a dummy signature | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/writer.py#L328-L399 |
mozilla/build-mar | src/mardor/writer.py | MarWriter.add | def add(self, path, compress=None):
"""Add `path` to the MAR file.
If `path` is a file, it will be added directly.
If `path` is a directory, it will be traversed recursively and all
files inside will be added.
Args:
path (str): path to file or directory on disk to add to this MAR
file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
"""
if os.path.isdir(path):
self.add_dir(path, compress)
else:
self.add_file(path, compress) | python | def add(self, path, compress=None):
"""Add `path` to the MAR file.
If `path` is a file, it will be added directly.
If `path` is a directory, it will be traversed recursively and all
files inside will be added.
Args:
path (str): path to file or directory on disk to add to this MAR
file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
"""
if os.path.isdir(path):
self.add_dir(path, compress)
else:
self.add_file(path, compress) | Add `path` to the MAR file.
If `path` is a file, it will be added directly.
If `path` is a directory, it will be traversed recursively and all
files inside will be added.
Args:
path (str): path to file or directory on disk to add to this MAR
file
compress (str): One of 'xz', 'bz2', or None. Defaults to None. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/writer.py#L117-L132 |
mozilla/build-mar | src/mardor/writer.py | MarWriter.add_dir | def add_dir(self, path, compress):
"""Add all files under directory `path` to the MAR file.
Args:
path (str): path to directory to add to this MAR file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
"""
if not os.path.isdir(path):
raise ValueError('{} is not a directory'.format(path))
for root, dirs, files in os.walk(path):
for f in files:
self.add_file(os.path.join(root, f), compress) | python | def add_dir(self, path, compress):
"""Add all files under directory `path` to the MAR file.
Args:
path (str): path to directory to add to this MAR file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
"""
if not os.path.isdir(path):
raise ValueError('{} is not a directory'.format(path))
for root, dirs, files in os.walk(path):
for f in files:
self.add_file(os.path.join(root, f), compress) | Add all files under directory `path` to the MAR file.
Args:
path (str): path to directory to add to this MAR file
compress (str): One of 'xz', 'bz2', or None. Defaults to None. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/writer.py#L134-L145 |
mozilla/build-mar | src/mardor/writer.py | MarWriter.add_fileobj | def add_fileobj(self, fileobj, path, compress, flags=None):
"""Add the contents of a file object to the MAR file.
Args:
fileobj (file-like object): open file object
path (str): name of this file in the MAR file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
flags (int): permission of this file in the MAR file. Defaults to the permissions of `path`
"""
f = file_iter(fileobj)
flags = flags or os.stat(path) & 0o777
return self.add_stream(f, path, compress, flags) | python | def add_fileobj(self, fileobj, path, compress, flags=None):
"""Add the contents of a file object to the MAR file.
Args:
fileobj (file-like object): open file object
path (str): name of this file in the MAR file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
flags (int): permission of this file in the MAR file. Defaults to the permissions of `path`
"""
f = file_iter(fileobj)
flags = flags or os.stat(path) & 0o777
return self.add_stream(f, path, compress, flags) | Add the contents of a file object to the MAR file.
Args:
fileobj (file-like object): open file object
path (str): name of this file in the MAR file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
flags (int): permission of this file in the MAR file. Defaults to the permissions of `path` | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/writer.py#L147-L158 |
mozilla/build-mar | src/mardor/writer.py | MarWriter.add_stream | def add_stream(self, stream, path, compress, flags):
"""Add the contents of an iterable to the MAR file.
Args:
stream (iterable): yields blocks of data
path (str): name of this file in the MAR file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
flags (int): permission of this file in the MAR file
"""
self.data_fileobj.seek(self.last_offset)
if compress == 'bz2':
stream = bz2_compress_stream(stream)
elif compress == 'xz':
stream = xz_compress_stream(stream)
elif compress is None:
pass
else:
raise ValueError('Unsupported compression type: {}'.format(compress))
size = write_to_file(stream, self.data_fileobj)
# On Windows, convert \ to /
# very difficult to mock this out for coverage on linux
if os.sep == '\\': # pragma: no cover
path = path.replace('\\', '/')
e = dict(
name=six.u(path),
offset=self.last_offset,
size=size,
flags=flags,
)
self.entries.append(e)
self.last_offset += e['size'] | python | def add_stream(self, stream, path, compress, flags):
"""Add the contents of an iterable to the MAR file.
Args:
stream (iterable): yields blocks of data
path (str): name of this file in the MAR file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
flags (int): permission of this file in the MAR file
"""
self.data_fileobj.seek(self.last_offset)
if compress == 'bz2':
stream = bz2_compress_stream(stream)
elif compress == 'xz':
stream = xz_compress_stream(stream)
elif compress is None:
pass
else:
raise ValueError('Unsupported compression type: {}'.format(compress))
size = write_to_file(stream, self.data_fileobj)
# On Windows, convert \ to /
# very difficult to mock this out for coverage on linux
if os.sep == '\\': # pragma: no cover
path = path.replace('\\', '/')
e = dict(
name=six.u(path),
offset=self.last_offset,
size=size,
flags=flags,
)
self.entries.append(e)
self.last_offset += e['size'] | Add the contents of an iterable to the MAR file.
Args:
stream (iterable): yields blocks of data
path (str): name of this file in the MAR file
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
flags (int): permission of this file in the MAR file | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/writer.py#L160-L194 |
mozilla/build-mar | src/mardor/writer.py | MarWriter.add_file | def add_file(self, path, compress):
"""Add a single file to the MAR file.
Args:
path (str): path to a file to add to this MAR file.
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
"""
if not os.path.isfile(path):
raise ValueError('{} is not a file'.format(path))
self.fileobj.seek(self.last_offset)
with open(path, 'rb') as f:
flags = os.stat(path).st_mode & 0o777
self.add_fileobj(f, path, compress, flags) | python | def add_file(self, path, compress):
"""Add a single file to the MAR file.
Args:
path (str): path to a file to add to this MAR file.
compress (str): One of 'xz', 'bz2', or None. Defaults to None.
"""
if not os.path.isfile(path):
raise ValueError('{} is not a file'.format(path))
self.fileobj.seek(self.last_offset)
with open(path, 'rb') as f:
flags = os.stat(path).st_mode & 0o777
self.add_fileobj(f, path, compress, flags) | Add a single file to the MAR file.
Args:
path (str): path to a file to add to this MAR file.
compress (str): One of 'xz', 'bz2', or None. Defaults to None. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/writer.py#L196-L209 |
mozilla/build-mar | src/mardor/writer.py | MarWriter.write_header | def write_header(self):
"""Write the MAR header to the file.
The MAR header includes the MAR magic bytes as well as the offset to
where the index data can be found.
"""
self.fileobj.seek(0)
header = mar_header.build(dict(index_offset=self.last_offset))
self.fileobj.write(header) | python | def write_header(self):
"""Write the MAR header to the file.
The MAR header includes the MAR magic bytes as well as the offset to
where the index data can be found.
"""
self.fileobj.seek(0)
header = mar_header.build(dict(index_offset=self.last_offset))
self.fileobj.write(header) | Write the MAR header to the file.
The MAR header includes the MAR magic bytes as well as the offset to
where the index data can be found. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/writer.py#L211-L219 |
mozilla/build-mar | src/mardor/writer.py | MarWriter.dummy_signatures | def dummy_signatures(self):
"""Create a dummy signature.
This is used when initially writing the MAR header and we don't know
what the final signature data will be.
Returns:
Fake signature data suitable for writing to the header with
.write_signatures()
"""
if not self.signing_algorithm:
return []
algo_id = {'sha1': 1, 'sha384': 2}[self.signing_algorithm]
signature = make_dummy_signature(algo_id)
return [(algo_id, signature)] | python | def dummy_signatures(self):
"""Create a dummy signature.
This is used when initially writing the MAR header and we don't know
what the final signature data will be.
Returns:
Fake signature data suitable for writing to the header with
.write_signatures()
"""
if not self.signing_algorithm:
return []
algo_id = {'sha1': 1, 'sha384': 2}[self.signing_algorithm]
signature = make_dummy_signature(algo_id)
return [(algo_id, signature)] | Create a dummy signature.
This is used when initially writing the MAR header and we don't know
what the final signature data will be.
Returns:
Fake signature data suitable for writing to the header with
.write_signatures() | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/writer.py#L221-L236 |
mozilla/build-mar | src/mardor/writer.py | MarWriter.calculate_signatures | def calculate_signatures(self):
"""Calculate the signatures for this MAR file.
Returns:
A list of signature tuples: [(algorithm_id, signature_data), ...]
"""
if not self.signing_algorithm:
return []
algo_id = {'sha1': 1, 'sha384': 2}[self.signing_algorithm]
hashers = [(algo_id, make_hasher(algo_id))]
for block in get_signature_data(self.fileobj, self.filesize):
[h.update(block) for (_, h) in hashers]
signatures = [(algo_id, sign_hash(self.signing_key, h.finalize(), h.algorithm.name)) for (algo_id, h) in hashers]
return signatures | python | def calculate_signatures(self):
"""Calculate the signatures for this MAR file.
Returns:
A list of signature tuples: [(algorithm_id, signature_data), ...]
"""
if not self.signing_algorithm:
return []
algo_id = {'sha1': 1, 'sha384': 2}[self.signing_algorithm]
hashers = [(algo_id, make_hasher(algo_id))]
for block in get_signature_data(self.fileobj, self.filesize):
[h.update(block) for (_, h) in hashers]
signatures = [(algo_id, sign_hash(self.signing_key, h.finalize(), h.algorithm.name)) for (algo_id, h) in hashers]
return signatures | Calculate the signatures for this MAR file.
Returns:
A list of signature tuples: [(algorithm_id, signature_data), ...] | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/writer.py#L238-L254 |
mozilla/build-mar | src/mardor/writer.py | MarWriter.write_signatures | def write_signatures(self, signatures):
"""Write signature data to the MAR file.
Args:
signatures (list): list of signature tuples of the form
(algorithm_id, signature_data)
"""
self.fileobj.seek(self.signature_offset)
sig_entries = [dict(algorithm_id=id_,
size=len(sig),
signature=sig)
for (id_, sig) in signatures]
sigs = sigs_header.build(dict(
filesize=self.filesize,
count=len(signatures),
sigs=sig_entries,
))
self.fileobj.write(sigs)
signatures_len = len(sigs)
self.additional_offset = self.signature_offset + signatures_len
# sanity check; this should never happen
if not self.additional_offset == self.fileobj.tell(): # pragma: no cover
raise IOError('ended up at unexpected offset') | python | def write_signatures(self, signatures):
"""Write signature data to the MAR file.
Args:
signatures (list): list of signature tuples of the form
(algorithm_id, signature_data)
"""
self.fileobj.seek(self.signature_offset)
sig_entries = [dict(algorithm_id=id_,
size=len(sig),
signature=sig)
for (id_, sig) in signatures]
sigs = sigs_header.build(dict(
filesize=self.filesize,
count=len(signatures),
sigs=sig_entries,
))
self.fileobj.write(sigs)
signatures_len = len(sigs)
self.additional_offset = self.signature_offset + signatures_len
# sanity check; this should never happen
if not self.additional_offset == self.fileobj.tell(): # pragma: no cover
raise IOError('ended up at unexpected offset') | Write signature data to the MAR file.
Args:
signatures (list): list of signature tuples of the form
(algorithm_id, signature_data) | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/writer.py#L256-L280 |
mozilla/build-mar | src/mardor/writer.py | MarWriter.write_additional | def write_additional(self, productversion, channel):
"""Write the additional information to the MAR header.
Args:
productversion (str): product and version string
channel (str): channel string
"""
self.fileobj.seek(self.additional_offset)
extras = extras_header.build(dict(
count=1,
sections=[dict(
channel=six.u(channel),
productversion=six.u(productversion),
size=len(channel) + len(productversion) + 2 + 8,
padding=b'',
)],
))
self.fileobj.write(extras)
self.last_offset = self.fileobj.tell() | python | def write_additional(self, productversion, channel):
"""Write the additional information to the MAR header.
Args:
productversion (str): product and version string
channel (str): channel string
"""
self.fileobj.seek(self.additional_offset)
extras = extras_header.build(dict(
count=1,
sections=[dict(
channel=six.u(channel),
productversion=six.u(productversion),
size=len(channel) + len(productversion) + 2 + 8,
padding=b'',
)],
))
self.fileobj.write(extras)
self.last_offset = self.fileobj.tell() | Write the additional information to the MAR header.
Args:
productversion (str): product and version string
channel (str): channel string | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/writer.py#L282-L302 |
mozilla/build-mar | src/mardor/writer.py | MarWriter.write_index | def write_index(self):
"""Write the index of all our files to the MAR file."""
self.fileobj.seek(self.last_offset)
index = index_header.build(dict(entries=self.entries))
self.fileobj.write(index)
self.filesize = self.fileobj.tell() | python | def write_index(self):
"""Write the index of all our files to the MAR file."""
self.fileobj.seek(self.last_offset)
index = index_header.build(dict(entries=self.entries))
self.fileobj.write(index)
self.filesize = self.fileobj.tell() | Write the index of all our files to the MAR file. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/writer.py#L304-L309 |
mozilla/build-mar | src/mardor/writer.py | MarWriter.finish | def finish(self):
"""Finalize the MAR file.
The MAR header, index and signatures need to be updated once we've
finished adding all the files.
"""
# Update the last_offset in the mar header
self.write_header()
# Write out the index of contents
self.write_index()
if not self.use_old_format:
# Refresh the signature
sigs = self.calculate_signatures()
self.write_signatures(sigs) | python | def finish(self):
"""Finalize the MAR file.
The MAR header, index and signatures need to be updated once we've
finished adding all the files.
"""
# Update the last_offset in the mar header
self.write_header()
# Write out the index of contents
self.write_index()
if not self.use_old_format:
# Refresh the signature
sigs = self.calculate_signatures()
self.write_signatures(sigs) | Finalize the MAR file.
The MAR header, index and signatures need to be updated once we've
finished adding all the files. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/writer.py#L311-L325 |
mozilla/build-mar | src/mardor/cli.py | build_argparser | def build_argparser():
"""Build argument parser for the CLI."""
parser = ArgumentParser('Utility for managing MAR files')
create_group = parser.add_argument_group("Create a MAR file")
create_group.add_argument("-c", "--create", metavar="MARFILE", help="create MAR")
create_group.add_argument("-V", "--productversion", dest="productversion",
help="product/version string")
create_group.add_argument("-H", "--channel", dest="channel",
help="channel this MAR file is applicable to")
create_group.add_argument("files", nargs=REMAINDER,
help="files to add to the MAR file")
extract_group = parser.add_argument_group("Extract a MAR file")
extract_group.add_argument("-x", "--extract", help="extract MAR", metavar="MARFILE")
list_group = parser.add_argument_group("Print information on a MAR file")
list_group.add_argument("-t", "--list", help="print out MAR contents",
metavar="MARFILE")
list_group.add_argument("-T", "--list-detailed", metavar="MARFILE",
help="print out MAR contents including signatures")
verify_group = parser.add_argument_group("Verify a MAR file")
verify_group.add_argument("-v", "--verify", metavar="MARFILE",
help="verify the marfile")
parser.add_argument("-j", "--bzip2", action="store_const", dest="compression",
const="bz2", help="compress/decompress members with BZ2")
parser.add_argument("-J", "--xz", action="store_const", dest="compression",
const="xz", help="compress/decompress archive with XZ")
parser.add_argument("--auto", action="store_const", dest="compression",
const="auto", help="automatically decompress contents")
parser.add_argument("-k", "--keyfiles", dest="keyfiles", action='append',
help="sign/verify with given key(s)")
parser.add_argument("-C", "--chdir", dest="chdir",
help="chdir to this directory before creating or "
"extracing; location of marfile isn't affected by "
"this option.")
parser.add_argument("--verbose", dest="loglevel", action="store_const",
const=logging.DEBUG, default=logging.WARN,
help="increase logging verbosity")
parser.add_argument('--version', action='version', version='mar version {}'.format(mardor.version_str))
signing_group = parser.add_argument_group('Sign a MAR file')
signing_group.add_argument('--hash', help='output hash for signing', choices=('sha1', 'sha384'))
signing_group.add_argument('--asn1', help='format hash as an ASN1 DigestInfo block',
default=False, action='store_true')
signing_group.add_argument('--add-signature', help='inject signature', nargs=3,
metavar=('input', 'output', 'signature'))
return parser | python | def build_argparser():
"""Build argument parser for the CLI."""
parser = ArgumentParser('Utility for managing MAR files')
create_group = parser.add_argument_group("Create a MAR file")
create_group.add_argument("-c", "--create", metavar="MARFILE", help="create MAR")
create_group.add_argument("-V", "--productversion", dest="productversion",
help="product/version string")
create_group.add_argument("-H", "--channel", dest="channel",
help="channel this MAR file is applicable to")
create_group.add_argument("files", nargs=REMAINDER,
help="files to add to the MAR file")
extract_group = parser.add_argument_group("Extract a MAR file")
extract_group.add_argument("-x", "--extract", help="extract MAR", metavar="MARFILE")
list_group = parser.add_argument_group("Print information on a MAR file")
list_group.add_argument("-t", "--list", help="print out MAR contents",
metavar="MARFILE")
list_group.add_argument("-T", "--list-detailed", metavar="MARFILE",
help="print out MAR contents including signatures")
verify_group = parser.add_argument_group("Verify a MAR file")
verify_group.add_argument("-v", "--verify", metavar="MARFILE",
help="verify the marfile")
parser.add_argument("-j", "--bzip2", action="store_const", dest="compression",
const="bz2", help="compress/decompress members with BZ2")
parser.add_argument("-J", "--xz", action="store_const", dest="compression",
const="xz", help="compress/decompress archive with XZ")
parser.add_argument("--auto", action="store_const", dest="compression",
const="auto", help="automatically decompress contents")
parser.add_argument("-k", "--keyfiles", dest="keyfiles", action='append',
help="sign/verify with given key(s)")
parser.add_argument("-C", "--chdir", dest="chdir",
help="chdir to this directory before creating or "
"extracing; location of marfile isn't affected by "
"this option.")
parser.add_argument("--verbose", dest="loglevel", action="store_const",
const=logging.DEBUG, default=logging.WARN,
help="increase logging verbosity")
parser.add_argument('--version', action='version', version='mar version {}'.format(mardor.version_str))
signing_group = parser.add_argument_group('Sign a MAR file')
signing_group.add_argument('--hash', help='output hash for signing', choices=('sha1', 'sha384'))
signing_group.add_argument('--asn1', help='format hash as an ASN1 DigestInfo block',
default=False, action='store_true')
signing_group.add_argument('--add-signature', help='inject signature', nargs=3,
metavar=('input', 'output', 'signature'))
return parser | Build argument parser for the CLI. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/cli.py#L23-L73 |
mozilla/build-mar | src/mardor/cli.py | do_extract | def do_extract(marfile, destdir, decompress):
"""Extract the MAR file to the destdir."""
with open(marfile, 'rb') as f:
with MarReader(f) as m:
m.extract(str(destdir), decompress=decompress) | python | def do_extract(marfile, destdir, decompress):
"""Extract the MAR file to the destdir."""
with open(marfile, 'rb') as f:
with MarReader(f) as m:
m.extract(str(destdir), decompress=decompress) | Extract the MAR file to the destdir. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/cli.py#L76-L80 |
mozilla/build-mar | src/mardor/cli.py | get_keys | def get_keys(keyfiles, signature_type):
"""Get public keys for the given keyfiles.
Args:
keyfiles: List of filenames with public keys, or :mozilla- prefixed key
names
signature_type: one of 'sha1' or 'sha384'
Returns:
List of public keys as strings
"""
builtin_keys = {
('release', 'sha1'): [mardor.mozilla.release1_sha1, mardor.mozilla.release2_sha1],
('release', 'sha384'): [mardor.mozilla.release1_sha384, mardor.mozilla.release2_sha384],
('nightly', 'sha1'): [mardor.mozilla.nightly1_sha1, mardor.mozilla.nightly2_sha1],
('nightly', 'sha384'): [mardor.mozilla.nightly1_sha384, mardor.mozilla.nightly2_sha384],
('dep', 'sha1'): [mardor.mozilla.dep1_sha1, mardor.mozilla.dep2_sha1],
('dep', 'sha384'): [mardor.mozilla.dep1_sha384, mardor.mozilla.dep2_sha384],
('autograph-stage', 'sha384'): [mardor.mozilla.autograph_stage_sha384],
}
keys = []
for keyfile in keyfiles:
if keyfile.startswith(':mozilla-'):
name = keyfile.split(':mozilla-')[1]
try:
keys.extend(builtin_keys[name, signature_type])
except KeyError:
raise ValueError('Invalid internal key name: {}'
.format(keyfile))
else:
key = open(keyfile, 'rb').read()
keys.append(key)
return keys | python | def get_keys(keyfiles, signature_type):
"""Get public keys for the given keyfiles.
Args:
keyfiles: List of filenames with public keys, or :mozilla- prefixed key
names
signature_type: one of 'sha1' or 'sha384'
Returns:
List of public keys as strings
"""
builtin_keys = {
('release', 'sha1'): [mardor.mozilla.release1_sha1, mardor.mozilla.release2_sha1],
('release', 'sha384'): [mardor.mozilla.release1_sha384, mardor.mozilla.release2_sha384],
('nightly', 'sha1'): [mardor.mozilla.nightly1_sha1, mardor.mozilla.nightly2_sha1],
('nightly', 'sha384'): [mardor.mozilla.nightly1_sha384, mardor.mozilla.nightly2_sha384],
('dep', 'sha1'): [mardor.mozilla.dep1_sha1, mardor.mozilla.dep2_sha1],
('dep', 'sha384'): [mardor.mozilla.dep1_sha384, mardor.mozilla.dep2_sha384],
('autograph-stage', 'sha384'): [mardor.mozilla.autograph_stage_sha384],
}
keys = []
for keyfile in keyfiles:
if keyfile.startswith(':mozilla-'):
name = keyfile.split(':mozilla-')[1]
try:
keys.extend(builtin_keys[name, signature_type])
except KeyError:
raise ValueError('Invalid internal key name: {}'
.format(keyfile))
else:
key = open(keyfile, 'rb').read()
keys.append(key)
return keys | Get public keys for the given keyfiles.
Args:
keyfiles: List of filenames with public keys, or :mozilla- prefixed key
names
signature_type: one of 'sha1' or 'sha384'
Returns:
List of public keys as strings | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/cli.py#L83-L116 |
mozilla/build-mar | src/mardor/cli.py | do_verify | def do_verify(marfile, keyfiles=None):
"""Verify the MAR file."""
try:
with open(marfile, 'rb') as f:
with MarReader(f) as m:
# Check various parts of the mar file
# e.g. signature algorithms and additional block sections
errors = m.get_errors()
if errors:
print("File is not well formed: {}".format(errors))
sys.exit(1)
if keyfiles:
try:
keys = get_keys(keyfiles, m.signature_type)
except ValueError as e:
print(e)
sys.exit(1)
if any(m.verify(key) for key in keys):
print("Verification OK")
return True
else:
print("Verification failed")
sys.exit(1)
else:
print("Verification OK")
return True
except Exception as e:
print("Error opening or parsing file: {}".format(e))
sys.exit(1) | python | def do_verify(marfile, keyfiles=None):
"""Verify the MAR file."""
try:
with open(marfile, 'rb') as f:
with MarReader(f) as m:
# Check various parts of the mar file
# e.g. signature algorithms and additional block sections
errors = m.get_errors()
if errors:
print("File is not well formed: {}".format(errors))
sys.exit(1)
if keyfiles:
try:
keys = get_keys(keyfiles, m.signature_type)
except ValueError as e:
print(e)
sys.exit(1)
if any(m.verify(key) for key in keys):
print("Verification OK")
return True
else:
print("Verification failed")
sys.exit(1)
else:
print("Verification OK")
return True
except Exception as e:
print("Error opening or parsing file: {}".format(e))
sys.exit(1) | Verify the MAR file. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/cli.py#L119-L150 |
mozilla/build-mar | src/mardor/cli.py | do_list | def do_list(marfile, detailed=False):
"""
List the MAR file.
Yields lines of text to output
"""
with open(marfile, 'rb') as f:
with MarReader(f) as m:
if detailed:
if m.compression_type:
yield "Compression type: {}".format(m.compression_type)
if m.signature_type:
yield "Signature type: {}".format(m.signature_type)
if m.mardata.signatures:
plural = "s" if (m.mardata.signatures.count == 0 or m.mardata.signatures.count > 1) else ""
yield "Signature block found with {} signature{}".format(m.mardata.signatures.count, plural)
for s in m.mardata.signatures.sigs:
yield "- Signature {} size {}".format(s.algorithm_id, s.size)
yield ""
if m.mardata.additional:
yield "{} additional block found:".format(len(m.mardata.additional.sections))
for s in m.mardata.additional.sections:
if s.id == 1:
yield (" - Product Information Block:")
yield (" - MAR channel name: {}".format(s.channel))
yield (" - Product version: {}".format(s.productversion))
yield ""
else:
yield ("Unknown additional data")
yield ("{:7s} {:7s} {:7s}".format("SIZE", "MODE", "NAME"))
for e in m.mardata.index.entries:
yield ("{:<7d} {:04o} {}".format(e.size, e.flags, e.name)) | python | def do_list(marfile, detailed=False):
"""
List the MAR file.
Yields lines of text to output
"""
with open(marfile, 'rb') as f:
with MarReader(f) as m:
if detailed:
if m.compression_type:
yield "Compression type: {}".format(m.compression_type)
if m.signature_type:
yield "Signature type: {}".format(m.signature_type)
if m.mardata.signatures:
plural = "s" if (m.mardata.signatures.count == 0 or m.mardata.signatures.count > 1) else ""
yield "Signature block found with {} signature{}".format(m.mardata.signatures.count, plural)
for s in m.mardata.signatures.sigs:
yield "- Signature {} size {}".format(s.algorithm_id, s.size)
yield ""
if m.mardata.additional:
yield "{} additional block found:".format(len(m.mardata.additional.sections))
for s in m.mardata.additional.sections:
if s.id == 1:
yield (" - Product Information Block:")
yield (" - MAR channel name: {}".format(s.channel))
yield (" - Product version: {}".format(s.productversion))
yield ""
else:
yield ("Unknown additional data")
yield ("{:7s} {:7s} {:7s}".format("SIZE", "MODE", "NAME"))
for e in m.mardata.index.entries:
yield ("{:<7d} {:04o} {}".format(e.size, e.flags, e.name)) | List the MAR file.
Yields lines of text to output | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/cli.py#L153-L184 |
mozilla/build-mar | src/mardor/cli.py | do_create | def do_create(marfile, files, compress, productversion=None, channel=None,
signing_key=None, signing_algorithm=None):
"""Create a new MAR file."""
with open(marfile, 'w+b') as f:
with MarWriter(f, productversion=productversion, channel=channel,
signing_key=signing_key,
signing_algorithm=signing_algorithm,
) as m:
for f in files:
m.add(f, compress=compress) | python | def do_create(marfile, files, compress, productversion=None, channel=None,
signing_key=None, signing_algorithm=None):
"""Create a new MAR file."""
with open(marfile, 'w+b') as f:
with MarWriter(f, productversion=productversion, channel=channel,
signing_key=signing_key,
signing_algorithm=signing_algorithm,
) as m:
for f in files:
m.add(f, compress=compress) | Create a new MAR file. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/cli.py#L187-L196 |
mozilla/build-mar | src/mardor/cli.py | do_hash | def do_hash(hash_algo, marfile, asn1=False):
"""Output the hash for this MAR file."""
# Add a dummy signature into a temporary file
dst = tempfile.TemporaryFile()
with open(marfile, 'rb') as f:
add_signature_block(f, dst, hash_algo)
dst.seek(0)
with MarReader(dst) as m:
hashes = m.calculate_hashes()
h = hashes[0][1]
if asn1:
h = format_hash(h, hash_algo)
print(base64.b64encode(h).decode('ascii')) | python | def do_hash(hash_algo, marfile, asn1=False):
"""Output the hash for this MAR file."""
# Add a dummy signature into a temporary file
dst = tempfile.TemporaryFile()
with open(marfile, 'rb') as f:
add_signature_block(f, dst, hash_algo)
dst.seek(0)
with MarReader(dst) as m:
hashes = m.calculate_hashes()
h = hashes[0][1]
if asn1:
h = format_hash(h, hash_algo)
print(base64.b64encode(h).decode('ascii')) | Output the hash for this MAR file. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/cli.py#L199-L214 |
mozilla/build-mar | src/mardor/cli.py | do_add_signature | def do_add_signature(input_file, output_file, signature_file):
"""Add a signature to the MAR file."""
signature = open(signature_file, 'rb').read()
if len(signature) == 256:
hash_algo = 'sha1'
elif len(signature) == 512:
hash_algo = 'sha384'
else:
raise ValueError()
with open(output_file, 'w+b') as dst:
with open(input_file, 'rb') as src:
add_signature_block(src, dst, hash_algo, signature) | python | def do_add_signature(input_file, output_file, signature_file):
"""Add a signature to the MAR file."""
signature = open(signature_file, 'rb').read()
if len(signature) == 256:
hash_algo = 'sha1'
elif len(signature) == 512:
hash_algo = 'sha384'
else:
raise ValueError()
with open(output_file, 'w+b') as dst:
with open(input_file, 'rb') as src:
add_signature_block(src, dst, hash_algo, signature) | Add a signature to the MAR file. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/cli.py#L217-L229 |
mozilla/build-mar | src/mardor/cli.py | check_args | def check_args(parser, args):
"""Validate commandline arguments."""
# Make sure only one action has been specified
if len([a for a in [args.create, args.extract, args.verify, args.list,
args.list_detailed, args.hash, args.add_signature] if a
is not None]) != 1:
parser.error("Must specify something to do (one of -c, -x, -t, -T, -v, --hash, --add-signature)")
if args.create and not args.files:
parser.error("Must specify at least one file to add to marfile")
if args.extract and args.compression not in (None, 'bz2', 'xz', 'auto'): # pragma: nocover
parser.error('Unsupported compression type')
if args.create and args.compression not in (None, 'bz2', 'xz'): # pragma: nocover
parser.error('Unsupported compression type')
if args.hash and len(args.files) != 1:
parser.error("Must specify a file to output the hash for") | python | def check_args(parser, args):
"""Validate commandline arguments."""
# Make sure only one action has been specified
if len([a for a in [args.create, args.extract, args.verify, args.list,
args.list_detailed, args.hash, args.add_signature] if a
is not None]) != 1:
parser.error("Must specify something to do (one of -c, -x, -t, -T, -v, --hash, --add-signature)")
if args.create and not args.files:
parser.error("Must specify at least one file to add to marfile")
if args.extract and args.compression not in (None, 'bz2', 'xz', 'auto'): # pragma: nocover
parser.error('Unsupported compression type')
if args.create and args.compression not in (None, 'bz2', 'xz'): # pragma: nocover
parser.error('Unsupported compression type')
if args.hash and len(args.files) != 1:
parser.error("Must specify a file to output the hash for") | Validate commandline arguments. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/cli.py#L232-L250 |
mozilla/build-mar | src/mardor/cli.py | get_key_from_cmdline | def get_key_from_cmdline(parser, args):
"""Return the signing key and signing algoritm from the commandline."""
if args.keyfiles:
signing_key = open(args.keyfiles[0], 'rb').read()
bits = get_keysize(signing_key)
if bits == 2048:
signing_algorithm = 'sha1'
elif bits == 4096:
signing_algorithm = 'sha384'
else:
parser.error("Unsupported key size {} from key {}".format(bits, args.keyfiles[0]))
print("Using {} to sign using algorithm {!s}".format(args.keyfiles[0], signing_algorithm))
else:
signing_key = None
signing_algorithm = None
return signing_key, signing_algorithm | python | def get_key_from_cmdline(parser, args):
"""Return the signing key and signing algoritm from the commandline."""
if args.keyfiles:
signing_key = open(args.keyfiles[0], 'rb').read()
bits = get_keysize(signing_key)
if bits == 2048:
signing_algorithm = 'sha1'
elif bits == 4096:
signing_algorithm = 'sha384'
else:
parser.error("Unsupported key size {} from key {}".format(bits, args.keyfiles[0]))
print("Using {} to sign using algorithm {!s}".format(args.keyfiles[0], signing_algorithm))
else:
signing_key = None
signing_algorithm = None
return signing_key, signing_algorithm | Return the signing key and signing algoritm from the commandline. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/cli.py#L253-L270 |
mozilla/build-mar | src/mardor/cli.py | main | def main(argv=None):
"""Run the main CLI entry point."""
parser = build_argparser()
args = parser.parse_args(argv)
logging.basicConfig(level=args.loglevel, format="%(message)s")
check_args(parser, args)
if args.extract:
marfile = os.path.abspath(args.extract)
if args.chdir:
os.chdir(args.chdir)
do_extract(marfile, os.getcwd(), args.compression)
elif args.verify:
do_verify(args.verify, args.keyfiles)
elif args.list:
print("\n".join(do_list(args.list)))
elif args.list_detailed:
print("\n".join(do_list(args.list_detailed, detailed=True)))
elif args.create:
marfile = os.path.abspath(args.create)
signing_key, signing_algorithm = get_key_from_cmdline(parser, args)
if args.chdir:
os.chdir(args.chdir)
do_create(marfile, args.files, args.compression,
productversion=args.productversion, channel=args.channel,
signing_key=signing_key, signing_algorithm=signing_algorithm)
elif args.hash:
do_hash(args.hash, args.files[0], args.asn1)
elif args.add_signature:
do_add_signature(args.add_signature[0], args.add_signature[1], args.add_signature[2])
# sanity check; should never happen
else: # pragma: no cover
parser.error("Unsupported action") | python | def main(argv=None):
"""Run the main CLI entry point."""
parser = build_argparser()
args = parser.parse_args(argv)
logging.basicConfig(level=args.loglevel, format="%(message)s")
check_args(parser, args)
if args.extract:
marfile = os.path.abspath(args.extract)
if args.chdir:
os.chdir(args.chdir)
do_extract(marfile, os.getcwd(), args.compression)
elif args.verify:
do_verify(args.verify, args.keyfiles)
elif args.list:
print("\n".join(do_list(args.list)))
elif args.list_detailed:
print("\n".join(do_list(args.list_detailed, detailed=True)))
elif args.create:
marfile = os.path.abspath(args.create)
signing_key, signing_algorithm = get_key_from_cmdline(parser, args)
if args.chdir:
os.chdir(args.chdir)
do_create(marfile, args.files, args.compression,
productversion=args.productversion, channel=args.channel,
signing_key=signing_key, signing_algorithm=signing_algorithm)
elif args.hash:
do_hash(args.hash, args.files[0], args.asn1)
elif args.add_signature:
do_add_signature(args.add_signature[0], args.add_signature[1], args.add_signature[2])
# sanity check; should never happen
else: # pragma: no cover
parser.error("Unsupported action") | Run the main CLI entry point. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/cli.py#L273-L316 |
mozilla/build-mar | src/mardor/reader.py | MarReader.compression_type | def compression_type(self):
"""Return the latest compresion type used in this MAR.
Returns:
One of None, 'bz2', or 'xz'
"""
best_compression = None
for e in self.mardata.index.entries:
self.fileobj.seek(e.offset)
magic = self.fileobj.read(10)
compression = guess_compression(magic)
if compression == 'xz':
best_compression = 'xz'
break
elif compression == 'bz2' and best_compression is None:
best_compression = 'bz2'
return best_compression | python | def compression_type(self):
"""Return the latest compresion type used in this MAR.
Returns:
One of None, 'bz2', or 'xz'
"""
best_compression = None
for e in self.mardata.index.entries:
self.fileobj.seek(e.offset)
magic = self.fileobj.read(10)
compression = guess_compression(magic)
if compression == 'xz':
best_compression = 'xz'
break
elif compression == 'bz2' and best_compression is None:
best_compression = 'bz2'
return best_compression | Return the latest compresion type used in this MAR.
Returns:
One of None, 'bz2', or 'xz' | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/reader.py#L59-L76 |
mozilla/build-mar | src/mardor/reader.py | MarReader.signature_type | def signature_type(self):
"""Return the signature type used in this MAR.
Returns:
One of None, 'unknown', 'sha1', or 'sha384'
"""
if not self.mardata.signatures:
return None
for sig in self.mardata.signatures.sigs:
if sig.algorithm_id == 1:
return 'sha1'
elif sig.algorithm_id == 2:
return 'sha384'
else:
return 'unknown' | python | def signature_type(self):
"""Return the signature type used in this MAR.
Returns:
One of None, 'unknown', 'sha1', or 'sha384'
"""
if not self.mardata.signatures:
return None
for sig in self.mardata.signatures.sigs:
if sig.algorithm_id == 1:
return 'sha1'
elif sig.algorithm_id == 2:
return 'sha384'
else:
return 'unknown' | Return the signature type used in this MAR.
Returns:
One of None, 'unknown', 'sha1', or 'sha384' | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/reader.py#L79-L95 |
mozilla/build-mar | src/mardor/reader.py | MarReader.extract_entry | def extract_entry(self, e, decompress='auto'):
"""Yield blocks of data for this entry from this MAR file.
Args:
e (:obj:`mardor.format.index_entry`): An index_entry object that
refers to this file's size and offset inside the MAR file.
path (str): Where on disk to extract this file to.
decompress (str, optional): Controls whether files are decompressed
when extracted. Must be one of None, 'auto', 'bz2', or 'xz'.
Defaults to 'auto'
Yields:
Blocks of data for `e`
"""
self.fileobj.seek(e.offset)
stream = file_iter(self.fileobj)
stream = takeexactly(stream, e.size)
if decompress == 'auto':
stream = auto_decompress_stream(stream)
elif decompress == 'bz2':
stream = bz2_decompress_stream(stream)
elif decompress == 'xz':
stream = xz_decompress_stream(stream)
elif decompress is None:
pass
else:
raise ValueError("Unsupported decompression type: {}".format(decompress))
for block in stream:
yield block | python | def extract_entry(self, e, decompress='auto'):
"""Yield blocks of data for this entry from this MAR file.
Args:
e (:obj:`mardor.format.index_entry`): An index_entry object that
refers to this file's size and offset inside the MAR file.
path (str): Where on disk to extract this file to.
decompress (str, optional): Controls whether files are decompressed
when extracted. Must be one of None, 'auto', 'bz2', or 'xz'.
Defaults to 'auto'
Yields:
Blocks of data for `e`
"""
self.fileobj.seek(e.offset)
stream = file_iter(self.fileobj)
stream = takeexactly(stream, e.size)
if decompress == 'auto':
stream = auto_decompress_stream(stream)
elif decompress == 'bz2':
stream = bz2_decompress_stream(stream)
elif decompress == 'xz':
stream = xz_decompress_stream(stream)
elif decompress is None:
pass
else:
raise ValueError("Unsupported decompression type: {}".format(decompress))
for block in stream:
yield block | Yield blocks of data for this entry from this MAR file.
Args:
e (:obj:`mardor.format.index_entry`): An index_entry object that
refers to this file's size and offset inside the MAR file.
path (str): Where on disk to extract this file to.
decompress (str, optional): Controls whether files are decompressed
when extracted. Must be one of None, 'auto', 'bz2', or 'xz'.
Defaults to 'auto'
Yields:
Blocks of data for `e` | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/reader.py#L97-L127 |
mozilla/build-mar | src/mardor/reader.py | MarReader.extract | def extract(self, destdir, decompress='auto'):
"""Extract the entire MAR file into a directory.
Args:
destdir (str): A local directory on disk into which the contents of
this MAR file will be extracted. Required parent directories
will be created as necessary.
decompress (obj, optional): Controls whether files are decompressed
when extracted. Must be one of 'auto' or None. Defaults to
'auto'.
"""
for e in self.mardata.index.entries:
name = e.name
entry_path = safejoin(destdir, name)
entry_dir = os.path.dirname(entry_path)
mkdir(entry_dir)
with open(entry_path, 'wb') as f:
write_to_file(self.extract_entry(e, decompress), f)
os.chmod(entry_path, e.flags) | python | def extract(self, destdir, decompress='auto'):
"""Extract the entire MAR file into a directory.
Args:
destdir (str): A local directory on disk into which the contents of
this MAR file will be extracted. Required parent directories
will be created as necessary.
decompress (obj, optional): Controls whether files are decompressed
when extracted. Must be one of 'auto' or None. Defaults to
'auto'.
"""
for e in self.mardata.index.entries:
name = e.name
entry_path = safejoin(destdir, name)
entry_dir = os.path.dirname(entry_path)
mkdir(entry_dir)
with open(entry_path, 'wb') as f:
write_to_file(self.extract_entry(e, decompress), f)
os.chmod(entry_path, e.flags) | Extract the entire MAR file into a directory.
Args:
destdir (str): A local directory on disk into which the contents of
this MAR file will be extracted. Required parent directories
will be created as necessary.
decompress (obj, optional): Controls whether files are decompressed
when extracted. Must be one of 'auto' or None. Defaults to
'auto'. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/reader.py#L129-L147 |
mozilla/build-mar | src/mardor/reader.py | MarReader.get_errors | def get_errors(self):
"""Verify that this MAR file is well formed.
Returns:
A list of strings describing errors in the MAR file
None if this MAR file appears well formed.
"""
errors = []
errors.extend(self._get_signature_errors())
errors.extend(self._get_additional_errors())
errors.extend(self._get_entry_errors())
return errors if errors else None | python | def get_errors(self):
"""Verify that this MAR file is well formed.
Returns:
A list of strings describing errors in the MAR file
None if this MAR file appears well formed.
"""
errors = []
errors.extend(self._get_signature_errors())
errors.extend(self._get_additional_errors())
errors.extend(self._get_entry_errors())
return errors if errors else None | Verify that this MAR file is well formed.
Returns:
A list of strings describing errors in the MAR file
None if this MAR file appears well formed. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/reader.py#L181-L194 |
mozilla/build-mar | src/mardor/reader.py | MarReader.verify | def verify(self, verify_key):
"""Verify that this MAR file has a valid signature.
Args:
verify_key (str): PEM formatted public key
Returns:
True if the MAR file's signature matches its contents
False otherwise; this includes cases where there is no signature.
"""
if not self.mardata.signatures or not self.mardata.signatures.sigs:
# This MAR file can't be verified since it has no signatures
return False
hashers = []
for sig in self.mardata.signatures.sigs:
hashers.append((sig.algorithm_id, sig.signature, make_hasher(sig.algorithm_id)))
assert len(hashers) == len(self.mardata.signatures.sigs)
for block in get_signature_data(self.fileobj,
self.mardata.signatures.filesize):
[h.update(block) for (_, _, h) in hashers]
for algo_id, sig, h in hashers:
if not verify_signature(verify_key, sig, h.finalize(), h.algorithm.name):
return False
else:
return True | python | def verify(self, verify_key):
"""Verify that this MAR file has a valid signature.
Args:
verify_key (str): PEM formatted public key
Returns:
True if the MAR file's signature matches its contents
False otherwise; this includes cases where there is no signature.
"""
if not self.mardata.signatures or not self.mardata.signatures.sigs:
# This MAR file can't be verified since it has no signatures
return False
hashers = []
for sig in self.mardata.signatures.sigs:
hashers.append((sig.algorithm_id, sig.signature, make_hasher(sig.algorithm_id)))
assert len(hashers) == len(self.mardata.signatures.sigs)
for block in get_signature_data(self.fileobj,
self.mardata.signatures.filesize):
[h.update(block) for (_, _, h) in hashers]
for algo_id, sig, h in hashers:
if not verify_signature(verify_key, sig, h.finalize(), h.algorithm.name):
return False
else:
return True | Verify that this MAR file has a valid signature.
Args:
verify_key (str): PEM formatted public key
Returns:
True if the MAR file's signature matches its contents
False otherwise; this includes cases where there is no signature. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/reader.py#L196-L225 |
mozilla/build-mar | src/mardor/reader.py | MarReader.productinfo | def productinfo(self):
"""Return the productversion and channel of this MAR if present."""
if not self.mardata.additional:
return None
for s in self.mardata.additional.sections:
if s.id == 1:
return str(s.productversion), str(s.channel)
return None | python | def productinfo(self):
"""Return the productversion and channel of this MAR if present."""
if not self.mardata.additional:
return None
for s in self.mardata.additional.sections:
if s.id == 1:
return str(s.productversion), str(s.channel)
return None | Return the productversion and channel of this MAR if present. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/reader.py#L228-L237 |
mozilla/build-mar | src/mardor/reader.py | MarReader.calculate_hashes | def calculate_hashes(self):
"""Return hashes of the contents of this MAR file.
The hashes depend on the algorithms defined in the MAR file's signature block.
Returns:
A list of (algorithm_id, hash) tuples
"""
hashers = []
if not self.mardata.signatures:
return []
for s in self.mardata.signatures.sigs:
h = make_hasher(s.algorithm_id)
hashers.append((s.algorithm_id, h))
for block in get_signature_data(self.fileobj, self.mardata.signatures.filesize):
[h.update(block) for (_, h) in hashers]
return [(algo_id, h.finalize()) for (algo_id, h) in hashers] | python | def calculate_hashes(self):
"""Return hashes of the contents of this MAR file.
The hashes depend on the algorithms defined in the MAR file's signature block.
Returns:
A list of (algorithm_id, hash) tuples
"""
hashers = []
if not self.mardata.signatures:
return []
for s in self.mardata.signatures.sigs:
h = make_hasher(s.algorithm_id)
hashers.append((s.algorithm_id, h))
for block in get_signature_data(self.fileobj, self.mardata.signatures.filesize):
[h.update(block) for (_, h) in hashers]
return [(algo_id, h.finalize()) for (algo_id, h) in hashers] | Return hashes of the contents of this MAR file.
The hashes depend on the algorithms defined in the MAR file's signature block.
Returns:
A list of (algorithm_id, hash) tuples | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/reader.py#L239-L259 |
mozilla/build-mar | src/mardor/format.py | _has_extras | def _has_extras(ctx):
"""Determine if a MAR file has an additional section block or not.
It does this by looking at where file data starts in the file. If this
starts immediately after the signature data, then no additional sections are present.
Args:
ctx (context): construct parsing context
Returns:
True if the MAR file has an additional section block
False otherwise
"""
if not ctx.index.entries:
return False
return ctx.data_offset > 8 and ctx.data_offset > (ctx.signatures.offset_end + 8) | python | def _has_extras(ctx):
"""Determine if a MAR file has an additional section block or not.
It does this by looking at where file data starts in the file. If this
starts immediately after the signature data, then no additional sections are present.
Args:
ctx (context): construct parsing context
Returns:
True if the MAR file has an additional section block
False otherwise
"""
if not ctx.index.entries:
return False
return ctx.data_offset > 8 and ctx.data_offset > (ctx.signatures.offset_end + 8) | Determine if a MAR file has an additional section block or not.
It does this by looking at where file data starts in the file. If this
starts immediately after the signature data, then no additional sections are present.
Args:
ctx (context): construct parsing context
Returns:
True if the MAR file has an additional section block
False otherwise | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/format.py#L108-L125 |
mozilla/build-mar | src/mardor/signing.py | get_publickey | def get_publickey(keydata):
"""Load the public key from a PEM encoded string."""
try:
key = serialization.load_pem_public_key(
keydata,
backend=default_backend(),
)
return key
except ValueError:
key = serialization.load_pem_private_key(
keydata,
password=None,
backend=default_backend(),
)
key = key.public_key()
return key | python | def get_publickey(keydata):
"""Load the public key from a PEM encoded string."""
try:
key = serialization.load_pem_public_key(
keydata,
backend=default_backend(),
)
return key
except ValueError:
key = serialization.load_pem_private_key(
keydata,
password=None,
backend=default_backend(),
)
key = key.public_key()
return key | Load the public key from a PEM encoded string. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/signing.py#L25-L40 |
mozilla/build-mar | src/mardor/signing.py | get_privatekey | def get_privatekey(keydata):
"""Load the private key from a PEM encoded string."""
key = serialization.load_pem_private_key(
keydata,
password=None,
backend=default_backend(),
)
return key | python | def get_privatekey(keydata):
"""Load the private key from a PEM encoded string."""
key = serialization.load_pem_private_key(
keydata,
password=None,
backend=default_backend(),
)
return key | Load the private key from a PEM encoded string. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/signing.py#L43-L50 |
mozilla/build-mar | src/mardor/signing.py | get_signature_data | def get_signature_data(fileobj, filesize):
"""Read data from MAR file that is required for MAR signatures.
Args:
fileboj (file-like object): file-like object to read the MAR data from
filesize (int): the total size of the file
Yields:
blocks of bytes representing the data required to generate or validate
signatures.
"""
# Read everything except the signature entries
# The first 8 bytes are covered, as is everything from the beginning
# of the additional section to the end of the file. The signature
# algorithm id and size fields are also covered.
fileobj.seek(0)
marfile = mar.parse_stream(fileobj)
if not marfile.signatures:
raise IOError("Can't generate signature data for file without signature blocks")
# MAR header
fileobj.seek(0)
block = fileobj.read(8)
yield block
# Signatures header
sigs = sigs_header.parse_stream(fileobj)
sig_types = [(sig.algorithm_id, sig.size) for sig in sigs.sigs]
block = Int64ub.build(filesize) + Int32ub.build(sigs.count)
yield block
# Signature algorithm id and size per entry
for algorithm_id, size in sig_types:
block = Int32ub.build(algorithm_id) + Int32ub.build(size)
yield block
# Everything else in the file is covered
for block in file_iter(fileobj):
yield block | python | def get_signature_data(fileobj, filesize):
"""Read data from MAR file that is required for MAR signatures.
Args:
fileboj (file-like object): file-like object to read the MAR data from
filesize (int): the total size of the file
Yields:
blocks of bytes representing the data required to generate or validate
signatures.
"""
# Read everything except the signature entries
# The first 8 bytes are covered, as is everything from the beginning
# of the additional section to the end of the file. The signature
# algorithm id and size fields are also covered.
fileobj.seek(0)
marfile = mar.parse_stream(fileobj)
if not marfile.signatures:
raise IOError("Can't generate signature data for file without signature blocks")
# MAR header
fileobj.seek(0)
block = fileobj.read(8)
yield block
# Signatures header
sigs = sigs_header.parse_stream(fileobj)
sig_types = [(sig.algorithm_id, sig.size) for sig in sigs.sigs]
block = Int64ub.build(filesize) + Int32ub.build(sigs.count)
yield block
# Signature algorithm id and size per entry
for algorithm_id, size in sig_types:
block = Int32ub.build(algorithm_id) + Int32ub.build(size)
yield block
# Everything else in the file is covered
for block in file_iter(fileobj):
yield block | Read data from MAR file that is required for MAR signatures.
Args:
fileboj (file-like object): file-like object to read the MAR data from
filesize (int): the total size of the file
Yields:
blocks of bytes representing the data required to generate or validate
signatures. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/signing.py#L59-L101 |
mozilla/build-mar | src/mardor/signing.py | make_hasher | def make_hasher(algorithm_id):
"""Create a hashing object for the given signing algorithm."""
if algorithm_id == 1:
return hashes.Hash(hashes.SHA1(), default_backend())
elif algorithm_id == 2:
return hashes.Hash(hashes.SHA384(), default_backend())
else:
raise ValueError("Unsupported signing algorithm: %s" % algorithm_id) | python | def make_hasher(algorithm_id):
"""Create a hashing object for the given signing algorithm."""
if algorithm_id == 1:
return hashes.Hash(hashes.SHA1(), default_backend())
elif algorithm_id == 2:
return hashes.Hash(hashes.SHA384(), default_backend())
else:
raise ValueError("Unsupported signing algorithm: %s" % algorithm_id) | Create a hashing object for the given signing algorithm. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/signing.py#L104-L111 |
mozilla/build-mar | src/mardor/signing.py | sign_hash | def sign_hash(private_key, hash, hash_algo):
"""Sign the given hash with the given private key.
Args:
private_key (str): PEM enoded private key
hash (byte str): hash to sign
hash_algo (str): name of hash algorithm used
Returns:
byte string representing the signature
"""
hash_algo = _hash_algorithms[hash_algo]
return get_privatekey(private_key).sign(
hash,
padding.PKCS1v15(),
utils.Prehashed(hash_algo),
) | python | def sign_hash(private_key, hash, hash_algo):
"""Sign the given hash with the given private key.
Args:
private_key (str): PEM enoded private key
hash (byte str): hash to sign
hash_algo (str): name of hash algorithm used
Returns:
byte string representing the signature
"""
hash_algo = _hash_algorithms[hash_algo]
return get_privatekey(private_key).sign(
hash,
padding.PKCS1v15(),
utils.Prehashed(hash_algo),
) | Sign the given hash with the given private key.
Args:
private_key (str): PEM enoded private key
hash (byte str): hash to sign
hash_algo (str): name of hash algorithm used
Returns:
byte string representing the signature | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/signing.py#L114-L131 |
mozilla/build-mar | src/mardor/signing.py | verify_signature | def verify_signature(public_key, signature, hash, hash_algo):
"""Verify the given signature is correct for the given hash and public key.
Args:
public_key (str): PEM encoded public key
signature (bytes): signature to verify
hash (bytes): hash of data
hash_algo (str): hash algorithm used
Returns:
True if the signature is valid, False otherwise
"""
hash_algo = _hash_algorithms[hash_algo]
try:
return get_publickey(public_key).verify(
signature,
hash,
padding.PKCS1v15(),
utils.Prehashed(hash_algo),
) is None
except InvalidSignature:
return False | python | def verify_signature(public_key, signature, hash, hash_algo):
"""Verify the given signature is correct for the given hash and public key.
Args:
public_key (str): PEM encoded public key
signature (bytes): signature to verify
hash (bytes): hash of data
hash_algo (str): hash algorithm used
Returns:
True if the signature is valid, False otherwise
"""
hash_algo = _hash_algorithms[hash_algo]
try:
return get_publickey(public_key).verify(
signature,
hash,
padding.PKCS1v15(),
utils.Prehashed(hash_algo),
) is None
except InvalidSignature:
return False | Verify the given signature is correct for the given hash and public key.
Args:
public_key (str): PEM encoded public key
signature (bytes): signature to verify
hash (bytes): hash of data
hash_algo (str): hash algorithm used
Returns:
True if the signature is valid, False otherwise | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/signing.py#L134-L156 |
mozilla/build-mar | src/mardor/signing.py | make_rsa_keypair | def make_rsa_keypair(bits):
"""Generate an RSA keypair.
Args:
bits (int): number of bits to use for the key.
Returns:
(private_key, public_key) - both as PEM encoded strings
"""
private_key = rsa.generate_private_key(
public_exponent=65537,
key_size=bits,
backend=default_backend(),
)
private_pem = private_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
)
public_pem = private_key.public_key().public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo,
)
return private_pem, public_pem | python | def make_rsa_keypair(bits):
"""Generate an RSA keypair.
Args:
bits (int): number of bits to use for the key.
Returns:
(private_key, public_key) - both as PEM encoded strings
"""
private_key = rsa.generate_private_key(
public_exponent=65537,
key_size=bits,
backend=default_backend(),
)
private_pem = private_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
)
public_pem = private_key.public_key().public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo,
)
return private_pem, public_pem | Generate an RSA keypair.
Args:
bits (int): number of bits to use for the key.
Returns:
(private_key, public_key) - both as PEM encoded strings | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/signing.py#L159-L183 |
mozilla/build-mar | src/mardor/utils.py | mkdir | def mkdir(path):
"""Make a directory and its parents.
Args:
path (str): path to create
Returns:
None
Raises:
OSError if the directory cannot be created.
"""
try:
os.makedirs(path)
# sanity check
if not os.path.isdir(path): # pragma: no cover
raise IOError('path is not a directory')
except OSError as e:
# EEXIST
if e.errno == 17 and os.path.isdir(path):
return
raise | python | def mkdir(path):
"""Make a directory and its parents.
Args:
path (str): path to create
Returns:
None
Raises:
OSError if the directory cannot be created.
"""
try:
os.makedirs(path)
# sanity check
if not os.path.isdir(path): # pragma: no cover
raise IOError('path is not a directory')
except OSError as e:
# EEXIST
if e.errno == 17 and os.path.isdir(path):
return
raise | Make a directory and its parents.
Args:
path (str): path to create
Returns:
None
Raises:
OSError if the directory cannot be created. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/utils.py#L18-L40 |
mozilla/build-mar | src/mardor/utils.py | takeexactly | def takeexactly(iterable, size):
"""Yield blocks from `iterable` until exactly len(size) have been returned.
Args:
iterable (iterable): Any iterable that yields sliceable objects that
have length.
size (int): How much data to consume
Yields:
blocks from `iterable` such that
sum(len(block) for block in takeexactly(iterable, size)) == size
Raises:
ValueError if there is less than `size` data in `iterable`
"""
total = 0
for block in iterable:
n = min(len(block), size - total)
block = block[:n]
if block:
yield block
total += len(block)
if total >= size:
break
if total < size:
raise ValueError('not enough data (yielded {} of {})')
# sanity check; this should never happen
if total != size: # pragma: no cover
raise ValueError('yielded too much data') | python | def takeexactly(iterable, size):
"""Yield blocks from `iterable` until exactly len(size) have been returned.
Args:
iterable (iterable): Any iterable that yields sliceable objects that
have length.
size (int): How much data to consume
Yields:
blocks from `iterable` such that
sum(len(block) for block in takeexactly(iterable, size)) == size
Raises:
ValueError if there is less than `size` data in `iterable`
"""
total = 0
for block in iterable:
n = min(len(block), size - total)
block = block[:n]
if block:
yield block
total += len(block)
if total >= size:
break
if total < size:
raise ValueError('not enough data (yielded {} of {})')
# sanity check; this should never happen
if total != size: # pragma: no cover
raise ValueError('yielded too much data') | Yield blocks from `iterable` until exactly len(size) have been returned.
Args:
iterable (iterable): Any iterable that yields sliceable objects that
have length.
size (int): How much data to consume
Yields:
blocks from `iterable` such that
sum(len(block) for block in takeexactly(iterable, size)) == size
Raises:
ValueError if there is less than `size` data in `iterable` | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/utils.py#L57-L87 |
mozilla/build-mar | src/mardor/utils.py | write_to_file | def write_to_file(src, dst):
"""Write data from `src` into `dst`.
Args:
src (iterable): iterable that yields blocks of data to write
dst (file-like object): file-like object that must support
.write(block)
Returns:
number of bytes written to `dst`
"""
n = 0
for block in src:
dst.write(block)
n += len(block)
return n | python | def write_to_file(src, dst):
"""Write data from `src` into `dst`.
Args:
src (iterable): iterable that yields blocks of data to write
dst (file-like object): file-like object that must support
.write(block)
Returns:
number of bytes written to `dst`
"""
n = 0
for block in src:
dst.write(block)
n += len(block)
return n | Write data from `src` into `dst`.
Args:
src (iterable): iterable that yields blocks of data to write
dst (file-like object): file-like object that must support
.write(block)
Returns:
number of bytes written to `dst` | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/utils.py#L90-L106 |
mozilla/build-mar | src/mardor/utils.py | bz2_compress_stream | def bz2_compress_stream(src, level=9):
"""Compress data from `src`.
Args:
src (iterable): iterable that yields blocks of data to compress
level (int): compression level (1-9) default is 9
Yields:
blocks of compressed data
"""
compressor = bz2.BZ2Compressor(level)
for block in src:
encoded = compressor.compress(block)
if encoded:
yield encoded
yield compressor.flush() | python | def bz2_compress_stream(src, level=9):
"""Compress data from `src`.
Args:
src (iterable): iterable that yields blocks of data to compress
level (int): compression level (1-9) default is 9
Yields:
blocks of compressed data
"""
compressor = bz2.BZ2Compressor(level)
for block in src:
encoded = compressor.compress(block)
if encoded:
yield encoded
yield compressor.flush() | Compress data from `src`.
Args:
src (iterable): iterable that yields blocks of data to compress
level (int): compression level (1-9) default is 9
Yields:
blocks of compressed data | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/utils.py#L109-L125 |
mozilla/build-mar | src/mardor/utils.py | bz2_decompress_stream | def bz2_decompress_stream(src):
"""Decompress data from `src`.
Args:
src (iterable): iterable that yields blocks of compressed data
Yields:
blocks of uncompressed data
"""
dec = bz2.BZ2Decompressor()
for block in src:
decoded = dec.decompress(block)
if decoded:
yield decoded | python | def bz2_decompress_stream(src):
"""Decompress data from `src`.
Args:
src (iterable): iterable that yields blocks of compressed data
Yields:
blocks of uncompressed data
"""
dec = bz2.BZ2Decompressor()
for block in src:
decoded = dec.decompress(block)
if decoded:
yield decoded | Decompress data from `src`.
Args:
src (iterable): iterable that yields blocks of compressed data
Yields:
blocks of uncompressed data | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/utils.py#L128-L142 |
mozilla/build-mar | src/mardor/utils.py | xz_compress_stream | def xz_compress_stream(src):
"""Compress data from `src`.
Args:
src (iterable): iterable that yields blocks of data to compress
Yields:
blocks of compressed data
"""
compressor = lzma.LZMACompressor(
check=lzma.CHECK_CRC64,
filters=[
{"id": lzma.FILTER_X86},
{"id": lzma.FILTER_LZMA2,
"preset": lzma.PRESET_DEFAULT},
])
for block in src:
encoded = compressor.compress(block)
if encoded:
yield encoded
yield compressor.flush() | python | def xz_compress_stream(src):
"""Compress data from `src`.
Args:
src (iterable): iterable that yields blocks of data to compress
Yields:
blocks of compressed data
"""
compressor = lzma.LZMACompressor(
check=lzma.CHECK_CRC64,
filters=[
{"id": lzma.FILTER_X86},
{"id": lzma.FILTER_LZMA2,
"preset": lzma.PRESET_DEFAULT},
])
for block in src:
encoded = compressor.compress(block)
if encoded:
yield encoded
yield compressor.flush() | Compress data from `src`.
Args:
src (iterable): iterable that yields blocks of data to compress
Yields:
blocks of compressed data | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/utils.py#L145-L166 |
mozilla/build-mar | src/mardor/utils.py | xz_decompress_stream | def xz_decompress_stream(src):
"""Decompress data from `src`.
Args:
src (iterable): iterable that yields blocks of compressed data
Yields:
blocks of uncompressed data
"""
dec = lzma.LZMADecompressor()
for block in src:
decoded = dec.decompress(block)
if decoded:
yield decoded
if dec.unused_data: # pragma: nocover; can't figure out how to test this
raise IOError('Read unused data at end of compressed stream') | python | def xz_decompress_stream(src):
"""Decompress data from `src`.
Args:
src (iterable): iterable that yields blocks of compressed data
Yields:
blocks of uncompressed data
"""
dec = lzma.LZMADecompressor()
for block in src:
decoded = dec.decompress(block)
if decoded:
yield decoded
if dec.unused_data: # pragma: nocover; can't figure out how to test this
raise IOError('Read unused data at end of compressed stream') | Decompress data from `src`.
Args:
src (iterable): iterable that yields blocks of compressed data
Yields:
blocks of uncompressed data | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/utils.py#L169-L186 |
mozilla/build-mar | src/mardor/utils.py | auto_decompress_stream | def auto_decompress_stream(src):
"""Decompress data from `src` if required.
If the first block of `src` appears to be compressed, then the entire
stream will be uncompressed. Otherwise the stream will be passed along
as-is.
Args:
src (iterable): iterable that yields blocks of data
Yields:
blocks of uncompressed data
"""
block = next(src)
compression = guess_compression(block)
if compression == 'bz2':
src = bz2_decompress_stream(chain([block], src))
elif compression == 'xz':
src = xz_decompress_stream(chain([block], src))
else:
src = chain([block], src)
for block in src:
yield block | python | def auto_decompress_stream(src):
"""Decompress data from `src` if required.
If the first block of `src` appears to be compressed, then the entire
stream will be uncompressed. Otherwise the stream will be passed along
as-is.
Args:
src (iterable): iterable that yields blocks of data
Yields:
blocks of uncompressed data
"""
block = next(src)
compression = guess_compression(block)
if compression == 'bz2':
src = bz2_decompress_stream(chain([block], src))
elif compression == 'xz':
src = xz_decompress_stream(chain([block], src))
else:
src = chain([block], src)
for block in src:
yield block | Decompress data from `src` if required.
If the first block of `src` appears to be compressed, then the entire
stream will be uncompressed. Otherwise the stream will be passed along
as-is.
Args:
src (iterable): iterable that yields blocks of data
Yields:
blocks of uncompressed data | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/utils.py#L206-L230 |
mozilla/build-mar | src/mardor/utils.py | path_is_inside | def path_is_inside(path, dirname):
"""Return True if path is under dirname."""
path = os.path.abspath(path)
dirname = os.path.abspath(dirname)
while len(path) >= len(dirname):
if path == dirname:
return True
newpath = os.path.dirname(path)
if newpath == path:
return False
path = newpath
return False | python | def path_is_inside(path, dirname):
"""Return True if path is under dirname."""
path = os.path.abspath(path)
dirname = os.path.abspath(dirname)
while len(path) >= len(dirname):
if path == dirname:
return True
newpath = os.path.dirname(path)
if newpath == path:
return False
path = newpath
return False | Return True if path is under dirname. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/utils.py#L233-L244 |
mozilla/build-mar | src/mardor/utils.py | safejoin | def safejoin(base, *elements):
"""Safely joins paths together.
The result will always be a subdirectory under `base`, otherwise ValueError
is raised.
Args:
base (str): base path
elements (list of strings): path elements to join to base
Returns:
elements joined to base
"""
# TODO: do we really want to be absolute here?
base = os.path.abspath(base)
path = os.path.join(base, *elements)
path = os.path.normpath(path)
if not path_is_inside(path, base):
raise ValueError('target path is outside of the base path')
return path | python | def safejoin(base, *elements):
"""Safely joins paths together.
The result will always be a subdirectory under `base`, otherwise ValueError
is raised.
Args:
base (str): base path
elements (list of strings): path elements to join to base
Returns:
elements joined to base
"""
# TODO: do we really want to be absolute here?
base = os.path.abspath(base)
path = os.path.join(base, *elements)
path = os.path.normpath(path)
if not path_is_inside(path, base):
raise ValueError('target path is outside of the base path')
return path | Safely joins paths together.
The result will always be a subdirectory under `base`, otherwise ValueError
is raised.
Args:
base (str): base path
elements (list of strings): path elements to join to base
Returns:
elements joined to base | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/utils.py#L247-L267 |
mozilla/build-mar | src/mardor/utils.py | filesize | def filesize(fileobj):
"""Return the number of bytes in the fileobj.
This function seeks to the end of the file, and then back to the original position.
"""
current = fileobj.tell()
fileobj.seek(0, 2)
end = fileobj.tell()
fileobj.seek(current)
return end | python | def filesize(fileobj):
"""Return the number of bytes in the fileobj.
This function seeks to the end of the file, and then back to the original position.
"""
current = fileobj.tell()
fileobj.seek(0, 2)
end = fileobj.tell()
fileobj.seek(current)
return end | Return the number of bytes in the fileobj.
This function seeks to the end of the file, and then back to the original position. | https://github.com/mozilla/build-mar/blob/d8c3b3469e55654d31f430cb343fd89392196c4e/src/mardor/utils.py#L270-L280 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.