code | docstring | func_name | language | repo | path | url | license
---|---|---|---|---|---|---|---|
def decode_input(self, calldata: bytes) -> tuple[str, dict[str, Any]]:
"""
Decode the given calldata using this contract.
If the calldata has a method ID prefix, Ape will detect it and find
the corresponding method, else it will error.
Args:
calldata (bytes): The calldata to decode.
Returns:
tuple[str, dict[str, Any]]: A tuple containing the method selector
along with a mapping of input names to their decoded values.
If an input does not have a name, it will have the stringified
index as its key.
"""
ecosystem = self.provider.network.ecosystem
if calldata in self.contract_type.mutable_methods:
method = self.contract_type.mutable_methods[calldata]
elif calldata in self.contract_type.view_methods:
method = self.contract_type.view_methods[calldata]
else:
method = None
if not method:
raise ContractDataError(
f"Unable to find method ABI from calldata '{to_hex(calldata)}'. "
"Try prepending the method ID to the beginning of the calldata."
)
method_id = ecosystem.get_method_selector(method)
cutoff = len(method_id)
rest_calldata = calldata[cutoff:]
input_dict = ecosystem.decode_calldata(method, rest_calldata)
return method.selector, input_dict
|
Decode the given calldata using this contract.
If the calldata has a method ID prefix, Ape will detect it and find
the corresponding method, else it will error.
Args:
calldata (bytes): The calldata to decode.
Returns:
tuple[str, dict[str, Any]]: A tuple containing the method selector
along with a mapping of input names to their decoded values.
If an input does not have a name, it will have the stringified
index as its key.
|
decode_input
|
python
|
ApeWorX/ape
|
src/ape/contracts/base.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/contracts/base.py
|
Apache-2.0
|
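A minimal usage sketch for ``decode_input``, assuming ``token`` is an already-loaded ``ContractInstance`` and ``calldata`` is raw transaction input that still carries its method-ID prefix (both names are placeholders)::

    selector, inputs = token.decode_input(calldata)
    print(selector)              # e.g. "transfer(address,uint256)"
    for name, value in inputs.items():
        # Unnamed inputs are keyed by their stringified positional index.
        print(name, value)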
def from_receipt(
cls, receipt: "ReceiptAPI", contract_type: "ContractType"
) -> "ContractInstance":
"""
Create a contract instance from the contract deployment receipt.
"""
address = receipt.contract_address
if not address:
raise ChainError(
"Receipt missing 'contract_address' field. "
"Was this from a deploy transaction (e.g. `project.MyContract.deploy()`)?"
)
instance = cls(
address=address,
contract_type=contract_type,
txn_hash=receipt.txn_hash,
)
# Cache creation.
creation = ContractCreation.from_receipt(receipt)
cls.chain_manager.contracts.contract_creations[address] = creation
return instance
|
Create a contract instance from the contract deployment receipt.
|
from_receipt
|
python
|
ApeWorX/ape
|
src/ape/contracts/base.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/contracts/base.py
|
Apache-2.0
|
def call_view_method(self, method_name: str, *args, **kwargs) -> Any:
"""
Call a contract's view function directly using the method_name.
This is helpful in the scenario where the contract has a
method name matching an attribute of the
:class:`~ape.api.address.BaseAddress` class, such as ``nonce``
or ``balance``
Args:
method_name (str): The contract method name to be called
*args: Contract method arguments.
**kwargs: Transaction values, such as ``value`` or ``sender``
Returns:
Any: Output of smart contract view call.
"""
if method_name in self._view_methods_:
view_handler = self._view_methods_[method_name]
output = view_handler(*args, **kwargs)
return output
elif method_name in self._mutable_methods_:
handler = self._mutable_methods_[method_name].call
output = handler(*args, **kwargs)
return output
else:
# Didn't find anything that matches
name = self.contract_type.name or "ContractType"
raise ApeAttributeError(f"'{name}' has no attribute '{method_name}'.")
|
Call a contract's view function directly using the method_name.
This is helpful in the scenario where the contract has a
method name matching an attribute of the
:class:`~ape.api.address.BaseAddress` class, such as ``nonce``
or ``balance``
Args:
method_name (str): The contract method name to be called
*args: Contract method arguments.
**kwargs: Transaction values, such as ``value`` or ``sender``
Returns:
Any: Output of smart contract view call.
|
call_view_method
|
python
|
ApeWorX/ape
|
src/ape/contracts/base.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/contracts/base.py
|
Apache-2.0
|
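A minimal sketch of why ``call_view_method`` exists; ``my_contract`` and its ``balance(address)`` view function are assumptions chosen to collide with the ``BaseAddress.balance`` attribute::

    eth_held = my_contract.balance                               # BaseAddress attribute (wei held by the contract)
    abi_value = my_contract.call_view_method("balance", owner)   # the contract's own view function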
def invoke_transaction(self, method_name: str, *args, **kwargs) -> "ReceiptAPI":
"""
Call a contract's function directly using the method_name.
This function is for non-view functions, which may change
contract state and will execute a transaction.
This is helpful in the scenario where the contract has a
method name matching an attribute of the
:class:`~ape.api.address.BaseAddress` class, such as ``nonce``
or ``balance``
Args:
method_name (str): The contract method name to be called
*args: Contract method arguments.
**kwargs: Transaction values, such as ``value`` or ``sender``
Returns:
:class:`~ape.api.transactions.ReceiptAPI`: Output of smart contract interaction.
"""
if method_name in self._view_methods_:
view_handler = self._view_methods_[method_name].transact
output = view_handler(*args, **kwargs)
return output
elif method_name in self._mutable_methods_:
handler = self._mutable_methods_[method_name]
output = handler(*args, **kwargs)
return output
else:
# Didn't find anything that matches
name = self.contract_type.name or "ContractType"
raise ApeAttributeError(f"'{name}' has no attribute '{method_name}'.")
|
Call a contract's function directly using the method_name.
This function is for non-view functions, which may change
contract state and will execute a transaction.
This is helpful in the scenario where the contract has a
method name matching an attribute of the
:class:`~ape.api.address.BaseAddress` class, such as ``nonce``
or ``balance``
Args:
method_name (str): The contract method name to be called
*args: Contract method arguments.
**kwargs: Transaction values, such as ``value`` or ``sender``
Returns:
:class:`~ape.api.transactions.ReceiptAPI`: Output of smart contract interaction.
|
invoke_transaction
|
python
|
ApeWorX/ape
|
src/ape/contracts/base.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/contracts/base.py
|
Apache-2.0
|
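A minimal sketch, assuming a deployed ``my_contract`` with a mutable ``transfer`` method and an unlocked ``owner`` account (all placeholders)::

    receipt = my_contract.invoke_transaction("transfer", receiver, 100, sender=owner)
    print(receipt.txn_hash)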
def get_event_by_signature(self, signature: str) -> ContractEvent:
"""
Get an event by its signature. Most often, you can use the
:meth:`~ape.contracts.base.ContractInstance.__getattr__`
method on this class to access events. However, in the case
when you have more than one event with the same name, such
as the case where one event is coming from a base contract,
you can use this method to access the respective events.
Args:
signature (str): The signature of the event.
Returns:
:class:`~ape.contracts.base.ContractEvent`
"""
name_from_sig = signature.partition("(")[0].strip()
options = self._events_.get(name_from_sig, [])
err = ContractDataError(f"No event found with signature '{signature}'.")
if not options:
raise err
for evt in options:
if evt.abi.signature == signature:
return evt
raise err
|
Get an event by its signature. Most often, you can use the
:meth:`~ape.contracts.base.ContractInstance.__getattr__`
method on this class to access events. However, in the case
when you have more than one event with the same name, such
as the case where one event is coming from a base contract,
you can use this method to access the respective events.
Args:
signature (str): The signature of the event.
Returns:
:class:`~ape.contracts.base.ContractEvent`
|
get_event_by_signature
|
python
|
ApeWorX/ape
|
src/ape/contracts/base.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/contracts/base.py
|
Apache-2.0
|
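A minimal sketch, assuming the contract's ABI defines two events named ``Transfer`` (for example, one inherited from a base contract), so plain attribute access would be ambiguous::

    transfer = my_contract.get_event_by_signature("Transfer(address,address,uint256)")
    print(transfer.abi.signature)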
def get_error_by_signature(self, signature: str) -> type[CustomError]:
"""
Get an error by its signature, similar to
:meth:`~ape.contracts.ContractInstance.get_event_by_signature`.
Args:
signature (str): The signature of the error.
Returns:
:class:`~ape.exceptions.CustomError`
"""
name_from_sig = signature.partition("(")[0].strip()
options = self._errors_.get(name_from_sig, [])
err = ContractDataError(f"No error found with signature '{signature}'.")
if not options:
raise err
for contract_err in options:
if contract_err.abi and contract_err.abi.signature == signature:
return contract_err
raise err
|
Get an error by its signature, similar to
:meth:`~ape.contracts.ContractInstance.get_event_by_signature`.
Args:
signature (str): The signature of the error.
Returns:
:class:`~ape.exceptions.CustomError`
|
get_error_by_signature
|
python
|
ApeWorX/ape
|
src/ape/contracts/base.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/contracts/base.py
|
Apache-2.0
|
def __dir__(self) -> list[str]:
"""
Display methods to IPython on ``c.[TAB]`` tab completion.
Returns:
list[str]
"""
# NOTE: Type ignores because of this issue: https://github.com/python/typing/issues/1112
# They can be removed after next `mypy` release containing fix.
values = [
"contract_type",
"txn_hash",
self.decode_input.__name__,
self.get_event_by_signature.__name__,
self.invoke_transaction.__name__,
self.call_view_method.__name__,
ContractInstance.creation_metadata.fget.__name__, # type: ignore[attr-defined]
]
return list(
set(self._base_dir_values).union(
self._view_methods_, self._mutable_methods_, self._events_, values
)
)
|
Display methods to IPython on ``c.[TAB]`` tab completion.
Returns:
list[str]
|
__dir__
|
python
|
ApeWorX/ape
|
src/ape/contracts/base.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/contracts/base.py
|
Apache-2.0
|
def __getattr__(self, attr_name: str) -> Any:
"""
Access a method, property, event, or error on the contract using ``.`` access.
Usage example::
result = contract.vote() # Implies a method named "vote" exists on the contract.
Args:
attr_name (str): The name of the method or property to access.
Returns:
Any: The return value from the contract call, or a transaction receipt.
"""
_assert_not_ipython_check(attr_name)
if attr_name in set(super(BaseAddress, self).__dir__()):
return super(BaseAddress, self).__getattribute__(attr_name)
elif attr_name not in {
*self._view_methods_,
*self._mutable_methods_,
*self._events_,
*self._errors_,
}:
# Didn't find anything that matches
# NOTE: `__getattr__` *must* raise `AttributeError`
name = self.contract_type.name or ContractInstance.__name__
raise ApeAttributeError(f"'{name}' has no attribute '{attr_name}'.")
elif (
int(attr_name in self._view_methods_)
+ int(attr_name in self._mutable_methods_)
+ int(attr_name in self._events_)
+ int(attr_name in self._errors_)
> 1
):
# ABI should not contain a mix of events, mutable and view methods that match
# NOTE: `__getattr__` *must* raise `AttributeError`
cls_name = getattr(type(self), "__name__", ContractInstance.__name__)
raise ApeAttributeError(f"{cls_name} has corrupted ABI.")
if attr_name in self._view_methods_:
return self._view_methods_[attr_name]
elif attr_name in self._mutable_methods_:
return self._mutable_methods_[attr_name]
elif attr_name in self._events_:
evt_options = self._events_[attr_name]
if len(evt_options) > 1:
return ContractEventWrapper(evt_options)
return evt_options[0]
elif attr_name in self._errors_:
err_options = self._errors_[attr_name]
if len(err_options) > 1:
raise ApeAttributeError(
f"Multiple errors named '{attr_name}' in '{self.contract_type.name}'.\n"
f"Use '{self.get_error_by_signature.__name__}' look-up."
)
return err_options[0]
else:
raise ApeAttributeError(
f"No attribute '{attr_name}' found in contract '{self.address}'."
)
|
Access a method, property, event, or error on the contract using ``.`` access.
Usage example::
result = contract.vote() # Implies a method named "vote" exists on the contract.
Args:
attr_name (str): The name of the method or property to access.
Returns:
Any: The return value from the contract call, or a transaction receipt.
|
__getattr__
|
python
|
ApeWorX/ape
|
src/ape/contracts/base.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/contracts/base.py
|
Apache-2.0
|
def at(
self,
address: "AddressType",
txn_hash: Optional[Union[str, HexBytes]] = None,
fetch_from_explorer: bool = True,
proxy_info: Optional["ProxyInfoAPI"] = None,
detect_proxy: bool = True,
) -> ContractInstance:
"""
Get a contract at the given address.
Usage example::
from ape import project
my_contract = project.MyContract.at("0xAbC1230001112223334445566611855443322111")
Args:
address (str): The address to initialize a contract.
**NOTE**: Things will not work as expected if the contract is not actually
deployed to this address or if the contract at the given address has
a different ABI than :attr:`~ape.contracts.ContractContainer.contract_type`.
txn_hash (Union[str, HexBytes]): The hash of the transaction that deployed the
contract, if available. Defaults to ``None``.
fetch_from_explorer (bool): Set to ``False`` to avoid fetching from an explorer.
proxy_info (:class:`~ape.api.networks.ProxyInfoAPI` | None): Proxy info object to set
to avoid detection; defaults to ``None`` which will detect it if ``detect_proxy=True``.
detect_proxy (bool): Set to ``False`` to avoid detecting missing proxy info.
Returns:
:class:`~ape.contracts.ContractInstance`
"""
return self.chain_manager.contracts.instance_at(
address,
contract_type=self.contract_type,
txn_hash=txn_hash,
fetch_from_explorer=fetch_from_explorer,
proxy_info=proxy_info,
detect_proxy=detect_proxy,
)
|
Get a contract at the given address.
Usage example::
from ape import project
my_contract = project.MyContract.at("0xAbC1230001112223334445566611855443322111")
Args:
address (str): The address to initialize a contract.
**NOTE**: Things will not work as expected if the contract is not actually
deployed to this address or if the contract at the given address has
a different ABI than :attr:`~ape.contracts.ContractContainer.contract_type`.
txn_hash (Union[str, HexBytes]): The hash of the transaction that deployed the
contract, if available. Defaults to ``None``.
fetch_from_explorer (bool): Set to ``False`` to avoid fetching from an explorer.
proxy_info (:class:`~ape.api.networks.ProxyInfoAPI` | None): Proxy info object to set
to avoid detection; defaults to ``None`` which will detect it if ``detect_proxy=True``.
detect_proxy (bool): Set to ``False`` to avoid detecting missing proxy info.
Returns:
:class:`~ape.contracts.ContractInstance`
|
at
|
python
|
ApeWorX/ape
|
src/ape/contracts/base.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/contracts/base.py
|
Apache-2.0
|
def deploy(self, *args, publish: bool = False, **kwargs) -> ContractInstance:
"""
Deploy a contract.
Args:
*args (Any): The contract's constructor arguments as Python types.
publish (bool): Whether to also perform contract-verification.
Defaults to ``False``.
Returns:
:class:`~ape.contracts.base.ContractInstance`
"""
bytecode = self.contract_type.deployment_bytecode
if not bytecode or bytecode.bytecode in (None, "", "0x"):
raise MissingDeploymentBytecodeError(self.contract_type)
txn = self(*args, **kwargs)
private = kwargs.get("private", False)
if kwargs.get("value") and not self.contract_type.constructor.is_payable:
raise MethodNonPayableError("Sending funds to a non-payable constructor.")
if "sender" in kwargs and hasattr(kwargs["sender"], "call"):
# Handle account-related preparation if needed, such as signing
receipt = self._cache_wrap(lambda: kwargs["sender"].call(txn, **kwargs))
else:
txn = self.provider.prepare_transaction(txn)
receipt = self._cache_wrap(
lambda: (
self.provider.send_private_transaction(txn)
if private
else self.provider.send_transaction(txn)
)
)
address = receipt.contract_address
if not address:
raise ChainError(f"'{receipt.txn_hash}' did not create a contract.")
styled_address = click.style(receipt.contract_address, bold=True)
contract_name = self.contract_type.name or "<Unnamed Contract>"
logger.success(f"Contract '{contract_name}' deployed to: {styled_address}")
instance = ContractInstance.from_receipt(receipt, self.contract_type)
self.chain_manager.contracts.cache_deployment(instance)
if publish:
self.local_project.deployments.track(instance)
self.provider.network.publish_contract(address)
instance.base_path = self.base_path or self.local_project.contracts_folder
return instance
|
Deploy a contract.
Args:
*args (Any): The contract's constructor arguments as Python types.
publish (bool): Whether to also perform contract-verification.
Defaults to ``False``.
Returns:
:class:`~ape.contracts.base.ContractInstance`
|
deploy
|
python
|
ApeWorX/ape
|
src/ape/contracts/base.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/contracts/base.py
|
Apache-2.0
|
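A minimal deployment sketch; the project contract name ``MyContract``, its single constructor argument, and the ``"dev"`` account alias are assumptions::

    from ape import accounts, project

    owner = accounts.load("dev")
    instance = project.MyContract.deploy(123, sender=owner)
    print(instance.address)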
def _cache_wrap(self, function: Callable) -> "ReceiptAPI":
"""
A helper method to ensure a contract type is cached as early
as possible to help enrich errors from ``deploy()`` transactions,
as well as produce nicer tracebacks for these errors. It also helps
you make assertions about these revert conditions in your tests.
"""
try:
return function()
except ContractLogicError as err:
if address := err.address:
self.chain_manager.contracts[address] = self.contract_type
err = err.with_ape_traceback() # Re-try setting source traceback
new_err = None
try:
# Try enrichment again now that the contract type is cached.
new_err = self.compiler_manager.enrich_error(err)
except Exception:
pass
if new_err:
raise new_err from err
raise # The error after caching.
|
A helper method to ensure a contract type is cached as early
as possible to help enrich errors from ``deploy()`` transactions,
as well as produce nicer tracebacks for these errors. It also helps
you make assertions about these revert conditions in your tests.
|
_cache_wrap
|
python
|
ApeWorX/ape
|
src/ape/contracts/base.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/contracts/base.py
|
Apache-2.0
|
def __getattr__(self, item: str) -> Union[ContractContainer, "ContractNamespace"]:
"""
Access the next contract container or namespace.
Args:
item (str): The name of the next node.
Returns:
Union[:class:`~ape.contracts.base.ContractContainer`,
:class:`~ape.contracts.base.ContractNamespace`]
"""
_assert_not_ipython_check(item)
def _get_name(cc: ContractContainer) -> str:
return cc.contract_type.name or ""
for contract in self.contracts:
search_contract_name = _get_name(contract)
search_name = (
search_contract_name.replace(f"{self.name}.", "") if search_contract_name else None
)
if not search_name:
continue
elif search_name == item:
return contract
elif "." in search_name:
next_node = search_name.partition(".")[0]
if next_node != item:
continue
subname = f"{self.name}.{next_node}"
subcontracts = [c for c in self.contracts if _get_name(c).startswith(subname)]
return ContractNamespace(subname, subcontracts)
return self.__getattribute__(item)
|
Access the next contract container or namespace.
Args:
item (str): The name of the next node.
Returns:
Union[:class:`~ape.contracts.base.ContractContainer`,
:class:`~ape.contracts.base.ContractNamespace`]
|
__getattr__
|
python
|
ApeWorX/ape
|
src/ape/contracts/base.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/contracts/base.py
|
Apache-2.0
|
def mnemonic(self, value: str):
"""
The seed phrase for generated test accounts.
**WARNING**: Changing the test-mnemonic mid-session
re-starts the provider (if connected to one).
"""
self.config_manager.test.mnemonic = value
self.containers["test"].mnemonic = value
if provider := self.network_manager.active_provider:
provider.update_settings({"mnemonic": value})
self._accounts_by_index = {}
|
The seed phrase for generated test accounts.
**WARNING**: Changing the test-mnemonic mid-session
re-starts the provider (if connected to one).
|
mnemonic
|
python
|
ApeWorX/ape
|
src/ape/managers/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/accounts.py
|
Apache-2.0
|
def impersonate_account(self, address: AddressType) -> ImpersonatedAccount:
"""
Impersonate an account for testing purposes.
Args:
address (AddressType): The address to impersonate.
"""
try:
result = self.provider.unlock_account(address)
except NotImplementedError as err:
raise AccountsError(
f"Provider '{self.provider.name}' does not support impersonating accounts."
) from err
if result:
if address in self._impersonated_accounts:
return self._impersonated_accounts[address]
account = ImpersonatedAccount(raw_address=address)
self._impersonated_accounts[address] = account
return account
raise AccountsError(f"Unable to unlocked account '{address}'.")
|
Impersonate an account for testing purposes.
Args:
address (AddressType): The address to impersonate.
|
impersonate_account
|
python
|
ApeWorX/ape
|
src/ape/managers/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/accounts.py
|
Apache-2.0
|
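A minimal sketch, assuming a local or forked provider that supports unlocking accounts and assuming this method is reached through the test-account manager (``accounts.test_accounts``); the whale address is a placeholder::

    from ape import accounts

    whale = accounts.test_accounts.impersonate_account(
        "0x0000000000000000000000000000000000000001"
    )
    print(whale.address)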
def stop_impersonating(self, address: AddressType):
"""
End the impersonating of an account, if it is being impersonated.
Args:
address (AddressType): The address to stop impersonating.
"""
if address in self._impersonated_accounts:
del self._impersonated_accounts[address]
try:
self.provider.relock_account(address)
except NotImplementedError:
pass
|
End the impersonating of an account, if it is being impersonated.
Args:
address (AddressType): The address to stop impersonating.
|
stop_impersonating
|
python
|
ApeWorX/ape
|
src/ape/managers/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/accounts.py
|
Apache-2.0
|
def containers(self) -> dict[str, AccountContainerAPI]:
"""
A dict of all :class:`~ape.api.accounts.AccountContainerAPI` instances
across all installed plugins.
Returns:
dict[str, :class:`~ape.api.accounts.AccountContainerAPI`]
"""
containers = {}
data_folder = self.config_manager.DATA_FOLDER
data_folder.mkdir(exist_ok=True)
for plugin_name, (container_type, account_type) in self.plugin_manager.account_types:
# Ignore containers that contain test accounts.
if issubclass(account_type, TestAccountAPI):
continue
containers[plugin_name] = container_type(name=plugin_name, account_type=account_type)
return containers
|
A dict of all :class:`~ape.api.accounts.AccountContainerAPI` instances
across all installed plugins.
Returns:
dict[str, :class:`~ape.api.accounts.AccountContainerAPI`]
|
containers
|
python
|
ApeWorX/ape
|
src/ape/managers/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/accounts.py
|
Apache-2.0
|
def aliases(self) -> Iterator[str]:
"""
All account aliases from every account-related plugin. The "alias"
is part of the :class:`~ape.api.accounts.AccountAPI`. Use the
account alias to load an account via the
:meth:`~ape.managers.accounts.AccountManager.load` method.
Returns:
Iterator[str]
"""
for container in self.containers.values():
yield from container.aliases
|
All account aliases from every account-related plugin. The "alias"
is part of the :class:`~ape.api.accounts.AccountAPI`. Use the
account alias to load an account via the
:meth:`~ape.managers.accounts.AccountManager.load` method.
Returns:
Iterator[str]
|
aliases
|
python
|
ApeWorX/ape
|
src/ape/managers/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/accounts.py
|
Apache-2.0
|
def load(self, alias: str) -> AccountAPI:
"""
Get an account by its alias.
Raises:
KeyError: When there is no local account with the given alias.
Returns:
:class:`~ape.api.accounts.AccountAPI`
"""
if alias == "":
raise ValueError("Cannot use empty string as alias!")
elif alias in self._alias_to_account_cache:
return self._alias_to_account_cache[alias]
for account in self:
if account.alias and account.alias == alias:
self._alias_to_account_cache[alias] = account
return account
raise KeyError(f"No account with alias '{alias}'.")
|
Get an account by its alias.
Raises:
KeyError: When there is no local account with the given alias.
Returns:
:class:`~ape.api.accounts.AccountAPI`
|
load
|
python
|
ApeWorX/ape
|
src/ape/managers/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/accounts.py
|
Apache-2.0
|
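A minimal sketch; the ``"dev"`` alias is a placeholder for whatever alias you created with your account plugin::

    from ape import accounts

    dev = accounts.load("dev")
    print(dev.address)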
def __getitem_int(self, account_id: int) -> AccountAPI:
"""
Get an account by index. For example, when you do the CLI command
``ape accounts list --all``, you will see a list of enumerated accounts
by their indices. Use this method as a quicker, ad-hoc way to get an
account from that index.
**NOTE**: It is generally preferred to use
:meth:`~ape.managers.accounts.AccountManager.load` or
:meth:`~ape.managers.accounts.AccountManager.__getitem_str`.
Returns:
:class:`~ape.api.accounts.AccountAPI`
"""
if account_id < 0:
account_id = len(self) + account_id
for idx, account in enumerate(self):
if account_id == idx:
return account
raise IndexError(f"No account at index '{account_id}'.")
|
Get an account by index. For example, when you do the CLI command
``ape accounts list --all``, you will see a list of enumerated accounts
by their indices. Use this method as a quicker, ad-hoc way to get an
account from that index.
**NOTE**: It is generally preferred to use
:meth:`~ape.managers.accounts.AccountManager.load` or
:meth:`~ape.managers.accounts.AccountManager.__getitem_str`.
Returns:
:class:`~ape.api.accounts.AccountAPI`
|
__getitem_int
|
python
|
ApeWorX/ape
|
src/ape/managers/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/accounts.py
|
Apache-2.0
|
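A minimal index-access sketch; which accounts exist depends on your installed account plugins::

    from ape import accounts

    first = accounts[0]
    last = accounts[-1]   # negative indices count back from the end
    print(first.address, last.address)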
def __getitem_slice(self, account_id: slice):
"""
Get list of accounts by slice. For example, when you do the CLI command
``ape accounts list --all``, you will see a list of enumerated accounts
by their indices. Use this method as a quicker, ad-hoc way to get
accounts from a slice.
**NOTE**: It is generally preferred to use
:meth:`~ape.managers.accounts.AccountManager.load` or
:meth:`~ape.managers.accounts.AccountManager.__getitem_str`.
Returns:
list[:class:`~ape.api.accounts.AccountAPI`]
"""
start_idx = account_id.start or 0
if start_idx < 0:
start_idx += len(self)
stop_idx = account_id.stop or len(self)
if stop_idx < 0:
stop_idx += len(self)
step_size = account_id.step or 1
return [self[i] for i in range(start_idx, stop_idx, step_size)]
|
Get list of accounts by slice. For example, when you do the CLI command
``ape accounts list --all``, you will see a list of enumerated accounts
by their indices. Use this method as a quicker, ad-hoc way to get
accounts from a slice.
**NOTE**: It is generally preferred to use
:meth:`~ape.managers.accounts.AccountManager.load` or
:meth:`~ape.managers.accounts.AccountManager.__getitem_str`.
Returns:
list[:class:`~ape.api.accounts.AccountAPI`]
|
__getitem_slice
|
python
|
ApeWorX/ape
|
src/ape/managers/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/accounts.py
|
Apache-2.0
|
def __getitem_str(self, account_str: str) -> AccountAPI:
"""
Get an account by address. If we are using a provider that supports unlocking
accounts, this method will return an impersonated account at that address.
Raises:
KeyError: When there is no local account with the given address.
Returns:
:class:`~ape.api.accounts.AccountAPI`
"""
try:
account_id = self.conversion_manager.convert(account_str, AddressType)
except ConversionError as err:
prefix = f"No account with ID '{account_str}'"
if account_str.endswith(".eth"):
suffix = "Do you have `ape-ens` installed?"
else:
suffix = "Do you have the necessary conversion plugins installed?"
raise KeyError(f"{prefix}. {suffix}") from err
for container in self.containers.values():
if account_id in container.accounts:
return container[account_id]
# NOTE: Fallback to `TestAccountContainer`'s method for loading items
return self.test_accounts[account_id]
|
Get an account by address. If we are using a provider that supports unlocking
accounts, this method will return an impersonated account at that address.
Raises:
KeyError: When there is no local account with the given address.
Returns:
:class:`~ape.api.accounts.AccountAPI`
|
__getitem_str
|
python
|
ApeWorX/ape
|
src/ape/managers/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/accounts.py
|
Apache-2.0
|
def __contains__(self, address: AddressType) -> bool:
"""
Determine if the given address matches an account in ``ape``.
Args:
address (:class:`~ape.types.address.AddressType`): The address to check.
Returns:
bool: ``True`` when the given address is found.
"""
return (
any(address in container for container in self.containers.values())
or address in self.test_accounts
)
|
Determine if the given address matches an account in ``ape``.
Args:
address (:class:`~ape.types.address.AddressType`): The address to check.
Returns:
bool: ``True`` when the given address is found.
|
__contains__
|
python
|
ApeWorX/ape
|
src/ape/managers/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/accounts.py
|
Apache-2.0
|
def resolve_address(
self, account_id: Union["BaseAddress", AddressType, str, int, bytes]
) -> Optional[AddressType]:
"""
Resolve the given input to an address.
Args:
account_id (:class:`~ape.api.address.BaseAddress`, str, int, bytes): The input to resolve.
It handles anything that converts to an ``AddressType``, such as an ENS name or a
``BaseAddress``, as well as account aliases Ape is aware of and int or bytes address values.
Returns:
:class:`~ape.types.AddressType` | None
"""
if isinstance(account_id, str) and account_id.startswith("0x"):
# Was given a hex-address string.
if provider := self.network_manager.active_provider:
return provider.network.ecosystem.decode_address(account_id)
else:
# Assume Ethereum-like.
return self.network_manager.ether.decode_address(account_id)
elif not isinstance(account_id, str):
# Was given either an integer, bytes, or a BaseAddress (account or contract).
return self.conversion_manager.convert(account_id, AddressType)
elif isinstance(account_id, str) and account_id in self.aliases:
# Was given an account alias.
account = self.load(account_id)
return account.address
elif (
isinstance(account_id, str)
and account_id.startswith("TEST::")
and account_id[-1].isdigit()
):
# Test account "alias".
account_idx = int(account_id[-1])
return self.test_accounts[account_idx]
elif isinstance(account_id, str) and not is_hex(account_id):
# Was maybe given an ENS name.
try:
return self.conversion_manager.convert(account_id, AddressType)
except ConversionError:
return None
return None
|
Resolve the given input to an address.
Args:
account_id (:class:`~ape.api.address.BaseAddress`, str, int, bytes): The input to resolve.
It handles anything that converts to an ``AddressType``, such as an ENS name or a
``BaseAddress``, as well as account aliases Ape is aware of and int or bytes address values.
Returns:
:class:`~ape.types.AddressType` | None
|
resolve_address
|
python
|
ApeWorX/ape
|
src/ape/managers/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/accounts.py
|
Apache-2.0
|
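A minimal sketch, assuming ``resolve_address`` is called on the global account manager; the alias and ENS name are placeholders, and ENS resolution assumes the ``ape-ens`` plugin is installed::

    from ape import accounts

    addr_from_hex = accounts.resolve_address("0x0000000000000000000000000000000000000001")
    addr_from_alias = accounts.resolve_address("dev")
    addr_from_ens = accounts.resolve_address("example.eth")  # None if it cannot be resolved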
def __getitem__(self, block_number: int) -> BlockAPI:
"""
Get a block by number. Negative numbers start at the chain head and
move backwards. For example, ``-1`` would be the latest block and
``-2`` would be the block prior to that one, and so on.
Args:
block_number (int): The number of the block to get.
Returns:
:class:`~ape.api.providers.BlockAPI`
"""
if block_number < 0:
block_number = len(self) + block_number
return self.provider.get_block(block_number)
|
Get a block by number. Negative numbers start at the chain head and
move backwards. For example, ``-1`` would be the latest block and
``-2`` would be the block prior to that one, and so on.
Args:
block_number (int): The number of the block to get.
Returns:
:class:`~ape.api.providers.BlockAPI`
|
__getitem__
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
def query(
self,
*columns: str,
start_block: int = 0,
stop_block: Optional[int] = None,
step: int = 1,
engine_to_use: Optional[str] = None,
) -> pd.DataFrame:
"""
A method for querying blocks and returning a ``pandas.DataFrame``. If you
do not provide a starting block, block 0 is assumed. If you do not
provide a stopping block, the latest block is assumed. You can pass
``engine_to_use`` to short-circuit engine selection.
Raises:
:class:`~ape.exceptions.ChainError`: When ``stop_block`` is greater
than the chain length.
Args:
*columns (str): columns in the DataFrame to return
start_block (int): The first block, by number, to include in the
query. Defaults to 0.
stop_block (Optional[int]): The last block, by number, to include
in the query. Defaults to the latest block.
step (int): The number of blocks to iterate between block numbers.
Defaults to ``1``.
engine_to_use (Optional[str]): query engine to use, bypasses query
engine selection algorithm.
Returns:
pd.DataFrame
"""
if start_block < 0:
start_block = len(self) + start_block
if stop_block is None:
stop_block = self.height
elif stop_block < 0:
stop_block = len(self) + stop_block
elif stop_block > len(self):
raise ChainError(
f"'stop={stop_block}' cannot be greater than the chain length ({self.height})."
)
query = BlockQuery(
columns=list(columns),
start_block=start_block,
stop_block=stop_block,
step=step,
)
blocks = self.query_manager.query(query, engine_to_use=engine_to_use)
columns: list[str] = validate_and_expand_columns( # type: ignore
columns, self.head.__class__
)
extraction = partial(extract_fields, columns=columns)
data = map(lambda b: extraction(b), blocks)
return pd.DataFrame(columns=columns, data=data)
|
A method for querying blocks and returning a ``pandas.DataFrame``. If you
do not provide a starting block, block 0 is assumed. If you do not
provide a stopping block, the latest block is assumed. You can pass
``engine_to_use`` to short-circuit engine selection.
Raises:
:class:`~ape.exceptions.ChainError`: When ``stop_block`` is greater
than the chain length.
Args:
*columns (str): columns in the DataFrame to return
start_block (int): The first block, by number, to include in the
query. Defaults to 0.
stop_block (Optional[int]): The last block, by number, to include
in the query. Defaults to the latest block.
step (int): The number of blocks to iterate between block numbers.
Defaults to ``1``.
engine_to_use (Optional[str]): query engine to use, bypasses query
engine selection algorithm.
Returns:
pd.DataFrame
|
query
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
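A minimal sketch of a block query, assuming a connected provider and a chain at least 100 blocks long::

    from ape import chain

    df = chain.blocks.query("number", "timestamp", start_block=-100)
    print(df.tail())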
def range(
self,
start_or_stop: int,
stop: Optional[int] = None,
step: int = 1,
engine_to_use: Optional[str] = None,
) -> Iterator[BlockAPI]:
"""
Iterate over blocks. Works similarly to python ``range()``.
Raises:
:class:`~ape.exceptions.ChainError`: When ``stop`` is greater
than the chain length.
:class:`~ape.exceptions.ChainError`: When ``stop`` is less
than ``start_block``.
:class:`~ape.exceptions.ChainError`: When ``stop`` is less
than 0.
:class:`~ape.exceptions.ChainError`: When ``start`` is less
than 0.
Args:
start_or_stop (int): When given just a single value, it is the stop.
Otherwise, it is the start. This mimics the behavior of ``range``
built-in Python function.
stop (Optional[int]): The block number to stop before. Also the total
number of blocks to get. If not setting a start value, is set by
the first argument.
step (Optional[int]): The value to increment by. Defaults to ``1``.
engine_to_use (Optional[str]): query engine to use, bypasses query
engine selection algorithm.
Returns:
Iterator[:class:`~ape.api.providers.BlockAPI`]
"""
if stop is None:
stop = start_or_stop
start = 0
else:
start = start_or_stop
if stop > len(self):
raise ChainError(
f"'stop={stop}' cannot be greater than the chain length ({len(self)}). "
f"Use '{self.poll_blocks.__name__}()' to wait for future blocks."
)
# Note: the range `stop_block` is a non-inclusive stop, while the
# `.query` method uses an inclusive stop, so we must adjust downwards.
query = BlockQuery(
columns=list(self.head.model_fields), # TODO: fetch the block fields from EcosystemAPI
start_block=start,
stop_block=stop - 1,
step=step,
)
blocks = self.query_manager.query(query, engine_to_use=engine_to_use)
yield from cast(Iterator[BlockAPI], blocks)
|
Iterate over blocks. Works similarly to python ``range()``.
Raises:
:class:`~ape.exceptions.ChainError`: When ``stop`` is greater
than the chain length.
:class:`~ape.exceptions.ChainError`: When ``stop`` is less
than ``start_block``.
:class:`~ape.exceptions.ChainError`: When ``stop`` is less
than 0.
:class:`~ape.exceptions.ChainError`: When ``start`` is less
than 0.
Args:
start_or_stop (int): When given just a single value, it is the stop.
Otherwise, it is the start. This mimics the behavior of ``range``
built-in Python function.
stop (Optional[int]): The block number to stop before. Also the total
number of blocks to get. If not setting a start value, is set by
the first argument.
step (Optional[int]): The value to increment by. Defaults to ``1``.
engine_to_use (Optional[str]): query engine to use, bypasses query
engine selection algorithm.
Returns:
Iterator[:class:`~ape.api.providers.BlockAPI`]
|
range
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
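A minimal sketch, assuming the connected chain has at least ten blocks::

    from ape import chain

    for block in chain.blocks.range(0, 10):
        print(block.number, block.timestamp)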
def poll_blocks(
self,
start_block: Optional[int] = None,
stop_block: Optional[int] = None,
required_confirmations: Optional[int] = None,
new_block_timeout: Optional[int] = None,
) -> Iterator[BlockAPI]:
"""
Poll new blocks. Optionally set a start block to include historical blocks.
**NOTE**: When a chain reorganization occurs, this method logs an error and
yields the missed blocks, even if they were previously yielded with different
block numbers.
**NOTE**: This is a daemon method; it does not terminate unless an exception occurs
or a ``stop_block`` is given.
Usage example::
from ape import chain
for new_block in chain.blocks.poll_blocks():
print(f"New block found: number={new_block.number}")
Args:
start_block (Optional[int]): The block number to start with. Defaults to the pending
block number.
stop_block (Optional[int]): Optionally set a future block number to stop at.
Defaults to never-ending.
required_confirmations (Optional[int]): The amount of confirmations to wait
before yielding the block. The more confirmations, the less likely a reorg will occur.
Defaults to the network's configured required confirmations.
new_block_timeout (Optional[float]): The amount of time to wait for a new block before
timing out. Defaults to 10 seconds for local networks or ``50 * block_time`` for live
networks.
Returns:
Iterator[:class:`~ape.api.providers.BlockAPI`]
"""
if required_confirmations is None:
required_confirmations = self.network_confirmations
if stop_block is not None and stop_block <= self.chain_manager.blocks.height:
raise ValueError("'stop' argument must be in the future.")
# Get number of last block with the necessary amount of confirmations.
block = None
head_minus_confirms = self.height - required_confirmations
if start_block is not None and start_block <= head_minus_confirms:
# Front-load historical blocks.
for block in self.range(start_block, head_minus_confirms + 1):
yield block
yield from self.provider.poll_blocks(
stop_block=stop_block,
required_confirmations=required_confirmations,
new_block_timeout=new_block_timeout,
)
|
Poll new blocks. Optionally set a start block to include historical blocks.
**NOTE**: When a chain reorganization occurs, this method logs an error and
yields the missed blocks, even if they were previously yielded with different
block numbers.
**NOTE**: This is a daemon method; it does not terminate unless an exception occurs
or a ``stop_block`` is given.
Usage example::
from ape import chain
for new_block in chain.blocks.poll_blocks():
print(f"New block found: number={new_block.number}")
Args:
start_block (Optional[int]): The block number to start with. Defaults to the pending
block number.
stop_block (Optional[int]): Optionally set a future block number to stop at.
Defaults to never-ending.
required_confirmations (Optional[int]): The amount of confirmations to wait
before yielding the block. The more confirmations, the less likely a reorg will occur.
Defaults to the network's configured required confirmations.
new_block_timeout (Optional[float]): The amount of time to wait for a new block before
timing out. Defaults to 10 seconds for local networks or ``50 * block_time`` for live
networks.
Returns:
Iterator[:class:`~ape.api.providers.BlockAPI`]
|
poll_blocks
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
def outgoing(self) -> Iterator[ReceiptAPI]:
"""
All outgoing transactions, from earliest to latest.
"""
start_nonce = 0
stop_nonce = len(self) - 1 # just to cache this value
# TODO: Add ephemeral network sessional history to `ape-cache` instead,
# and remove this (replace with `yield from iter(self[:len(self)])`)
for receipt in self.sessional:
if receipt.nonce is None:
            # Not an on-chain receipt? Only ever observed as an anomaly in tests.
continue
elif receipt.nonce < start_nonce:
raise QueryEngineError("Sessional history corrupted")
elif receipt.nonce > start_nonce:
# NOTE: There's a gap in our sessional history, so fetch from query engine
yield from iter(self[start_nonce : receipt.nonce + 1])
yield receipt
start_nonce = receipt.nonce + 1 # start next loop on the next item
if start_nonce != stop_nonce:
# NOTE: there is no more session history, so just return query engine iterator
yield from iter(self[start_nonce : stop_nonce + 1])
|
All outgoing transactions, from earliest to latest.
|
outgoing
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
def query(
self,
*columns: str,
start_nonce: int = 0,
stop_nonce: Optional[int] = None,
engine_to_use: Optional[str] = None,
) -> pd.DataFrame:
"""
A method for querying transactions made by an account and returning a
``pandas.DataFrame``. If you do not provide a starting nonce, the first
transaction is assumed. If you do not provide a stopping nonce, the last
transaction is assumed. You can pass ``engine_to_use`` to short-circuit
engine selection.
Raises:
:class:`~ape.exceptions.ChainError`: When ``stop_nonce`` is greater
than the account's current nonce.
Args:
*columns (str): columns in the DataFrame to return
start_nonce (int): The first transaction, by nonce, to include in the
query. Defaults to 0.
stop_nonce (Optional[int]): The last transaction, by nonce, to include
in the query. Defaults to the latest transaction.
engine_to_use (Optional[str]): query engine to use, bypasses query
engine selection algorithm.
Returns:
pd.DataFrame
"""
if start_nonce < 0:
start_nonce = len(self) + start_nonce
if stop_nonce is None:
stop_nonce = len(self)
elif stop_nonce < 0:
stop_nonce = len(self) + stop_nonce
elif stop_nonce > len(self):
raise ChainError(
f"'stop={stop_nonce}' cannot be greater than account's current nonce ({len(self)})."
)
query = AccountTransactionQuery(
columns=list(columns),
account=self.address,
start_nonce=start_nonce,
stop_nonce=stop_nonce,
)
txns = self.query_manager.query(query, engine_to_use=engine_to_use)
columns = validate_and_expand_columns(columns, ReceiptAPI) # type: ignore
extraction = partial(extract_fields, columns=columns)
data = map(lambda tx: extraction(tx), txns)
return pd.DataFrame(columns=columns, data=data)
|
A method for querying transactions made by an account and returning a
``pandas.DataFrame``. If you do not provide a starting nonce, the first
transaction is assumed. If you do not provide a stopping nonce, the last
transaction is assumed. You can pass ``engine_to_use`` to short-circuit
engine selection.
Raises:
:class:`~ape.exceptions.ChainError`: When ``stop_nonce`` is greater
than the account's current nonce.
Args:
*columns (str): columns in the DataFrame to return
start_nonce (int): The first transaction, by nonce, to include in the
query. Defaults to 0.
stop_nonce (Optional[int]): The last transaction, by nonce, to include
in the query. Defaults to the latest transaction.
engine_to_use (Optional[str]): query engine to use, bypasses query
engine selection algorithm.
Returns:
pd.DataFrame
|
query
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
def __getitem_str(self, account_or_hash: str) -> Union[AccountHistory, ReceiptAPI]:
"""
Get a receipt from the history by its transaction hash.
If the receipt is not currently cached, will use the provider
to retrieve it.
Args:
account_or_hash (str): The hash of the transaction.
Returns:
:class:`~ape.api.transactions.ReceiptAPI`: The receipt.
"""
def _get_receipt() -> Optional[ReceiptAPI]:
try:
return self._get_receipt(account_or_hash)
except Exception:
return None
is_account = False
if not account_or_hash.startswith("0x"):
# Attempt converting.
try:
account_or_hash = self.conversion_manager.convert(account_or_hash, AddressType)
except Exception:
# Pretend this never happened.
pass
else:
is_account = True
try:
address = self.provider.network.ecosystem.decode_address(account_or_hash)
history = self._get_account_history(address)
if len(history) > 0:
return history
except Exception as err:
msg = f"'{account_or_hash}' is not a known address or transaction hash."
if is_account:
raise ChainError(msg) from err
# Try to treat as transaction hash.
elif receipt := _get_receipt():
return receipt
# Not an account or tx hash (with success).
raise ChainError(msg) from err
# No account history found. Check for transaction hash.
if receipt := _get_receipt():
return receipt
# Nothing found. Return empty history
return history
|
Get a receipt from the history by its transaction hash.
If the receipt is not currently cached, will use the provider
to retrieve it.
Args:
account_or_hash (str): The hash of the transaction.
Returns:
:class:`~ape.api.transactions.ReceiptAPI`: The receipt.
|
__getitem_str
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
def append(self, txn_receipt: ReceiptAPI):
"""
Add a transaction to the cache. This is useful for sessional transactions.
Raises:
:class:`~ape.exceptions.ChainError`: When trying to append a transaction
receipt that is already in the list.
Args:
txn_receipt (:class:`~ape.api.transactions.ReceiptAPI`): The transaction receipt.
"""
self._hash_to_receipt_map[txn_receipt.txn_hash] = txn_receipt
key = txn_receipt.sender or ZERO_ADDRESS
address = self.conversion_manager.convert(key, AddressType)
if address not in self._account_history_cache:
self._account_history_cache[address] = AccountHistory(address=address)
self._account_history_cache[address].append(txn_receipt)
|
Add a transaction to the cache. This is useful for sessional transactions.
Raises:
:class:`~ape.exceptions.ChainError`: When trying to append a transaction
receipt that is already in the list.
Args:
txn_receipt (:class:`~ape.api.transactions.ReceiptAPI`): The transaction receipt.
|
append
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
def revert_to_block(self, block_number: int):
"""
Remove all receipts past the given block number.
Args:
block_number (int): The block number to revert to.
"""
self._hash_to_receipt_map = {
h: r for h, r in self._hash_to_receipt_map.items() if r.block_number <= block_number
}
for account_history in self._account_history_cache.values():
account_history.revert_to_block(block_number)
|
Remove all receipts past the given block number.
Args:
block_number (int): The block number to revert to.
|
revert_to_block
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
def blocks(self) -> BlockContainer:
"""
The list of blocks on the chain.
"""
if self.chain_id not in self._block_container_map:
blocks = BlockContainer()
self._block_container_map[self.chain_id] = blocks
return self._block_container_map[self.chain_id]
|
The list of blocks on the chain.
|
blocks
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
def history(self) -> TransactionHistory:
"""
A mapping of transactions from the active session to the account responsible.
"""
try:
chain_id = self.chain_id
except ProviderNotConnectedError:
return TransactionHistory() # Empty list.
if chain_id not in self._transaction_history_map:
history = TransactionHistory()
self._transaction_history_map[chain_id] = history
return self._transaction_history_map[chain_id]
|
A mapping of transactions from the active session to the account responsible.
|
history
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
def chain_id(self) -> int:
"""
The blockchain ID.
See `ChainList <https://chainlist.org/>`__ for a comprehensive list of IDs.
"""
network_name = self.provider.network.name
if network_name not in self._chain_id_map:
self._chain_id_map[network_name] = self.provider.chain_id
return self._chain_id_map[network_name]
|
The blockchain ID.
See `ChainList <https://chainlist.org/>`__ for a comprehensive list of IDs.
|
chain_id
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
def snapshot(self) -> "SnapshotID":
"""
Record the current state of the blockchain with intent to later
call the method :meth:`~ape.managers.chain.ChainManager.revert`
to go back to this point. This method is for local networks only.
Raises:
NotImplementedError: When the active provider does not support
snapshotting.
Returns:
:class:`~ape.types.SnapshotID`: The snapshot ID.
"""
chain_id = self.provider.chain_id
snapshot_id = self.provider.snapshot()
if snapshot_id not in self._snapshots[chain_id]:
self._snapshots[chain_id].append(snapshot_id)
return snapshot_id
|
Record the current state of the blockchain with intent to later
call the method :meth:`~ape.managers.chain.ChainManager.revert`
to go back to this point. This method is for local networks only.
Raises:
NotImplementedError: When the active provider does not support
snapshotting.
Returns:
:class:`~ape.types.SnapshotID`: The snapshot ID.
|
snapshot
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
def restore(self, snapshot_id: Optional["SnapshotID"] = None):
"""
Regress the current call using the given snapshot ID.
Allows developers to go back to a previous state.
Raises:
NotImplementedError: When the active provider does not support
snapshotting.
:class:`~ape.exceptions.UnknownSnapshotError`: When the snapshot ID is not cached.
:class:`~ape.exceptions.ChainError`: When there are no snapshot IDs to select from.
Args:
snapshot_id (Optional[:class:`~ape.types.SnapshotID`]): The snapshot ID. Defaults
to the most recent snapshot ID.
"""
chain_id = self.provider.chain_id
if snapshot_id is None and not self._snapshots[chain_id]:
raise ChainError("There are no snapshots to revert to.")
elif snapshot_id is None:
snapshot_id = self._snapshots[chain_id].pop()
elif snapshot_id not in self._snapshots[chain_id]:
raise UnknownSnapshotError(snapshot_id)
else:
snapshot_index = self._snapshots[chain_id].index(snapshot_id)
self._snapshots[chain_id] = self._snapshots[chain_id][:snapshot_index]
self.provider.restore(snapshot_id)
self.history.revert_to_block(self.blocks.height)
|
Regress the current call using the given snapshot ID.
Allows developers to go back to a previous state.
Raises:
NotImplementedError: When the active provider does not support
snapshotting.
:class:`~ape.exceptions.UnknownSnapshotError`: When the snapshot ID is not cached.
:class:`~ape.exceptions.ChainError`: When there are no snapshot IDs to select from.
Args:
snapshot_id (Optional[:class:`~ape.types.SnapshotID`]): The snapshot ID. Defaults
to the most recent snapshot ID.
|
restore
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
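A minimal snapshot/restore sketch for a local development provider that supports snapshotting::

    from ape import chain

    snapshot_id = chain.snapshot()
    # ... send transactions that mutate state ...
    chain.restore(snapshot_id)   # or chain.restore() to pop the most recent snapshot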
def isolate(self):
"""
Run code in an isolated context.
Requires using a local provider that supports snapshotting.
Usage example::
owner = accounts[0]
with chain.isolate():
contract = owner.deploy(project.MyContract)
receipt = contract.fooBar(sender=owner)
"""
snapshot = None
try:
snapshot = self.snapshot()
except APINotImplementedError:
logger.warning("Provider does not support snapshotting.")
pending = self.pending_timestamp
start_ecosystem_name = self.provider.network.ecosystem.name
start_network_name = self.provider.network.name
start_provider_name = self.provider.name
try:
yield
finally:
if snapshot is None:
logger.error("Failed to create snapshot.")
return
end_ecosystem_name = self.provider.network.ecosystem.name
end_network_name = self.provider.network.name
end_provider_name = self.provider.name
if (
start_ecosystem_name != end_ecosystem_name
or start_network_name != end_network_name
or start_provider_name != end_provider_name
):
logger.warning("Provider changed before isolation completed.")
return
self.chain_manager.restore(snapshot)
try:
self.pending_timestamp = pending
except APINotImplementedError:
# Provider does not support time travel.
pass
|
Run code in an isolated context.
Requires using a local provider that supports snapshotting.
Usage example::
owner = accounts[0]
with chain.isolate():
contract = owner.deploy(project.MyContract)
receipt = contract.fooBar(sender=owner)
|
isolate
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
def mine(
self,
num_blocks: int = 1,
timestamp: Optional[int] = None,
deltatime: Optional[int] = None,
) -> None:
"""
Mine any given number of blocks.
Raises:
ValueError: When both a ``timestamp`` and a ``deltatime`` argument are passed.
Args:
num_blocks (int): Choose the number of blocks to mine.
Defaults to 1 block.
timestamp (Optional[int]): Designate a time (in seconds) to begin mining.
Defaults to None.
deltatime (Optional[int]): Designate a change in time (in seconds) to begin mining.
Defaults to None.
"""
if timestamp and deltatime:
raise ValueError("Cannot give both `timestamp` and `deltatime` arguments together.")
if timestamp:
self.pending_timestamp = timestamp
elif deltatime:
self.pending_timestamp += deltatime
self.provider.mine(num_blocks)
|
Mine any given number of blocks.
Raises:
ValueError: When both a ``timestamp`` and a ``deltatime`` argument are passed.
Args:
num_blocks (int): Choose the number of blocks to mine.
Defaults to 1 block.
timestamp (Optional[int]): Designate a time (in seconds) to begin mining.
Defaults to None.
deltatime (Optional[int]): Designate a change in time (in seconds) to begin mining.
Defaults to None.
|
mine
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
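A minimal sketch for a local development provider; the block count and time delta are arbitrary::

    from ape import chain

    chain.mine()                  # mine a single block
    chain.mine(5, deltatime=60)   # advance the pending timestamp 60 seconds, then mine 5 blocks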
def get_balance(
self, address: Union[BaseAddress, AddressType, str], block_id: Optional["BlockID"] = None
) -> int:
"""
Get the balance of the given address. If ``ape-ens`` is installed,
you can pass ENS names.
Args:
address (BaseAddress, AddressType | str): An address, ENS, or account/contract.
block_id (:class:`~ape.types.BlockID` | None): The block ID. Defaults to latest.
Returns:
int: The account balance.
"""
if (isinstance(address, str) and not address.startswith("0x")) or not isinstance(
address, str
):
# Handles accounts, ENS, integers, aliases, everything.
address = self.account_manager.resolve_address(address)
return self.provider.get_balance(address, block_id=block_id)
|
Get the balance of the given address. If ``ape-ens`` is installed,
you can pass ENS names.
Args:
address (BaseAddress, AddressType | str): An address, ENS, or account/contract.
block_id (:class:`~ape.types.BlockID` | None): The block ID. Defaults to latest.
Returns:
int: The account balance.
|
get_balance
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
|
def set_balance(self, account: Union[BaseAddress, AddressType, str], amount: Union[int, str]):
"""
Set an account balance; this only works on development chains.
Args:
account (BaseAddress, AddressType | str): The account.
amount (int | str): The new balance.
"""
if isinstance(account, BaseAddress):
account = account.address
if isinstance(amount, str) and len(str(amount).split(" ")) > 1:
# Support values like "1000 ETH".
amount = self.conversion_manager.convert(amount, int)
elif isinstance(amount, str):
# Support hex strings
amount = int(amount, 16)
return self.provider.set_balance(account, amount)
|
Set an account balance; this only works on development chains.
Args:
account (BaseAddress, AddressType | str): The account.
amount (int | str): The new balance.
|
set_balance
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
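A sketch for ``set_balance`` on a development chain; the ``chain`` access path is an assumption, and the string forms rely on the conversion behavior shown in the code above:

from ape import accounts, chain

acct = accounts.test_accounts[0]

chain.set_balance(acct, 10**18)                       # plain integer (wei)
chain.set_balance(acct.address, "1000 ETH")           # currency string
chain.set_balance(acct.address, "0xde0b6b3a7640000")  # hex string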
|
def get_receipt(self, transaction_hash: str) -> ReceiptAPI:
"""
Get a transaction receipt from the chain.
Args:
transaction_hash (str): The hash of the transaction.
Returns:
            :class:`~ape.api.transactions.ReceiptAPI`
"""
receipt = self.chain_manager.history[transaction_hash]
if not isinstance(receipt, ReceiptAPI):
raise TransactionNotFoundError(transaction_hash=transaction_hash)
return receipt
|
Get a transaction receipt from the chain.
Args:
transaction_hash (str): The hash of the transaction.
Returns:
    :class:`~ape.api.transactions.ReceiptAPI`
|
get_receipt
|
python
|
ApeWorX/ape
|
src/ape/managers/chain.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/chain.py
|
Apache-2.0
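A sketch for ``get_receipt``; the contract name is hypothetical and the access path via the ``chain`` manager is assumed:

from ape import accounts, chain, project

owner = accounts.test_accounts[0]
contract = owner.deploy(project.MyContract)  # hypothetical contract

receipt = chain.get_receipt(contract.txn_hash)
print(receipt.gas_used)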
|
def registered_compilers(self) -> dict[str, "CompilerAPI"]:
"""
Each compile-able file extension mapped to its respective
:class:`~ape.api.compiler.CompilerAPI` instance.
Returns:
dict[str, :class:`~ape.api.compiler.CompilerAPI`]: The mapping of file-extensions
to compiler API classes.
"""
registered_compilers = {}
for plugin_name, (extensions, compiler_class) in self.plugin_manager.register_compiler:
self.config_manager.get_config(plugin_name)
compiler = compiler_class()
for extension in extensions:
if extension not in registered_compilers:
registered_compilers[extension] = compiler
return registered_compilers
|
Each compile-able file extension mapped to its respective
:class:`~ape.api.compiler.CompilerAPI` instance.
Returns:
dict[str, :class:`~ape.api.compiler.CompilerAPI`]: The mapping of file-extensions
to compiler API classes.
|
registered_compilers
|
python
|
ApeWorX/ape
|
src/ape/managers/compilers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/compilers.py
|
Apache-2.0
|
def compile(
self,
contract_filepaths: Union[Path, str, Iterable[Union[Path, str]]],
project: Optional["ProjectManager"] = None,
settings: Optional[dict] = None,
excluded_compilers: Optional[list[str]] = None,
) -> Iterator["ContractType"]:
"""
        Invoke :meth:`ape.api.compiler.CompilerAPI.compile` for each of the given files.
For example, use the `ape-solidity plugin <https://github.com/ApeWorX/ape-solidity>`__
to compile ``'.sol'`` files.
Raises:
:class:`~ape.exceptions.CompilerError`: When there is no compiler found for the given
file-extension as well as when there are contract-type collisions across compilers.
Args:
contract_filepaths (Union[Path, str, Iterable[Union[Path, str]]]): The files to
compile, as ``pathlib.Path`` objects or path-strs.
project (Optional[:class:`~ape.managers.project.ProjectManager`]): Optionally
                compile a different project than the one from the current-working directory.
settings (Optional[Dict]): Adhoc compiler settings. Defaults to None.
Ensure the compiler name key is present in the dict for it to work.
Returns:
Iterator[``ContractType``]: An iterator of contract types.
"""
pm = project or self.local_project
files_by_ext = defaultdict(list)
if isinstance(contract_filepaths, (str, Path)):
contract_filepaths = (contract_filepaths,)
for path in map(Path, contract_filepaths):
suffix = get_full_extension(path)
if suffix in self.registered_compilers:
files_by_ext[suffix].append(path)
errors = []
tracker: dict[str, str] = {}
settings = settings or {}
for next_ext, path_set in files_by_ext.items():
compiler = self.registered_compilers[next_ext]
if excluded_compilers and compiler.name.lower() in excluded_compilers:
continue
try:
compiler_settings = settings.get(compiler.name, {})
for contract in compiler.compile(path_set, project=pm, settings=compiler_settings):
if contract.name in tracker:
raise CompilerError(
f"ContractType collision. "
f"Contracts '{tracker[contract.name]}' and '{contract.source_id}' "
f"share the name '{contract.name}'."
)
if contract.name and contract.source_id:
tracker[contract.name] = contract.source_id
yield contract
except CompilerError as err:
# One of the compilers failed. Show the error but carry on.
logger.log_debug_stack_trace()
errors.append(err)
continue
if len(errors) == 1:
# If only 1 error, just raise that.
raise errors[0]
elif len(errors) > 1:
# Raise a combined error.
formatted_errors = [f"{e}" for e in errors]
error_message = "\n\n".join(formatted_errors)
raise CompilerError(error_message)
# else: successfully compiled everything!
|
Invoke :meth:`ape.api.compiler.CompilerAPI.compile` for each of the given files.
For example, use the `ape-solidity plugin <https://github.com/ApeWorX/ape-solidity>`__
to compile ``'.sol'`` files.
Raises:
:class:`~ape.exceptions.CompilerError`: When there is no compiler found for the given
file-extension as well as when there are contract-type collisions across compilers.
Args:
contract_filepaths (Union[Path, str, Iterable[Union[Path, str]]]): The files to
compile, as ``pathlib.Path`` objects or path-strs.
project (Optional[:class:`~ape.managers.project.ProjectManager`]): Optionally
        compile a different project than the one from the current-working directory.
settings (Optional[Dict]): Adhoc compiler settings. Defaults to None.
Ensure the compiler name key is present in the dict for it to work.
Returns:
Iterator[``ContractType``]: An iterator of contract types.
|
compile
|
python
|
ApeWorX/ape
|
src/ape/managers/compilers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/compilers.py
|
Apache-2.0
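A sketch of calling ``compile`` directly through the top-level ``compilers`` manager; the file names are hypothetical and the matching compiler plugins (e.g. ``ape-solidity``, ``ape-vyper``) must be installed:

from pathlib import Path
from ape import compilers, project

paths = [Path("contracts/Token.sol"), Path("contracts/Vault.vy")]  # hypothetical sources

# Collect the resulting contract types by name.
contract_types = {ct.name: ct for ct in compilers.compile(paths, project=project)}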
|
def compile_source(
self,
compiler_name: str,
code: str,
project: Optional["ProjectManager"] = None,
settings: Optional[dict] = None,
**kwargs,
) -> ContractContainer:
"""
Compile the given program.
Usage example::
code = '[{"name":"foo","type":"fallback", "stateMutability":"nonpayable"}]'
contract_container = compilers.compile_source(
"ethpm",
code,
contractName="MyContract",
)
Args:
compiler_name (str): The name of the compiler to use.
code (str): The source code to compile.
project (Optional[:class:`~ape.managers.project.ProjectManager`]): Optionally
                compile a different project than the one from the current-working directory.
settings (Optional[dict]): Compiler settings.
**kwargs (Any): Additional overrides for the ``ethpm_types.ContractType`` model.
Returns:
``ContractContainer``: A contract container ready to be deployed.
"""
compiler = self.get_compiler(compiler_name, settings=settings)
if not compiler:
raise ValueError(f"Compiler '{compiler_name}' not found.")
contract_type = compiler.compile_code(code, project=project, **kwargs)
return ContractContainer(contract_type=contract_type)
|
Compile the given program.
Usage example::
code = '[{"name":"foo","type":"fallback", "stateMutability":"nonpayable"}]'
contract_container = compilers.compile_source(
"ethpm",
code,
contractName="MyContract",
)
Args:
compiler_name (str): The name of the compiler to use.
code (str): The source code to compile.
project (Optional[:class:`~ape.managers.project.ProjectManager`]): Optionally
        compile a different project than the one from the current-working directory.
settings (Optional[dict]): Compiler settings.
**kwargs (Any): Additional overrides for the ``ethpm_types.ContractType`` model.
Returns:
``ContractContainer``: A contract container ready to be deployed.
|
compile_source
|
python
|
ApeWorX/ape
|
src/ape/managers/compilers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/compilers.py
|
Apache-2.0
|
def get_imports(
self,
contract_filepaths: Sequence[Path],
project: Optional["ProjectManager"] = None,
) -> dict[str, list[str]]:
"""
Combine import dicts from all compilers, where the key is a contract's source_id
and the value is a list of import source_ids.
Args:
contract_filepaths (Sequence[pathlib.Path]): A list of source file paths to compile.
project (Optional[:class:`~ape.managers.project.ProjectManager`]): Optionally provide
the project.
Returns:
dict[str, list[str]]: A dictionary like ``{source_id: [import_source_id, ...], ...}``
"""
imports_dict: dict[str, list[str]] = {}
for ext, compiler in self.registered_compilers.items():
try:
sources = [
p for p in contract_filepaths if get_full_extension(p) == ext and p.is_file()
]
imports = compiler.get_imports(contract_filepaths=sources, project=project)
except NotImplementedError:
imports = None
if imports:
imports_dict.update(imports)
return imports_dict
|
Combine import dicts from all compilers, where the key is a contract's source_id
and the value is a list of import source_ids.
Args:
contract_filepaths (Sequence[pathlib.Path]): A list of source file paths to compile.
project (Optional[:class:`~ape.managers.project.ProjectManager`]): Optionally provide
the project.
Returns:
dict[str, list[str]]: A dictionary like ``{source_id: [import_source_id, ...], ...}``
|
get_imports
|
python
|
ApeWorX/ape
|
src/ape/managers/compilers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/compilers.py
|
Apache-2.0
|
def get_references(self, imports_dict: dict[str, list[str]]) -> dict[str, list[str]]:
"""
Provide a mapping containing all referenced source_ids for a given project.
Each entry contains a source_id as a key and list of source_ids that reference a
given contract.
Args:
imports_dict (dict[str, list[str]]): A dictionary of source_ids from all compilers.
Returns:
dict[str, list[str]]: A dictionary like ``{source_id: [referring_source_id, ...], ...}``
"""
references_dict: dict[str, list[str]] = {}
if not imports_dict:
return {}
for key, imports_list in imports_dict.items():
for filepath in imports_list:
if filepath not in references_dict:
references_dict[filepath] = []
references_dict[filepath].append(key)
return references_dict
|
Provide a mapping containing all referenced source_ids for a given project.
Each entry contains a source_id as a key and list of source_ids that reference a
given contract.
Args:
imports_dict (dict[str, list[str]]): A dictionary of source_ids from all compilers.
Returns:
dict[str, list[str]]: A dictionary like ``{source_id: [referring_source_id, ...], ...}``
|
get_references
|
python
|
ApeWorX/ape
|
src/ape/managers/compilers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/compilers.py
|
Apache-2.0
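A sketch tying ``get_imports`` and ``get_references`` together; the source IDs in the comment are hypothetical:

from ape import compilers, project

# {source_id: [imported_source_id, ...]}
imports = compilers.get_imports(list(project.sources.paths), project=project)

# Invert: {source_id: [source_ids that import it, ...]}
references = compilers.get_references(imports)
# e.g. references.get("contracts/interfaces/IERC20.sol") -> ["contracts/Token.sol", ...]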
|
def enrich_error(self, err: ContractLogicError) -> ContractLogicError:
"""
        Enrich a contract logic error using compiler information, such
        as known PC locations for compiler runtime errors.
Args:
err (:class:`~ape.exceptions.ContractLogicError`): The exception
to enrich.
Returns:
:class:`~ape.exceptions.ContractLogicError`: The enriched exception.
"""
# First, try enriching using their ABI.
err = self.get_custom_error(err) or err
if not (contract_type := err.contract_type):
return err
# Delegate to compiler APIs.
elif source_id := contract_type.source_id:
# Source ID found! Delegate to a CompilerAPI for enrichment.
ext = get_full_extension(Path(source_id))
if ext not in self.registered_compilers:
# Compiler not found.
return err
compiler = self.registered_compilers[ext]
return compiler.enrich_error(err)
# No further enrichment.
return err
|
Enrich a contract logic error using compiler information, such
as known PC locations for compiler runtime errors.
Args:
err (:class:`~ape.exceptions.ContractLogicError`): The exception
to enrich.
Returns:
:class:`~ape.exceptions.ContractLogicError`: The enriched exception.
|
enrich_error
|
python
|
ApeWorX/ape
|
src/ape/managers/compilers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/compilers.py
|
Apache-2.0
|
def get_custom_error(self, err: ContractLogicError) -> Optional[CustomError]:
"""
Get a custom error for the given contract logic error using the contract-type
found from address-data in the error. Returns ``None`` if the given error is
not a custom-error, or it is not able to find the associated contract type or
address.
Args:
err (:class:`~ape.exceptions.ContractLogicError`): The error to enrich
as a custom error.
Returns:
Optional[:class:`~ape.exceptions.CustomError`]
"""
message = err.revert_message
if not message.startswith("0x"):
return None
elif not (address := err.address):
return None
if provider := self.network_manager.active_provider:
ecosystem = provider.network.ecosystem
else:
# Default to Ethereum.
ecosystem = self.network_manager.ethereum
try:
return ecosystem.decode_custom_error(
HexBytes(message),
address,
base_err=err.base_err,
source_traceback=lambda: err.source_traceback,
trace=err.trace,
txn=err.txn,
)
except NotImplementedError:
return None
|
Get a custom error for the given contract logic error using the contract-type
found from address-data in the error. Returns ``None`` if the given error is
not a custom-error, or it is not able to find the associated contract type or
address.
Args:
err (:class:`~ape.exceptions.ContractLogicError`): The error to enrich
as a custom error.
Returns:
Optional[:class:`~ape.exceptions.CustomError`]
|
get_custom_error
|
python
|
ApeWorX/ape
|
src/ape/managers/compilers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/compilers.py
|
Apache-2.0
|
def flatten_contract(self, path: Path, **kwargs) -> "Content":
"""
Get the flattened version of a contract via its source path.
Delegates to the matching :class:`~ape.api.compilers.CompilerAPI`.
Args:
path (``pathlib.Path``): The source path of the contract.
Returns:
``ethpm_types.source.Content``: The flattened contract content.
"""
suffix = get_full_extension(path)
if suffix not in self.registered_compilers:
raise CompilerError(f"Unable to flatten contract. Missing compiler for '{suffix}'.")
compiler = self.registered_compilers[suffix]
return compiler.flatten_contract(path, **kwargs)
|
Get the flattened version of a contract via its source path.
Delegates to the matching :class:`~ape.api.compilers.CompilerAPI`.
Args:
path (``pathlib.Path``): The source path of the contract.
Returns:
``ethpm_types.source.Content``: The flattened contract content.
|
flatten_contract
|
python
|
ApeWorX/ape
|
src/ape/managers/compilers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/compilers.py
|
Apache-2.0
|
def can_trace_source(self, filename: str) -> bool:
"""
        Check if Ape is able to trace the source lines for the given file.
Checks that both the compiler is registered and that it supports
the :meth:`~ape.api.compilers.CompilerAPI.trace_source` API method.
Args:
filename (str): The file to check.
Returns:
bool: ``True`` when the source is traceable.
"""
path = Path(filename)
if not path.is_file():
return False
extension = get_full_extension(path)
if extension in self.registered_compilers:
compiler = self.registered_compilers[extension]
if compiler.supports_source_tracing:
return True
# We are not able to get coverage for this file.
return False
|
Check if Ape is able to trace the source lines for the given file.
Checks that both the compiler is registered and that it supports
the :meth:`~ape.api.compilers.CompilerAPI.trace_source` API method.
Args:
filename (str): The file to check.
Returns:
bool: ``True`` when the source is traceable.
|
can_trace_source
|
python
|
ApeWorX/ape
|
src/ape/managers/compilers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/compilers.py
|
Apache-2.0
|
def isolate_data_folder(
self, keep: Optional[Union[Iterable[str], str]] = None
) -> Iterator[Path]:
"""
        Change Ape's DATA_FOLDER to point to a temporary path,
        in a context, for testing purposes. Any data
        cached to disk will not persist.
        Args:
            keep (Optional[Union[Iterable[str], str]]): Optionally, pass in
                a subdirectory name (or iterable of names) to include in the new isolated
                data folder. For example, passing ``"packages"`` avoids
                having to re-download dependencies in an isolated environment.
Returns:
Iterator[Path]: The temporary data folder.
"""
original_data_folder = self.DATA_FOLDER
if in_tempdir(original_data_folder):
# Already isolated.
yield original_data_folder
else:
keep = [keep] if isinstance(keep, str) else keep or []
try:
with create_tempdir() as temp_data_folder:
# Copy in items from "keep".
for item in keep:
path_to_keep = original_data_folder / item
if not path_to_keep.is_dir():
continue
dest_path = temp_data_folder / item
shutil.copytree(path_to_keep, dest_path)
self.DATA_FOLDER = temp_data_folder
yield temp_data_folder
finally:
self.DATA_FOLDER = original_data_folder
|
Change Ape's DATA_FOLDER to point to a temporary path,
in a context, for testing purposes. Any data
cached to disk will not persist.
Args:
    keep (Optional[Union[Iterable[str], str]]): Optionally, pass in
        a subdirectory name (or iterable of names) to include in the new isolated
        data folder. For example, passing ``"packages"`` avoids
        having to re-download dependencies in an isolated environment.
Returns:
Iterator[Path]: The temporary data folder.
|
isolate_data_folder
|
python
|
ApeWorX/ape
|
src/ape/managers/config.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/config.py
|
Apache-2.0
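A sketch for ``isolate_data_folder``, assuming it is used as a context manager on the top-level ``config`` manager (it yields the temporary path):

from ape import config

# Point DATA_FOLDER at a throw-away directory but keep the "packages"
# subdirectory so dependencies need not be re-downloaded.
with config.isolate_data_folder(keep="packages") as tmp_data_folder:
    print(tmp_data_folder)  # somewhere under the system temp directory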
|
def convert(self, value: Any, to_type: Union[type, tuple, list]) -> Any:
"""
Convert the given value to the given type. This method accesses
        all :class:`~ape.api.convert.ConverterAPI` instances known to
        ``ape`` and selects the appropriate one, so long as one exists.
Raises:
:class:`~ape.exceptions.ConversionError`: When there is not a registered
converter for the given arguments.
Args:
value (any): The value to convert.
to_type (to_type): The type to convert the value to.
Returns:
any: The same given value but with the new given type.
"""
if isinstance(value, (list, tuple)) and isinstance(to_type, tuple):
# We expected to convert a tuple type, so convert each item in the tuple.
# NOTE: We allow values to be a list, just in case it is a list
return [self.convert(v, t) for v, t in zip(value, to_type)]
elif isinstance(value, (list, tuple)) and isinstance(to_type, list) and len(to_type) == 1:
# We expected to convert an array type(dynamic or static),
# so convert each item in the list.
# NOTE: type for static and dynamic array is a single item
# list containing the type of the array.
return [self.convert(v, to_type[0]) for v in value]
elif isinstance(to_type, (list, tuple)):
raise ConversionError(
f"Value '{value}' must be a list or tuple when given multiple types."
)
elif to_type is ChecksumAddress:
# Use our Annotated alias.
return self.convert(value, AddressType)
elif to_type not in self._converters:
options = ", ".join([_get_type_name_from_type(t) for t in self._converters])
raise ConversionError(f"Type '{to_type}' must be one of [{options}].")
elif self.is_type(value, to_type) and not isinstance(value, (list, tuple)):
# NOTE: Always process lists and tuples
return value
for converter in self._converters[to_type]:
try:
is_convertible = converter.is_convertible(value)
except Exception as err:
# If errors while checking if we can convert, log the error
# and assume it's not convertible.
converter_name = converter.__class__.__name__
msg = f"Issue while checking `{converter_name}.is_convertible()`: {err}"
logger.error(msg)
continue
if not is_convertible:
continue
try:
return converter.convert(value)
except Exception as err:
try:
error_value = f" '{value}' (type={type(value)}) "
except Exception:
error_value = " "
message = f"Failed to convert{error_value}"
if converter_type_name := getattr(type(converter), "__name__", None):
message = f"{message}using '{converter_type_name}'."
raise ConversionError(message) from err
raise ConversionError(f"No conversion registered to handle '{value}'.")
|
Convert the given value to the given type. This method accesses
all :class:`~ape.api.convert.ConverterAPI` instances known to
``ape`` and selects the appropriate one, so long as one exists.
Raises:
:class:`~ape.exceptions.ConversionError`: When there is not a registered
converter for the given arguments.
Args:
value (any): The value to convert.
to_type (to_type): The type to convert the value to.
Returns:
any: The same given value but with the new given type.
|
convert
|
python
|
ApeWorX/ape
|
src/ape/managers/converters.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/converters.py
|
Apache-2.0
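Typical calls to ``convert`` through the top-level helper; which conversions succeed depends on the converter plugins installed:

from ape import convert
from ape.types import AddressType

wei = convert("1 ether", int)   # 1_000_000_000_000_000_000
gwei = convert("2 gwei", int)

# Normalize an address-like value to a checksummed AddressType.
addr = convert("0x" + "00" * 20, AddressType)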
|
def get_converter(self, name: str) -> ConverterAPI:
"""
Get a converter plugin by name.
Args:
name (str): The name of the converter.
Returns:
:class:`~ape.api.converters.ConverterAPI`: The converter.
"""
for converter_ls in self._converters.values():
for converter in converter_ls:
if converter.name == name.lower():
return converter
        raise ConversionError(f"No converter with name '{name}'.")
|
Get a converter plugin by name.
Args:
name (str): The name of the converter.
Returns:
:class:`~ape.api.converters.ConverterAPI`: The converter.
|
get_converter
|
python
|
ApeWorX/ape
|
src/ape/managers/converters.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/converters.py
|
Apache-2.0
|
def running_nodes(self) -> NodeProcessMap:
"""
All running development nodes managed by Ape.
"""
path = self.config_manager.DATA_FOLDER / "processes" / "nodes.json"
try:
return NodeProcessMap.model_validate_file(path)
except ValidationError:
path.unlink(missing_ok=True)
return NodeProcessMap.model_validate_file(path)
|
All running development nodes managed by Ape.
|
running_nodes
|
python
|
ApeWorX/ape
|
src/ape/managers/networks.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/networks.py
|
Apache-2.0
|
def get_running_node(self, pid: int) -> "SubprocessProvider":
"""
Get a running subprocess provider for the given ``pid``.
Args:
pid (int): The process ID.
Returns:
            :class:`~ape.api.providers.SubprocessProvider`
"""
if not (data := self.running_nodes.get(pid)):
raise NetworkError(f"No running node for pid '{pid}'.")
uri: Optional[Union[str, Path]] = None
if ipc := data.ipc_path:
if ipc.exists():
uri = ipc
else:
uri = data.http_uri or data.ws_uri
        if uri is None:
            raise NetworkError(f"Cannot connect to node on PID '{pid}': Missing URI data.")
# In this case, we want the more connectable network choice.
network_parts = data.network_choice.split(":")
network_choice = f"{':'.join(network_parts[:2])}:{uri}"
provider_settings: dict = {
network_parts[0]: {
network_parts[1]: {
"ipc_path": data.ipc_path,
"http_uri": data.http_uri,
"ws_uri": data.ws_uri,
"uri": None,
}
}
}
provider = self.get_provider_from_choice(
network_choice=network_choice, provider_settings=provider_settings or None
)
# If this is not a subprocess provider, it may be ok to proceed.
# However, the rest of Ape will assume it is.
return provider # type: ignore[return-value]
|
Get a running subprocess provider for the given ``pid``.
Args:
pid (int): The process ID.
Returns:
    :class:`~ape.api.providers.SubprocessProvider`
|
get_running_node
|
python
|
ApeWorX/ape
|
src/ape/managers/networks.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/networks.py
|
Apache-2.0
|
def kill_node_process(self, *process_ids: int) -> dict[int, NodeProcessData]:
"""
Kill a node process managed by Ape.
Args:
            *process_ids (int): The process IDs to kill.
        Returns:
            dict[int, :class:`~ape.managers.networks.NodeProcessData`]: The process data
of all terminated processes.
"""
if not self.running_nodes:
return {}
pids_killed = {}
for pid in process_ids:
if not (data := self.running_nodes.nodes.get(pid)):
continue
try:
provider = self.get_running_node(pid)
except Exception:
# Still try to kill the process (below).
pass
else:
# Gracefully disconnect _before_ killing process.
provider.disconnect()
try:
os.kill(pid, signal.SIGTERM)
except Exception:
pass
else:
pids_killed[pid] = data
self.running_nodes.remove_processes(*process_ids)
return pids_killed
|
Kill a node process managed by Ape.
Args:
    *process_ids (int): The process IDs to kill.
Returns:
    dict[int, :class:`~ape.managers.networks.NodeProcessData`]: The process data
of all terminated processes.
|
kill_node_process
|
python
|
ApeWorX/ape
|
src/ape/managers/networks.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/networks.py
|
Apache-2.0
|
def get_request_headers(
self, ecosystem_name: str, network_name: str, provider_name: str
) -> "RPCHeaders":
"""
All request headers to be used when connecting to this network.
"""
ecosystem = self.get_ecosystem(ecosystem_name)
network = ecosystem.get_network(network_name)
provider = network.get_provider(provider_name)
headers = self.config_manager._get_request_headers()
for obj in (ecosystem, network, provider):
for key, value in obj._get_request_headers().items():
headers[key] = value
return headers
|
All request headers to be used when connecting to this network.
|
get_request_headers
|
python
|
ApeWorX/ape
|
src/ape/managers/networks.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/networks.py
|
Apache-2.0
|
def fork(
self,
provider_name: Optional[str] = None,
provider_settings: Optional[dict] = None,
block_number: Optional[int] = None,
) -> ProviderContextManager:
"""
Fork the currently connected network.
Args:
provider_name (str, optional): The name of the provider to get. Defaults to ``None``.
When ``None``, returns the default provider.
provider_settings (dict, optional): Settings to apply to the provider. Defaults to
``None``.
block_number (Optional[int]): Optionally specify the block number you wish to fork.
Negative block numbers are relative to HEAD. Defaults to the configured fork
block number or HEAD.
Returns:
:class:`~ape.api.networks.ProviderContextManager`
"""
network_name = self.network.name
is_fork_already = network_name.endswith("-fork")
forked_network_name = network_name if is_fork_already else f"{network_name}-fork"
try:
forked_network = self.ecosystem.get_network(forked_network_name)
except NetworkNotFoundError as err:
raise NetworkError(f"Unable to fork network '{network_name}'.") from err
provider_settings = provider_settings or {}
if is_fork_already and "host" not in provider_settings:
            # Forking a fork; to ensure it uses a different port,
            # use the "auto-port" feature.
provider_settings["host"] = "auto"
fork_settings = {}
if block_number is not None:
# Negative block_number means relative to HEAD
if block_number < 0:
latest_block_number = self.provider.get_block("latest").number or 0
block_number = latest_block_number + block_number
if block_number < 0:
# If the block number is still negative, they have forked past genesis.
raise NetworkError("Unable to fork past genesis block.")
# Ensure block_number is set in config for this network
fork_settings["block_number"] = block_number
if uri := self.provider.connection_str:
fork_settings["upstream_provider"] = uri
_dict_overlay(
provider_settings,
{"fork": {self.ecosystem.name: {self.network.name: fork_settings}}},
)
shared_kwargs: dict = {
"provider_settings": provider_settings,
"disconnect_after": True,
}
return (
forked_network.use_provider(provider_name, **shared_kwargs)
if provider_name
else forked_network.use_default_provider(**shared_kwargs)
)
|
Fork the currently connected network.
Args:
provider_name (str, optional): The name of the provider to get. Defaults to ``None``.
When ``None``, returns the default provider.
provider_settings (dict, optional): Settings to apply to the provider. Defaults to
``None``.
block_number (Optional[int]): Optionally specify the block number you wish to fork.
Negative block numbers are relative to HEAD. Defaults to the configured fork
block number or HEAD.
Returns:
:class:`~ape.api.networks.ProviderContextManager`
|
fork
|
python
|
ApeWorX/ape
|
src/ape/managers/networks.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/networks.py
|
Apache-2.0
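A sketch for ``fork``; it assumes you are already connected to a live network and that a fork-capable provider plugin is installed (``foundry`` here is only an example):

from ape import networks

# Fork the connected network 100 blocks behind HEAD.
with networks.fork(provider_name="foundry", block_number=-100) as provider:
    print(provider.network.name)  # e.g. "mainnet-fork"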
|
def provider_names(self) -> set[str]:
"""
The set of all provider names in ``ape``.
"""
return set(
provider
for ecosystem in self.ecosystems.values()
for network in ecosystem.networks.values()
for provider in network.providers
)
|
The set of all provider names in ``ape``.
|
provider_names
|
python
|
ApeWorX/ape
|
src/ape/managers/networks.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/networks.py
|
Apache-2.0
|
def ecosystems(self) -> dict[str, "EcosystemAPI"]:
"""
All the registered ecosystems in ``ape``, such as ``ethereum``.
"""
return {
**self._evmchains_ecosystems,
**self._plugin_ecosystems,
**self._custom_ecosystems,
}
|
All the registered ecosystems in ``ape``, such as ``ethereum``.
|
ecosystems
|
python
|
ApeWorX/ape
|
src/ape/managers/networks.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/networks.py
|
Apache-2.0
|
def create_custom_provider(
self,
connection_str: str,
provider_cls: type["ProviderAPI"] = EthereumNodeProvider,
provider_name: Optional[str] = None,
) -> "ProviderAPI":
"""
Create a custom connection to a URI using the EthereumNodeProvider provider.
**NOTE**: This provider will assume EVM-like behavior and this is generally not recommended.
Use plugins when possible!
Args:
connection_str (str): The connection string of the node, such as its URI
when using HTTP.
provider_cls (type[:class:`~ape.api.providers.ProviderAPI`]): Defaults to
:class:`~ape_ethereum.providers.EthereumNodeProvider`.
provider_name (Optional[str]): The name of the provider. Defaults to best guess.
Returns:
:class:`~ape.api.providers.ProviderAPI`: The Geth provider
implementation that comes with Ape.
"""
network = self.ethereum.custom_network
if provider_name is None:
if issubclass(provider_cls, EthereumNodeProvider):
name = "node"
elif cls_name := getattr(provider_cls, "name", None):
name = cls_name
elif cls_name := provider_cls.__name__:
name = cls_name.lower()
else:
# Would be unusual for this to happen though.
name = "provider"
else:
name = provider_name
provider_settings: dict = {}
if connection_str.startswith("https://") or connection_str.startswith("http://"):
provider_settings["uri"] = connection_str
elif connection_str.endswith(".ipc"):
provider_settings["ipc_path"] = connection_str
else:
raise NetworkError(f"Scheme for '{connection_str}' not yet supported.")
return (provider_cls or EthereumNodeProvider)(
name=name,
network=network,
provider_settings=provider_settings,
data_folder=self.ethereum.data_folder / name,
)
|
Create a custom connection to a URI using the EthereumNodeProvider provider.
**NOTE**: This provider will assume EVM-like behavior and this is generally not recommended.
Use plugins when possible!
Args:
connection_str (str): The connection string of the node, such as its URI
when using HTTP.
provider_cls (type[:class:`~ape.api.providers.ProviderAPI`]): Defaults to
:class:`~ape_ethereum.providers.EthereumNodeProvider`.
provider_name (Optional[str]): The name of the provider. Defaults to best guess.
Returns:
:class:`~ape.api.providers.ProviderAPI`: The Geth provider
implementation that comes with Ape.
|
create_custom_provider
|
python
|
ApeWorX/ape
|
src/ape/managers/networks.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/networks.py
|
Apache-2.0
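A sketch for ``create_custom_provider``; the URI is hypothetical and, as the note above says, EVM-like behavior is assumed:

from ape import networks

provider = networks.create_custom_provider("http://localhost:8545")
provider.connect()
print(provider.chain_id)
provider.disconnect()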
|
def get_network_choices(
self,
ecosystem_filter: Optional[Union[list[str], str]] = None,
network_filter: Optional[Union[list[str], str]] = None,
provider_filter: Optional[Union[list[str], str]] = None,
) -> Iterator[str]:
"""
The set of all possible network choices available as a "network selection"
e.g. ``--network [ECOSYSTEM:NETWORK:PROVIDER]``.
Each value is in the form ``ecosystem:network:provider`` and shortened options also
appear in the list. For example, ``::node`` would default to ``:ethereum:local:node``
and both will be in the returned list. The values come from each
:class:`~ape.api.providers.ProviderAPI` that is installed.
Use the CLI command ``ape networks list`` to list all the possible network
combinations.
Args:
ecosystem_filter (Optional[Union[list[str], str]]): Get only the specified ecosystems.
Defaults to getting all ecosystems.
network_filter (Optional[Union[list[str], str]]): Get only the specified networks.
Defaults to getting all networks in ecosystems.
provider_filter (Optional[Union[list[str], str]]): Get only the specified providers.
Defaults to getting all providers in networks.
Returns:
Iterator[str]: An iterator over all the network-choice possibilities.
"""
ecosystem_filter = _validate_filter(ecosystem_filter, self.ecosystem_names)
network_filter = _validate_filter(network_filter, self.network_names)
provider_filter = _validate_filter(provider_filter, self.provider_names)
ecosystem_items = self.ecosystems
if ecosystem_filter:
ecosystem_items = {n: e for n, e in ecosystem_items.items() if n in ecosystem_filter}
for ecosystem_name, ecosystem in ecosystem_items.items():
network_items = ecosystem.networks
if network_filter:
network_items = {n: net for n, net in network_items.items() if n in network_filter}
if not network_items:
continue
ecosystem_has_providers = False
for network_name, network in network_items.items():
providers = network.providers
if provider_filter:
providers = [n for n in providers if n in provider_filter]
network_has_providers = len(providers) > 0
if not ecosystem_has_providers:
# Only check if we still haven't found any
ecosystem_has_providers = network_has_providers
if not network_has_providers:
continue
for provider_name in providers:
if (
ecosystem_name == self.default_ecosystem.name
and network_name == ecosystem.default_network_name
):
yield f"::{provider_name}"
if ecosystem_name == self.default_ecosystem.name:
yield f":{network_name}:{provider_name}"
if network_name == ecosystem.default_network_name:
yield f"{ecosystem_name}::{provider_name}"
# Always include the full path as an option.
yield f"{ecosystem_name}:{network_name}:{provider_name}"
# Providers were yielded if we reached this point.
if ecosystem_name == self.default_ecosystem.name:
yield f":{network_name}"
yield f"{ecosystem_name}:{network_name}"
if ecosystem_has_providers:
yield ecosystem_name
|
The set of all possible network choices available as a "network selection"
e.g. ``--network [ECOSYSTEM:NETWORK:PROVIDER]``.
Each value is in the form ``ecosystem:network:provider`` and shortened options also
appear in the list. For example, ``::node`` would default to ``:ethereum:local:node``
and both will be in the returned list. The values come from each
:class:`~ape.api.providers.ProviderAPI` that is installed.
Use the CLI command ``ape networks list`` to list all the possible network
combinations.
Args:
ecosystem_filter (Optional[Union[list[str], str]]): Get only the specified ecosystems.
Defaults to getting all ecosystems.
network_filter (Optional[Union[list[str], str]]): Get only the specified networks.
Defaults to getting all networks in ecosystems.
provider_filter (Optional[Union[list[str], str]]): Get only the specified providers.
Defaults to getting all providers in networks.
Returns:
Iterator[str]: An iterator over all the network-choice possibilities.
|
get_network_choices
|
python
|
ApeWorX/ape
|
src/ape/managers/networks.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/networks.py
|
Apache-2.0
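A sketch iterating the same data that backs ``ape networks list``; the example choices in the comment depend on which plugins are installed:

from ape import networks

for choice in networks.get_network_choices(ecosystem_filter="ethereum"):
    print(choice)  # e.g. "ethereum:sepolia:node", ":sepolia:node", "::node", ...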
|
def get_ecosystem(self, ecosystem_name: str) -> "EcosystemAPI":
"""
Get the ecosystem for the given name.
Args:
ecosystem_name (str): The name of the ecosystem to get.
Raises:
:class:`~ape.exceptions.NetworkError`: When the ecosystem is not found.
Returns:
:class:`~ape.api.networks.EcosystemAPI`
"""
# NOTE: This method purposely avoids "just checking self.ecosystems"
# for performance reasons and exiting the search as early as possible.
ecosystem_name = ecosystem_name.lower().replace(" ", "-")
try:
return self._plugin_ecosystems[ecosystem_name]
except KeyError:
pass
# Check if custom.
try:
return self._custom_ecosystems[ecosystem_name]
except KeyError:
pass
if ecosystem := self._get_ecosystem_from_evmchains(ecosystem_name):
return ecosystem
raise EcosystemNotFoundError(ecosystem_name, options=self.ecosystem_names)
|
Get the ecosystem for the given name.
Args:
ecosystem_name (str): The name of the ecosystem to get.
Raises:
:class:`~ape.exceptions.NetworkError`: When the ecosystem is not found.
Returns:
:class:`~ape.api.networks.EcosystemAPI`
|
get_ecosystem
|
python
|
ApeWorX/ape
|
src/ape/managers/networks.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/networks.py
|
Apache-2.0
|
def get_provider_from_choice(
self,
network_choice: Optional[str] = None,
provider_settings: Optional[dict] = None,
) -> "ProviderAPI":
"""
Get a :class:`~ape.api.providers.ProviderAPI` from a network choice.
A network choice is any value returned from
:meth:`~ape.managers.networks.NetworkManager.get_network_choices`. Use the
CLI command ``ape networks list`` to list all the possible network
combinations.
Raises:
:class:`~ape.exceptions.NetworkError`: When the given network choice does not
match any known network.
Args:
network_choice (str, optional): The network choice
(see :meth:`~ape.managers.networks.NetworkManager.get_network_choices`).
Defaults to the default ecosystem, network, and provider combination.
provider_settings (dict, optional): Settings for the provider. Defaults to None.
Returns:
:class:`~ape.api.providers.ProviderAPI`
"""
if network_choice is None:
default_network = self.default_ecosystem.default_network
return default_network.get_provider(provider_settings=provider_settings)
elif network_choice.startswith("pid://"):
# Was given a process ID (already running node on local machine).
pid_str = network_choice[len("pid://") :]
if not pid_str.isdigit():
raise ValueError(f"Invalid PID: {pid_str}")
return self.get_running_node(int(pid_str))
elif _is_adhoc_url(network_choice):
# Custom network w/o ecosystem & network spec.
return self.create_custom_provider(network_choice)
selections = network_choice.split(":")
# NOTE: Handle case when URI is passed e.g. "http://..."
if len(selections) > 3:
provider_value = ":".join(selections[2:])
selections[2] = provider_value
selections = selections[:3]
if _is_adhoc_url(provider_value):
selections[1] = selections[1] or "custom"
if selections == network_choice or len(selections) == 1:
# Either split didn't work (in which case it matches the start)
# or there was nothing after the ``:`` (e.g. "ethereum:")
ecosystem = self.get_ecosystem(selections[0] or self.default_ecosystem.name)
# By default, the "local" network should be specified for
# any ecosystem (this should not correspond to a production chain)
default_network = ecosystem.default_network
return default_network.get_provider(provider_settings=provider_settings)
elif len(selections) == 2:
# Only ecosystem and network were specified, not provider
ecosystem_name, network_name = selections
ecosystem = self.get_ecosystem(ecosystem_name or self.default_ecosystem.name)
network = ecosystem.get_network(network_name or ecosystem.default_network_name)
return network.get_provider(provider_settings=provider_settings)
elif len(selections) == 3:
# Everything is specified, use specified provider for ecosystem and network
ecosystem_name, network_name, provider_name = selections
ecosystem = (
self.get_ecosystem(ecosystem_name) if ecosystem_name else self.default_ecosystem
)
network = ecosystem.get_network(network_name or ecosystem.default_network_name)
return network.get_provider(
provider_name=provider_name, provider_settings=provider_settings
)
else:
# NOTE: Might be unreachable
raise NetworkError("Invalid network selection.")
|
Get a :class:`~ape.api.providers.ProviderAPI` from a network choice.
A network choice is any value returned from
:meth:`~ape.managers.networks.NetworkManager.get_network_choices`. Use the
CLI command ``ape networks list`` to list all the possible network
combinations.
Raises:
:class:`~ape.exceptions.NetworkError`: When the given network choice does not
match any known network.
Args:
network_choice (str, optional): The network choice
(see :meth:`~ape.managers.networks.NetworkManager.get_network_choices`).
Defaults to the default ecosystem, network, and provider combination.
provider_settings (dict, optional): Settings for the provider. Defaults to None.
Returns:
:class:`~ape.api.providers.ProviderAPI`
|
get_provider_from_choice
|
python
|
ApeWorX/ape
|
src/ape/managers/networks.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/networks.py
|
Apache-2.0
|
def parse_network_choice(
self,
network_choice: Optional[str] = None,
provider_settings: Optional[dict] = None,
disconnect_after: bool = False,
disconnect_on_exit: bool = True,
) -> ProviderContextManager:
"""
Parse a network choice into a context manager for managing a temporary
connection to a provider. See
:meth:`~ape.managers.networks.NetworkManager.get_network_choices` for all
available choices (or use CLI command ``ape networks list``).
Raises:
:class:`~ape.exceptions.NetworkError`: When the given network choice does not
match any known network.
Args:
network_choice (str, optional): The network choice
(see :meth:`~ape.managers.networks.NetworkManager.get_network_choices`).
Defaults to the default ecosystem, network, and provider combination.
provider_settings (dict, optional): Settings for the provider. Defaults to None.
disconnect_after (bool): Set to True to terminate the connection completely
at the end of context. NOTE: May only work if the network was also started
from this session.
disconnect_on_exit (bool): Whether to disconnect on the exit of the python
session. Defaults to ``True``.
Returns:
            :class:`~ape.api.networks.ProviderContextManager`
"""
provider = self.get_provider_from_choice(
network_choice=network_choice, provider_settings=provider_settings
)
return ProviderContextManager(
provider=provider,
disconnect_after=disconnect_after,
disconnect_on_exit=disconnect_on_exit,
)
|
Parse a network choice into a context manager for managing a temporary
connection to a provider. See
:meth:`~ape.managers.networks.NetworkManager.get_network_choices` for all
available choices (or use CLI command ``ape networks list``).
Raises:
:class:`~ape.exceptions.NetworkError`: When the given network choice does not
match any known network.
Args:
network_choice (str, optional): The network choice
(see :meth:`~ape.managers.networks.NetworkManager.get_network_choices`).
Defaults to the default ecosystem, network, and provider combination.
provider_settings (dict, optional): Settings for the provider. Defaults to None.
disconnect_after (bool): Set to True to terminate the connection completely
at the end of context. NOTE: May only work if the network was also started
from this session.
disconnect_on_exit (bool): Whether to disconnect on the exit of the python
session. Defaults to ``True``.
Returns:
    :class:`~ape.api.networks.ProviderContextManager`
|
parse_network_choice
|
python
|
ApeWorX/ape
|
src/ape/managers/networks.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/networks.py
|
Apache-2.0
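A sketch for ``parse_network_choice``; the choice string assumes the built-in ``test`` provider (from the bundled ape-test plugin) serves the local network:

from ape import networks

with networks.parse_network_choice("ethereum:local:test", disconnect_after=True) as provider:
    print(provider.get_block("latest").number)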
|
def set_default_ecosystem(self, ecosystem_name: str):
"""
Change the default ecosystem.
Raises:
:class:`~ape.exceptions.NetworkError`: When the given ecosystem name is unknown.
Args:
ecosystem_name (str): The name of the ecosystem to set
as the default.
"""
if ecosystem_name in self.ecosystem_names:
self._default_ecosystem_name = ecosystem_name
else:
raise EcosystemNotFoundError(ecosystem_name, options=self.ecosystem_names)
|
Change the default ecosystem.
Raises:
:class:`~ape.exceptions.NetworkError`: When the given ecosystem name is unknown.
Args:
ecosystem_name (str): The name of the ecosystem to set
as the default.
|
set_default_ecosystem
|
python
|
ApeWorX/ape
|
src/ape/managers/networks.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/networks.py
|
Apache-2.0
|
def valid_impl(api_class: Any) -> bool:
"""
Check if an API class is valid. The class must not have any unimplemented
abstract methods.
Args:
api_class (any)
Returns:
bool
"""
if isinstance(api_class, tuple):
return all(valid_impl(c) for c in api_class)
# Is not an ABC base class or abstractdataclass
if not hasattr(api_class, "__abstractmethods__"):
return True # not an abstract class
return len(api_class.__abstractmethods__) == 0
|
Check if an API class is valid. The class must not have any unimplemented
abstract methods.
Args:
api_class (any)
Returns:
bool
|
valid_impl
|
python
|
ApeWorX/ape
|
src/ape/managers/plugins.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/plugins.py
|
Apache-2.0
|
def get(self, source_id: str) -> Optional[Source]:
"""
Get a Source by source_id.
Args:
source_id (str): The source identifier.
Returns:
Source | None
"""
if source_id in self._sources:
return self._sources[source_id]
for path in self.paths:
if self._get_source_id(path) == source_id:
text: Union[str, dict]
if path.is_file():
try:
text = path.read_text(encoding="utf8")
except Exception:
continue
else:
text = {}
src = Source.model_validate(text)
self._sources[source_id] = src
return src
return None
|
Get a Source by source_id.
Args:
source_id (str): The source identifier.
Returns:
Source | None
|
get
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def is_excluded(self, path: Path) -> bool:
"""
Check if the given path is considered an "excluded"
file based on the configured ignore-patterns.
Args:
path (Path): The path to check.
Returns:
bool
"""
source_id = self._get_source_id(path)
if source_id in self._exclude_cache:
return self._exclude_cache[source_id]
# Non-files and hidden files are ignored.
is_file = path.is_file()
if not is_file or path.name.startswith("."):
# Ignore random hidden files if they are known source types.
self._exclude_cache[source_id] = True
return True
# Files with missing compiler extensions are also ignored.
suffix = get_full_extension(path)
registered = self.compiler_manager.registered_compilers
if suffix not in registered:
self._exclude_cache[source_id] = True
return True
# If we get here, we have a matching compiler and this source exists.
# Check if is excluded.
source_id = self._get_source_id(path)
options = (str(path), path.name, source_id)
parent_dir_name = path.parent.name
for excl in self.exclude_globs:
if isinstance(excl, Pattern):
for opt in options:
if not excl.match(opt):
continue
self._exclude_cache[source_id] = True
return True
else:
# perf: Check parent directory first to exclude faster by marking them all.
if path_match(parent_dir_name, excl):
self._exclude_cache[source_id] = True
for sub in get_all_files_in_directory(path.parent):
sub_source_id = self._get_source_id(sub)
self._exclude_cache[sub_source_id] = True
return True
for opt in options:
if path_match(opt, excl):
self._exclude_cache[source_id] = True
return True
self._exclude_cache[source_id] = False
return False
|
Check if the given path is considered an "excluded"
file based on the configured ignore-patterns.
Args:
path (Path): The path to check.
Returns:
bool
|
is_excluded
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def lookup(self, path_id: Union[str, Path]) -> Optional[Path]:
"""
        Look up a path given a sub-path or a source ID.
Args:
path_id (Union[str, Path]): Either part of a path
or a source ID.
Returns:
Path: The full path to the source file.
"""
input_path = Path(path_id)
if input_path.is_file() and input_path.is_relative_to(self.root_path):
# Already given an existing file.
return input_path.absolute()
input_stem = input_path.stem
input_extension = get_full_extension(input_path) or None
def find_in_dir(dir_path: Path, path: Path) -> Optional[Path]:
# Try exact match with or without extension
possible_matches = []
contracts_folder = self.get_contracts_path()
if path.is_absolute():
full_path = path
elif contracts_folder in (dir_path / path).parents:
# Check if a file with an exact match exists.
full_path = dir_path / path
else:
# User did not include contracts-prefix.
full_path = contracts_folder / path
if full_path.is_file():
return full_path
# Check for exact match with no given extension.
if input_extension is None:
if full_path.parent.is_dir():
for file in full_path.parent.iterdir():
if not file.is_file():
continue
# Check exact match w/o extension.
prefix = str(file.with_suffix("")).strip(" /\\")
if str(full_path).strip(" /\\") == prefix:
return file
# Look for stem-only matches (last resort).
for file_path in dir_path.rglob("*"):
if file_path.stem == input_stem:
possible_matches.append(file_path)
# If we have possible matches, return the one with the closest relative path
if possible_matches:
# Prioritize the exact relative path or first match in the list
possible_matches.sort(key=lambda p: len(str(p.relative_to(dir_path))))
return possible_matches[0]
return None
        # Derive the relative path from the given path ID.
relative_path = input_path.relative_to(input_path.anchor)
return find_in_dir(self.root_path, relative_path)
|
Look up a path given a sub-path or a source ID.
Args:
path_id (Union[str, Path]): Either part of a path
or a source ID.
Returns:
Path: The full path to the source file.
|
lookup
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
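A sketch for ``lookup`` through a project's source manager; the file name is hypothetical:

from ape import project

# Each of these resolves to the same full path when the file exists.
path = project.sources.lookup("contracts/MyContract.sol")
path = project.sources.lookup("MyContract.sol")
path = project.sources.lookup("MyContract")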
|
def refresh(self):
"""
Reset file-caches to handle session-changes.
(Typically not needed to be called by users).
"""
(self.__dict__ or {}).pop("_all_files", None)
self._path_to_source_id = {}
self._path_cache = None
|
Reset file-caches to handle session-changes.
(Typically not needed to be called by users).
|
refresh
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def get(
self, name: str, compile_missing: bool = True, check_for_changes: bool = True
) -> Optional[ContractContainer]:
"""
Get a contract by name.
Args:
name (str): The name of the contract.
compile_missing (bool): Set to ``False`` to not attempt compiling
if the contract can't be found. Note: modified sources are
re-compiled regardless of this flag.
check_for_changes (bool): Set to ``False`` if avoiding checking
for changes.
Returns:
ContractContainer | None
"""
existing_types = self.project.manifest.contract_types or {}
contract_type = existing_types.get(name)
if not contract_type:
if compile_missing:
self._compile_missing_contracts(self.sources.paths)
return self.get(name, compile_missing=False)
return None
source_id = contract_type.source_id or ""
source_found = source_id in self.sources
if not check_for_changes and source_found:
return ContractContainer(contract_type)
ext = get_full_extension(source_id)
if ext not in self.compiler_manager.registered_compilers:
return ContractContainer(contract_type)
if source_found:
if check_for_changes and self._detect_change(source_id):
compiled = {
ct.name: ct
for ct in self.compiler_manager.compile(source_id, project=self.project)
if ct.name
}
if compiled:
self.project._update_contract_types(compiled)
if name in compiled:
return ContractContainer(compiled[name])
return ContractContainer(contract_type)
if compile_missing:
self._compile_missing_contracts(self.sources.paths)
return self.get(name, compile_missing=False)
return None
|
Get a contract by name.
Args:
name (str): The name of the contract.
compile_missing (bool): Set to ``False`` to not attempt compiling
if the contract can't be found. Note: modified sources are
re-compiled regardless of this flag.
check_for_changes (bool): Set to ``False`` if avoiding checking
for changes.
Returns:
ContractContainer | None
|
get
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def installed(self) -> bool:
"""
``True`` when a project is available. Note: Installed does not mean
the dependency is compiled!
"""
if self._installation is not None:
return True
try:
project_path = self.project_path
except ProjectError:
# Fails when version ID errors out (bad config / missing required install etc.)
return False
if project_path.is_dir():
if any(x for x in self.project_path.iterdir() if not x.name.startswith(".")):
return True
return False
|
``True`` when a project is available. Note: Installed does not mean
the dependency is compiled!
|
installed
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def install(
self,
use_cache: bool = True,
config_override: Optional[dict] = None,
recurse: bool = True,
) -> "ProjectManager":
"""
Install this dependency.
Args:
use_cache (bool): To force a reinstalling, like a refresh, set this
to ``False``.
config_override (dict): Optionally change the configuration during install.
recurse (bool): Set to ``False`` to avoid installing dependency of dependencies.
Returns:
:class:`~ape.managers.project.ProjectManager`: The resulting project, ready
for compiling.
"""
config_override = {**(self.api.config_override or {}), **(config_override or {})}
project = None
did_fetch = False
if self._installation is not None and use_cache:
# Already has a cached installation.
if config_override:
self._installation.reconfigure(**config_override)
return self._installation
elif not self._project_disk_cache_exists or not use_cache:
# Project does not yet exist in the cache. We have to fetch the sources.
unpacked = False
if use_cache and self.manifest_path.is_file():
                # Attempt using sources from the manifest. This may happen
                # when dependencies were deleted but their manifests remain.
man = PackageManifest.model_validate_json(
self.manifest_path.read_text(encoding="utf8")
)
if man.sources:
self.project_path.mkdir(parents=True, exist_ok=True)
man.unpack_sources(self.project_path)
unpacked = True
# Either never fetched, it is missing but present in manifest, or we are forcing.
if not unpacked and not self._tried_fetch:
logger.debug(f"Fetching {self.api.package_id} {self.api.version_id}")
# No sources found! Fetch the project.
shutil.rmtree(self.project_path, ignore_errors=True)
self.project_path.parent.mkdir(parents=True, exist_ok=True)
self._tried_fetch = True
logger.info(f"Installing {self.clean_package_id} {self.api.version_id}")
try:
self.api.fetch(self.project_path)
except Exception as err:
raise ProjectError(f"Fetching failed: {err}")
did_fetch = True
# Reset global tried-fetch if it succeeded, so it can refresh.
self._tried_fetch = False
# Set name / version for the project, if it needs.
if "name" not in config_override:
config_override["name"] = self.api.name
if "version" not in config_override:
config_override["version"] = self.api.version_id
if self.project_path.is_dir():
paths = get_all_files_in_directory(self.project_path)
# Check if given only a manifest.
if len(paths) == 1:
suffix = get_full_extension(paths[0])
if suffix == ".json":
path = paths[0]
try:
manifest = PackageManifest.model_validate_json(
path.read_text(encoding="utf8")
)
except Exception:
# False alarm.
pass
else:
# Using a manifest project, unless this is just emptiness.
if (
manifest.sources
or manifest.contract_types
or manifest.name
or manifest.version
):
project = Project.from_manifest(
manifest, config_override=config_override
)
if project is None:
# Using an unpacked local-project.
project = LocalProject(
self.project_path,
manifest_path=self.manifest_path,
config_override=config_override,
)
elif self.manifest_path.is_file():
# Manifest-only project with manifest populated and not project-dir.
project = Project.from_manifest(self.manifest_path, config_override=config_override)
else:
raise ProjectError("Project install failed.")
# Cache for next time.
self._installation = project
# Install dependencies of dependencies if fetching for the first time.
if did_fetch and recurse:
spec = project.dependencies.get_project_dependencies(use_cache=use_cache)
list(spec)
return project
|
Install this dependency.
Args:
use_cache (bool): To force a reinstalling, like a refresh, set this
to ``False``.
config_override (dict): Optionally change the configuration during install.
recurse (bool): Set to ``False`` to avoid installing dependency of dependencies.
Returns:
:class:`~ape.managers.project.ProjectManager`: The resulting project, ready
for compiling.
|
install
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def compile(
self,
use_cache: bool = True,
config_override: Optional[dict] = None,
allow_install: bool = False,
) -> dict[str, ContractContainer]:
"""
Compile a dependency.
Args:
use_cache (bool): Set to ``False`` to force a re-compile.
config_override (Optional[dict]): Optionally override the configuration,
which may be needed for compiling.
allow_install (bool): Set to ``True`` to allow installing.
Returns:
dict[str, :class:`~ape.contracts.ContractContainer`]
"""
override = {**self.api.config_override, **(config_override or {})}
self.api.config_override = override
if not self.installed and allow_install:
project = self.install()
else:
# Will raise if not installed and allow_install=False.
project = self.project
if override:
# Ensure is using most up-to-date config override.
project.reconfigure(**override)
self._cache.cache_api(self.api)
if result := project.load_contracts(use_cache=use_cache):
return result
# Failed to get any contract types out of the dependency project.
# Try to figure out the best reason as to why this happened.
contracts_folder = project.contracts_folder
message = "Compiling dependency produced no contract types."
if isinstance(project, LocalProject):
all_files = [x.name for x in get_all_files_in_directory(contracts_folder)]
has_solidity_sources = any(get_full_extension(Path(x)) == ".sol" for x in all_files)
has_vyper_sources = any(
get_full_extension(Path(x)) in (".vy", ".vyi") for x in all_files
)
compilers = self.compiler_manager.registered_compilers
warn_sol = has_solidity_sources and ".sol" not in compilers
warn_vyper = has_vyper_sources and ".vy" not in compilers
suffix = ""
if warn_sol:
suffix = "Try installing 'ape-solidity'"
if warn_vyper:
suffix += " or 'ape-vyper'"
elif warn_vyper:
suffix = "Try installing 'ape-vyper'"
elif len(all_files) == 0:
suffix = f"No source files found! (contracts_folder={clean_path(contracts_folder)})"
if suffix:
message = f"{message} {suffix}."
logger.warning(message)
return {}
|
Compile a dependency.
Args:
use_cache (bool): Set to ``False`` to force a re-compile.
config_override (Optional[dict]): Optionally override the configuration,
which may be needed for compiling.
allow_install (bool): Set to ``True`` to allow installing.
Returns:
dict[str, :class:`~ape.contracts.ContractContainer`]
|
compile
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
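A short sketch of ``compile`` above, assuming the same hypothetical ``openzeppelin`` dependency; ``allow_install=True`` lets Ape install it first when needed:

from ape import project

dependency = project.dependencies.get_dependency("openzeppelin", "4.9.3")
containers = dependency.compile(allow_install=True)
for name, container in containers.items():
    # Each value is a ContractContainer wrapping the compiled contract type.
    print(name, container.contract_type.source_id)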
|
def cache_api(self, api: DependencyAPI) -> Path:
"""
Cache a dependency JSON for usage outside the project.
"""
api_file = self.get_api_path(api.package_id, api.version_id)
api_file.parent.mkdir(parents=True, exist_ok=True)
api_file.unlink(missing_ok=True)
# NOTE: All the excludes are only for saving disk space.
json_text = api.model_dump_json(
by_alias=True,
mode="json",
exclude_none=True,
exclude_unset=True,
exclude_defaults=True,
)
api_file.write_text(json_text, encoding="utf8")
return api_file
|
Cache a dependency JSON for usage outside the project.
|
cache_api
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def isolate_changes(self):
"""
Make changes affecting the Ape packages cache within a context; the cache is restored when the context exits.
For example, temporarily install local "dev" packages for testing purposes.
"""
with create_tempdir() as tmpdir:
packages_cache = tmpdir / "packages"
packages_cache.parent.mkdir(parents=True, exist_ok=True)
if self.root.is_dir():
shutil.copytree(self.root, packages_cache)
try:
yield
finally:
shutil.rmtree(self.root)
if packages_cache.is_dir():
# Restore.
shutil.copytree(packages_cache, self.root)
|
Make changes affecting the Ape packages cache within a context; the cache is restored when the context exits.
For example, temporarily install local "dev" packages for testing purposes.
|
isolate_changes
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
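A hedged sketch of ``isolate_changes`` above. It assumes the cache is reachable as ``project.dependencies.packages_cache``, that ``isolate_changes`` is usable as a context manager, and that a local ``./my-dev-package`` folder exists (all assumptions):

from ape import project

packages_cache = project.dependencies.packages_cache  # assumed accessor
with packages_cache.isolate_changes():
    # Installs made here only live for the duration of the block;
    # the original cache is restored afterwards.
    project.dependencies.install(local="./my-dev-package", name="devpkg")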
|
def get_project_dependencies(
self,
use_cache: bool = True,
config_override: Optional[dict] = None,
name: Optional[str] = None,
version: Optional[str] = None,
allow_install: bool = True,
strict: bool = False,
recurse: bool = True,
) -> Iterator[Dependency]:
"""
Get dependencies specified in the project's ``ape-config.yaml`` file.
Args:
use_cache (bool): Set to ``False`` to force-reinstall dependencies.
Defaults to ``True``. Does not work with ``allow_install=False``.
config_override (Optional[dict]): Override shared configuration for each dependency.
name (Optional[str]): Optionally only get dependencies with a certain name.
version (Optional[str]): Optionally only get dependencies with certain version.
allow_install (bool): Set to ``False`` to prevent installing specified dependencies that are not already installed.
strict (bool): ``True`` requires the dependency to either be installed or install properly.
recurse (bool): Set to ``False`` to not recursively install dependencies of dependencies.
Returns:
Iterator[:class:`~ape.managers.project.Dependency`]
"""
for api in self.config_apis:
try:
api_version_id = api.version_id
except Exception:
api_version_id = None
if (name is not None and api.name != name and api.package_id != name) or (
version is not None and api_version_id != version
):
continue
# Ensure the dependency API data is known.
if api_version_id is not None:
dependency = self.add(api)
else:
# Errored.
dependency = Dependency(api)
if allow_install:
try:
dependency.install(
use_cache=use_cache, config_override=config_override, recurse=recurse
)
except ProjectError as err:
if strict:
raise # This error.
# This dependency has issues. Let's wait until the user
# actually requests something before failing, and
# yield an uninstalled version of the specified dependency for
# them to fix.
logger.error(str(err))
yield dependency
|
Get dependencies specified in the project's ``ape-config.yaml`` file.
Args:
use_cache (bool): Set to ``False`` to force-reinstall dependencies.
Defaults to ``True``. Does not work with ``allow_install=False``.
config_override (Optional[dict]): Override shared configuration for each dependency.
name (Optional[str]): Optionally only get dependencies with a certain name.
version (Optional[str]): Optionally only get dependencies with certain version.
allow_install (bool): Set to ``False`` to prevent installing specified dependencies that are not already installed.
strict (bool): ``True`` requires the dependency to either be installed or install properly.
recurse (bool): Set to ``False`` to not recursively install dependencies of dependencies.
Returns:
Iterator[:class:`~ape.managers.project.Dependency`]
|
get_project_dependencies
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
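A minimal sketch of ``get_project_dependencies`` above: list what the project declares without triggering any new installs.

from ape import project

for dependency in project.dependencies.get_project_dependencies(allow_install=False):
    # `installed` only reflects whether the dependency's project files are present locally.
    print(dependency.name, dependency.version, dependency.installed)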
|
def all(self) -> Iterator[Dependency]:
"""
All dependencies known by Ape, regardless of their project
affiliation. NOTE: By "installed" here, we simply
mean the API files are cached and known by Ape.
However, it does not guarantee the project is
installed.
"""
if not self.packages_cache.api_folder.is_dir():
return
for package_versions in self.packages_cache.api_folder.iterdir():
if not package_versions.is_dir():
continue
for api_file in package_versions.iterdir():
if not api_file.is_file():
continue
data = json.loads(api_file.read_text(encoding="utf-8"))
api = self.decode_dependency(**data)
if api.name == self.project.name:
# Don't include self as a dependency
# (happens when compiling a dependency)
continue
yield self._create_dependency(api)
|
All dependencies known by Ape, regardless of their project
affiliation. NOTE: By "installed" here, we simply
mean the API files are cached and known by Ape.
However, it does not guarantee the project is
installed.
|
all
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def get_dependency_api(self, package_id: str, version: Optional[str] = None) -> DependencyAPI:
"""
Get a dependency API. If no version is given and multiple versions exist,
the latest is returned.
Args:
package_id (str): The package ID or name of the dependency.
version (str): The version of the dependency.
Returns:
:class:`~ape.api.projects.DependencyAPI`
"""
# Check by package ID first.
if dependency := self._get_dependency_api_by_package_id(package_id, version=version):
return dependency
elif dependency := self._get_dependency_api_by_package_id(
package_id, version=version, attr="name"
):
return dependency
package_str = f"{package_id}@{version}" if version else package_id
message = f"No matching dependency found with package ID '{package_str}'"
raise ProjectError(message)
|
Get a dependency API. If no version is given and multiple versions exist,
the latest is returned.
Args:
package_id (str): The package ID or name of the dependency.
version (str): The version of the dependency.
Returns:
:class:`~ape.api.projects.DependencyAPI`
|
get_dependency_api
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def get_versions(self, name: str, allow_install: bool = True) -> Iterator[Dependency]:
"""
Get all installed versions of a dependency.
Args:
name (str): The name of the dependency.
allow_install (bool): Set to ``False`` to not allow installing.
Returns:
Iterator[:class:`~ape.managers.project.Dependency`]
"""
# First, check specified.
versions_yielded = set()
for dependency in self.get_project_dependencies(name=name, allow_install=allow_install):
if dependency.version in versions_yielded:
continue
yield dependency
versions_yielded.add(dependency.version)
# Yield any remaining installed.
using_package_id = False
for dependency in self.all:
if dependency.package_id != name:
continue
using_package_id = True
if dependency.version in versions_yielded:
continue
yield dependency
versions_yielded.add(dependency.version)
if using_package_id:
# Done.
return
# Nothing was yielded by package ID. Check if using the short name.
for dependency in self.all:
if dependency.name != name:
continue
elif dependency.version in versions_yielded:
continue
yield dependency
versions_yielded.add(dependency.version)
|
Get all installed versions of a dependency.
Args:
name (str): The name of the dependency.
allow_install (bool): Set to ``False`` to not allow installing.
Returns:
Iterator[:class:`~ape.managers.project.Dependency`]
|
get_versions
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
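A sketch of ``get_versions`` above, using the hypothetical short name ``openzeppelin``:

from ape import project

for dependency in project.dependencies.get_versions("openzeppelin", allow_install=False):
    print(dependency.package_id, dependency.version)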
|
def get_dependency(
self, dependency_id: str, version: str, allow_install: bool = True
) -> Dependency:
"""
Get a dependency.
Args:
dependency_id (str): The package ID of the dependency. You can also
provide the short-name of the dependency.
version (str): The version identifier.
allow_install (bool): If the dependency API is known but the
project is not installed, attempt to install it. Defaults to ``True``.
Raises:
:class:`~ape.exceptions.ProjectError`: When unable to find the
dependency.
Returns:
:class:`~ape.managers.project.Dependency`
"""
version_options = _version_to_options(version)
# Also try the lowercase form of the name
# so `OpenZeppelin` would give you `openzeppelin`.
id_options = [dependency_id]
if dependency_id.lower() != dependency_id:
# Ensure we try the given dependency_id before its lowercase form.
id_options.append(dependency_id.lower())
def try_get():
for dep_id in id_options:
for v in version_options:
# NOTE: `allow_install=False` here because we install
# _after_ exhausting all options.
if dependency := self._get(dep_id, v, allow_install=False):
return dependency
if res := try_get():
return res
if allow_install:
# Try installing first.
self.install()
if res := try_get():
return res
raise ProjectError(f"Dependency '{dependency_id}' with version '{version}' not found.")
|
Get a dependency.
Args:
dependency_id (str): The package ID of the dependency. You can also
provide the short-name of the dependency.
version (str): The version identifier.
allow_install (bool): If the dependency API is known but the
project is not installed, attempt to install it. Defaults to ``True``.
Raises:
:class:`~ape.exceptions.ProjectError`: When unable to find the
dependency.
Returns:
:class:`~ape.managers.project.Dependency`
|
get_dependency
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
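A sketch of ``get_dependency`` above; the package ID and version are hypothetical, and the ``ProjectError`` handling mirrors the documented failure mode:

from ape import project
from ape.exceptions import ProjectError

try:
    dependency = project.dependencies.get_dependency(
        "OpenZeppelin/openzeppelin-contracts", "4.9.3"
    )
except ProjectError:
    dependency = None  # unknown dependency or version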
|
def decode_dependency(self, **item: Any) -> DependencyAPI:
"""
Decode data into a :class:`~ape.api.projects.DependencyAPI`.
Args:
**item: The same data you put in your ``dependencies:`` config.
Raises:
:class:`~ape.exceptions.ProjectError`: When unable to handle the
given API data.
Returns:
:class:`~ape.api.projects.DependencyAPI`
"""
for key, cls in self.types.items():
if key in item:
return cls.model_validate(item)
name = item.get("name") or f"{item}" # NOTE: Using 'or' for short-circuit eval
raise ProjectError(
f"No installed dependency API that supports '{name}'. "
f"Keys={', '.join([x for x in item.keys()])}"
)
|
Decode data into a :class:`~ape.api.projects.DependencyAPI`.
Args:
**item: The same data you put in your ``dependencies:`` config.
Raises:
:class:`~ape.exceptions.ProjectError`: When unable to handle the
given API data.
Returns:
:class:`~ape.api.projects.DependencyAPI`
|
decode_dependency
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def add(self, dependency: Union[dict, DependencyAPI]) -> Dependency:
"""
Add the dependency API data. This sets up a dependency such that
it can be fetched.
Args:
dependency (dict | :class:`~ape.api.projects.DependencyAPI`): The
API data necessary for fetching the dependency.
Returns:
:class:`~ape.managers.project.Dependency`
"""
api = self.decode_dependency(**dependency) if isinstance(dependency, dict) else dependency
self.packages_cache.cache_api(api)
# Avoid infinite loop where Ape re-tries installing the dependency
# again and again in error situations.
install_if_not_found = False
try:
return self.get_dependency(
api.package_id,
api.version_id,
allow_install=install_if_not_found,
)
except ProjectError:
raise # Avoids bottom except.
except Exception as err:
raise ProjectError(
f"Failed to add dependency {api.name}@{api.version_id}: {err}"
) from err
|
Add the dependency API data. This sets up a dependency such that
it can be fetched.
Args:
dependency (dict | :class:`~ape.api.projects.DependencyAPI`): The
API data necessary for fetching the dependency.
Returns:
:class:`~ape.managers.project.Dependency`
|
add
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
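A sketch of ``add`` above: register the API data for a GitHub dependency so it can be fetched later (the repository, name, and version are assumptions):

from ape import project

data = {
    "github": "OpenZeppelin/openzeppelin-contracts",  # assumed repository
    "name": "openzeppelin",
    "version": "4.9.3",
}
dependency = project.dependencies.add(data)
print(dependency.package_id, dependency.installed)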
|
def install(self, **dependency: Any) -> Union[Dependency, list[Dependency]]:
"""
Install dependencies.
Args:
**dependency: Dependency data, the same as what you put in the ``dependencies:`` config.
When omitted, installs all project-specified dependencies. Also, use
``use_cache=False`` to force re-installing and ``recurse=False`` to avoid
installing dependencies of dependencies.
Returns:
:class:`~ape.managers.project.Dependency` when given data else a list
of them, one for each specified.
"""
use_cache: bool = dependency.pop("use_cache", True)
recurse: bool = dependency.pop("recurse", True)
if dependency:
return self.install_dependency(dependency, use_cache=use_cache, recurse=recurse)
# Install all of the project's dependencies.
result: list[Dependency] = []
for dep in self.get_project_dependencies(
use_cache=use_cache, allow_install=True, recurse=recurse
):
result.append(dep)
return result
|
Install dependencies.
Args:
**dependency: Dependency data, the same as what you put in the ``dependencies:`` config.
When omitted, installs all project-specified dependencies. Also, use
``use_cache=False`` to force re-installing and ``recurse=False`` to avoid
installing dependencies of dependencies.
Returns:
:class:`~ape.managers.project.Dependency` when given data else a list
of them, one for each specified.
|
install
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
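Two hedged sketches of the calling modes described in the ``install`` record above (the dependency keys and values are assumptions):

from ape import project

# 1) Install everything declared in the project's config, without nested dependencies.
installed = project.dependencies.install(recurse=False)

# 2) Install one explicitly described dependency, bypassing the cache.
dependency = project.dependencies.install(
    github="OpenZeppelin/openzeppelin-contracts",
    name="openzeppelin",
    version="4.9.3",
    use_cache=False,
)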
|
def unpack(self, base_path: Path, cache_name: str = ".cache"):
"""
Move dependencies into a .cache folder.
Ideal for isolated, temporary projects.
Args:
base_path (Path): The target path.
cache_name (str): The cache folder name to create
at the target path. Defaults to ``.cache`` because
that is what ``ape-solidity`` uses.
"""
cache_folder = base_path / cache_name
for dependency in self.specified:
dependency.unpack(cache_folder)
|
Move dependencies into a .cache folder.
Ideal for isolated, temporary projects.
Args:
base_path (Path): The target path.
cache_name (str): The cache folder name to create
at the target path. Defaults to ``.cache`` because
that is what ``ape-solidity`` uses.
|
unpack
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def from_manifest(
cls, manifest: Union[PackageManifest, Path, str], config_override: Optional[dict] = None
) -> "Project":
"""
Create an Ape project using only a manifest.
Args:
manifest (Union[PackageManifest, Path, str]): Either a manifest or a
path to a manifest file.
config_override (Optional[Dict]): Optionally provide a config override.
Returns:
:class:`~ape.managers.project.Project`
"""
config_override = config_override or {}
manifest = _load_manifest(manifest) if isinstance(manifest, (Path, str)) else manifest
return Project(manifest, config_override=config_override)
|
Create an Ape project using only a manifest.
Args:
manifest (Union[PackageManifest, Path, str]): Either a manifest or a
path to a manifest file.
config_override (Optional[Dict]): Optionally provide a config override.
Returns:
:class:`~ape.managers.project.Project`
|
from_manifest
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
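A sketch of ``from_manifest`` above; ``manifest.json`` is a hypothetical, previously published EthPM manifest file:

from pathlib import Path
from ape import Project

manifest_project = Project.from_manifest(
    Path("manifest.json"), config_override={"name": "demo"}
)
print(manifest_project.name)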
|
def from_python_library(
cls, package_name: str, config_override: Optional[dict] = None
) -> "LocalProject":
"""
Create an Ape project instance from an installed Python package.
This is useful when Ape or Vyper projects are published to
PyPI.
Args:
package_name (str): The name of the package's folder that would
appear in site-packages.
config_override (dict | None): Optionally override the configuration
for this project.
Returns:
:class:`~ape.managers.project.LocalProject`
"""
try:
pkg_path = get_package_path(package_name)
except ValueError as err:
raise ProjectError(str(err)) from err
# Treat site-package as a local-project.
return LocalProject(pkg_path, config_override=config_override)
|
Create an Ape project instance from an installed Python package.
This is useful when Ape or Vyper projects are published to
PyPI.
Args:
package_name (str): The name of the package's folder that would
appear in site-packages.
config_override (dict | None): Optionally override the configuration
for this project.
Returns:
:class:`~ape.managers.project.LocalProject`
|
from_python_library
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
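A sketch of ``from_python_library`` above, assuming the classmethod is reachable from ``ape.Project`` and that a package folder named ``snekmate`` exists in site-packages (both assumptions):

from ape import Project
from ape.exceptions import ProjectError

try:
    lib_project = Project.from_python_library("snekmate")
except ProjectError as err:
    print(f"Not usable as an Ape project: {err}")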
|
def isolate_in_tempdir(self, **config_override) -> Iterator["LocalProject"]:
"""
Clone this project to a temporary directory and yield
the resulting project.
"""
config_override = config_override or {}
name = config_override.get("name", self.name)
chdir = config_override.pop("chdir", False)
with create_tempdir(name=name) as path:
if chdir:
with self.chdir(path):
yield self.unpack(path, config_override=config_override)
else:
yield self.unpack(path, config_override=config_override)
|
Clone this project to a temporary directory and yield
the resulting project.
|
isolate_in_tempdir
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
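A sketch of ``isolate_in_tempdir`` above: compile inside a throwaway copy so the real project directory stays untouched.

from ape import project

with project.isolate_in_tempdir(name="scratch") as tmp_project:
    tmp_project.load_contracts()  # compiles within the temporary copy
# The temporary directory is removed when the block exits.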
|
def unpack(self, destination: Path, config_override: Optional[dict] = None) -> "LocalProject":
"""
Unpack the project to a location using the information
from the manifest. Converts a manifest-based project
to a local one.
"""
config_override = {**self._config_override, **(config_override or {})}
sources = self.sources or {}
# Unpack contracts.
for source_id, src in sources.items():
path = destination / source_id
path.parent.mkdir(parents=True, exist_ok=True)
path.write_text(str(src.content), encoding="utf8")
# Unpack config file.
# NOTE: Always unpacks into a regular .yaml config file for simplicity
# and maximum portability.
self.config.write_to_disk(destination / "ape-config.yaml")
return LocalProject(destination, config_override=config_override)
|
Unpack the project to a location using the information
from the manifest. Converts a manifest-based project
to a local one.
|
unpack
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def update_manifest(self, **kwargs):
"""
Change manifest values. Existing values are overwritten.
Args:
**kwargs: Top-level manifest attributes.
"""
for k, v in kwargs.items():
setattr(self._manifest, k, v)
|
Change manifest values. Existing values are overwritten.
Args:
**kwargs: Top-level manifest attributes.
|
update_manifest
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def add_compiler_data(self, compiler_data: Iterable[Compiler]) -> list[Compiler]:
"""
Add compiler data to the existing cached manifest.
Args:
compiler_data (Iterable[``ethpm_types.Compiler``]): Compilers to add.
Returns:
List[``ethpm_types.source.Compiler``]: The full list of compilers.
"""
# Validate given data.
given_compilers = set(compiler_data)
num_compilers = len([x for x in compiler_data])
if len(given_compilers) != num_compilers:
raise ProjectError(
f"`{self.add_compiler_data.__name__}()` was given multiple of the same compiler. "
"Please filter inputs."
)
# Filter out given compilers without contract types.
given_compilers = {c for c in given_compilers if c.contractTypes}
if len(given_compilers) != num_compilers:
logger.warning(
f"`{self.add_compiler_data.__name__}()` given compilers without contract types. "
"Ignoring these inputs."
)
for given_compiler in given_compilers:
other_given_compilers = [c for c in given_compilers if c != given_compiler]
contract_types_from_others = [
n for c in other_given_compilers for n in (c.contractTypes or [])
]
collisions = {
n for n in (given_compiler.contractTypes or []) if n in contract_types_from_others
}
if collisions:
collide_str = ", ".join(collisions)
raise ProjectError(f"Contract type(s) '{collide_str}' collision across compilers.")
new_types = [n for c in given_compilers for n in (c.contractTypes or [])]
# Merge given compilers with existing compilers.
existing_compilers = self.manifest.compilers or []
# Existing compilers remaining after processing new compilers.
remaining_existing_compilers: list[Compiler] = []
for existing_compiler in existing_compilers:
find_iter = iter(x for x in compiler_data if x == existing_compiler)
if matching_given_compiler := next(find_iter, None):
# Compiler already exists in the system, possibly with different contract types.
# Merge contract types.
matching_given_compiler.contractTypes = list(
{
*(existing_compiler.contractTypes or []),
*(matching_given_compiler.contractTypes or []),
}
)
# NOTE: Purposely we don't add the existing compiler back,
# as it is the same as the given compiler (meaning same
# name, version, and settings), and we have
# merged their contract types.
continue
else:
# Filter out contract types added now under a different compiler.
existing_compiler.contractTypes = [
c for c in (existing_compiler.contractTypes or []) if c not in new_types
]
# Clear output selection for new types, since they are present in the new compiler.
if existing_compiler.settings and "outputSelection" in existing_compiler.settings:
new_src_ids = {
(self.manifest.contract_types or {})[x].source_id
for x in new_types
if x in (self.manifest.contract_types or {})
and (self.manifest.contract_types or {})[x].source_id is not None
}
existing_compiler.settings["outputSelection"] = {
k: v
for k, v in existing_compiler.settings["outputSelection"].items()
if k not in new_src_ids
}
# Remove compilers without contract types.
if existing_compiler.contractTypes:
remaining_existing_compilers.append(existing_compiler)
# Use Compiler.__hash__ to remove duplicates.
# Also, sort for consistency.
compilers = sorted(
list({*remaining_existing_compilers, *compiler_data}),
key=lambda x: f"{x.name}@{x.version}",
)
self.update_manifest(compilers=compilers)
return self._manifest.compilers or compilers # for mypy.
|
Add compiler data to the existing cached manifest.
Args:
compiler_data (Iterable[``ethpm_types.Compiler``]): Compilers to add.
Returns:
List[``ethpm_types.source.Compiler``]: The full list of compilers.
|
add_compiler_data
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def reconfigure(self, **overrides):
"""
Change a project's config.
Args:
**overrides: Config key-value pairs. Completely overrides
the existing configuration.
"""
if "config" in self.__dict__:
# Delete cached property.
del self.__dict__["config"]
original_override = self._config_override
self._config_override = overrides
try:
_ = self.config
except Exception:
# Ensure changes don't persist.
self._config_override = original_override
raise  # Re-raise the original error.
self._invalidate_project_dependent_caches()
|
Change a project's config.
Args:
**overrides: Config key-value pairs. Completely overrides
the existing configuration.
|
reconfigure
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
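A one-line sketch of ``reconfigure`` above; ``contracts_folder`` is a standard Ape config key used here only as an example override:

from ape import project

project.reconfigure(contracts_folder="src")  # replaces any previous override entirely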
|
def instance_map(self) -> dict[str, dict[str, EthPMContractInstance]]:
"""
The mapping needed for deployments publishing in an ethpm manifest.
"""
result: dict[str, dict[str, EthPMContractInstance]] = {}
if not self.cache_folder.is_dir():
return result
for ecosystem_path in self.cache_folder.iterdir():
if not ecosystem_path.is_dir():
continue
chain = ecosystem_path.name
for deployment in ecosystem_path.iterdir():
if not self._is_deployment(deployment):
continue
instance = EthPMContractInstance.model_validate_json(
deployment.read_text(encoding="utf8")
)
if not instance.block:
continue
bip122_uri = f"blockchain://{chain}/block/{instance.block.replace('0x', '')}"
if bip122_uri in result:
result[bip122_uri][deployment.name] = instance
else:
result[bip122_uri] = {deployment.name: instance}
return result
|
The mapping needed for deployments publishing in an ethpm manifest.
|
instance_map
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def track(self, contract: ContractInstance, allow_dev: bool = False):
"""
Indicate that a contract deployment should be included in the package manifest
upon publication.
**NOTE**: Deployments are automatically tracked for contracts. However, only
deployments passed to this method are included in the final, publishable manifest.
Args:
contract (:class:`~ape.contracts.base.ContractInstance`): The contract
to track as a deployment of the project.
allow_dev (bool): Set to ``True`` if simulating in a local dev environment.
"""
if not allow_dev and self.provider.network.is_dev:
raise ProjectError("Can only publish deployments on a live network.")
elif not (contract_name := contract.contract_type.name):
raise ProjectError("Contract name required when publishing.")
receipt = None
err_msg = f"Contract '{contract_name}' transaction receipt is unknown."
try:
if creation := contract.creation_metadata:
receipt = creation.receipt
except ChainError as err:
raise ProjectError(err_msg) from err
if not receipt:
raise ProjectError(err_msg)
block_number = receipt.block_number
block_hash_bytes = self.provider.get_block(block_number).hash
if not block_hash_bytes:
# Mostly for mypy, not sure this can ever happen.
raise ProjectError(
f"Block hash containing transaction for '{contract_name}' "
f"at block_number={block_number} is unknown."
)
block_hash = to_hex(block_hash_bytes)
contract_type_str = (
f"{contract.contract_type.source_id}:{contract_name}"
if contract.contract_type.source_id
else contract_name
)
artifact = EthPMContractInstance(
address=contract.address,
block=block_hash,
contractType=contract_type_str,
transaction=cast(HexStr, contract.txn_hash),
runtimeBytecode=contract.contract_type.runtime_bytecode,
)
if not (block_0_hash := self.provider.get_block(0).hash):
raise ProjectError("Chain missing hash for block 0 (required for BIP-122 chain ID).")
bip122_chain_id = f"{to_hex(block_0_hash)[2:]}"
deployments_folder = self.cache_folder / bip122_chain_id
deployments_folder.mkdir(exist_ok=True, parents=True)
destination = deployments_folder / f"{contract_name}.json"
if destination.is_file():
logger.debug("Deployment already tracked. Re-tracking.")
# NOTE: missing_ok=True to handle race condition.
destination.unlink(missing_ok=True)
destination.write_text(artifact.model_dump_json(), encoding="utf8")
|
Indicate that a contract deployment should be included in the package manifest
upon publication.
**NOTE**: Deployments are automatically tracked for contracts. However, only
deployments passed to this method are included in the final, publishable manifest.
Args:
contract (:class:`~ape.contracts.base.ContractInstance`): The contract
to track as a deployment of the project.
allow_dev (bool): Set to ``True`` if simulating in a local dev environment.
|
track
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
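A sketch of ``track`` above, assuming the deployments manager is reachable as ``project.deployments``, that an account alias ``deployer`` exists, and that the project defines a ``MyContract`` contract (all assumptions); it is meant to run against a live network:

from ape import accounts, project

owner = accounts.load("deployer")                   # assumed account alias
contract = project.MyContract.deploy(sender=owner)  # assumed contract name
project.deployments.track(contract)                 # include it in the published manifest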
|
def __iter__(self) -> Iterator[EthPMContractInstance]:
"""
Get project deployments.
Returns:
Iterator[ethpm_types.ContractInstance]
"""
if not self.cache_folder.is_dir():
return
for ecosystem_path in self.cache_folder.iterdir():
if not ecosystem_path.is_dir():
continue
for deployment in ecosystem_path.iterdir():
if not self._is_deployment(deployment):
continue
yield EthPMContractInstance.model_validate_json(deployment.read_text())
|
Get project deployments.
Returns:
Iterator[ethpm_types.ContractInstance]
|
__iter__
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def project_api(self) -> ProjectAPI:
"""
The 'type' of project this is, such as an Ape project
or a Brownie project (or something else).
"""
default_project = self._get_ape_project_api()
valid_apis: list[ProjectAPI] = [default_project] if default_project else []
# If ape-config.yaml does not exist, check for another ProjectAPI type.
project_classes: Iterator[type[ProjectAPI]] = (
t[1]
for t in self.plugin_manager.projects # type: ignore
)
plugins = (t for t in project_classes if not issubclass(t, ApeProject))
for api in plugins:
if instance := api.attempt_validate(path=self._base_path):
valid_apis.append(instance)
num_apis = len(valid_apis)
if num_apis == 1:
# Only 1 valid API- we can proceed from here.
return valid_apis[0]
elif num_apis == 0:
# Invalid project: not a likely scenario, as ApeProject should always work.
raise ProjectError(f"'{self._base_path.name}' is not recognized as a project.")
# More than 1 valid API. Remove default unless its config exists.
if valid_apis[0] == default_project:
if default_project.config_file.is_file():
# If Ape is configured for real, we want these changes at the end of the list,
# since they are most final.
valid_apis = [*valid_apis[1:], valid_apis[0]]
else:
# Remove, as we have others that are _actually_ valid, and the default is not needed.
valid_apis = valid_apis[1:]
if len(valid_apis) == 1:
# After removing the unnecessary default project type, there is only 1 valid project type left.
return valid_apis[0]
# If we get here, there is more than one project type we should use.
return MultiProject(apis=valid_apis, path=self._base_path)
|
The 'type' of project this is, such as an Ape project
or a Brownie project (or something else).
|
project_api
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def config(self) -> ApeConfig:
"""
The project configuration (including global defaults).
"""
# NOTE: Accessing the config this way allows us
# to be a different project than the cwd project.
project_config = self.project_api.extract_config(**self._config_override)
return self.config_manager.merge_with_global(project_config)
|
The project configuration (including global defaults).
|
config
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|
def in_tempdir(self) -> bool:
"""
``True`` when this project is in the temporary directory,
meaning it exists only in the temporary directory
namespace.
"""
if not self.path:
return False
return in_tempdir(self.path)
|
``True`` when this project is in the temporary directory,
meaning it exists only in the temporary directory
namespace.
|
in_tempdir
|
python
|
ApeWorX/ape
|
src/ape/managers/project.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/managers/project.py
|
Apache-2.0
|