code
stringlengths 66
870k
| docstring
stringlengths 19
26.7k
| func_name
stringlengths 1
138
| language
stringclasses 1
value | repo
stringlengths 7
68
| path
stringlengths 5
324
| url
stringlengths 46
389
| license
stringclasses 7
values |
---|---|---|---|---|---|---|---|
def as_our_module(cls_or_def: _MOD_T, doc_str: Optional[str] = None) -> _MOD_T:
    """
    Claim a class or definition from another package as if it were ours
    (e.g. :class:`~ape.types.signatures.SignableMessage`), optionally
    replacing its doc-str. Logic borrowed from starknet-py:
    https://github.com/software-mansion/starknet.py/blob/0.10.1-alpha/starknet_py/utils/docs.py#L10-L24

    Args:
        cls_or_def (_MOD_T): The class or definition to borrow.
        doc_str (Optional[str]): Optionally change the doc string.

    Returns:
        The borrowed-version of the class or definition.
    """
    if cls_or_def is None:
        return cls_or_def

    if doc_str is not None:
        cls_or_def.__doc__ = doc_str

    # Re-point the definition's module at the caller's module so it no
    # longer refers to the original package.
    caller_frame = inspect.stack()[1]
    caller_module = inspect.getmodule(caller_frame[0])
    if caller_module:
        cls_or_def.__module__ = caller_module.__name__

    return cls_or_def
|
Ape sometimes reclaims definitions from other packages, such as
class:`~ape.types.signatures.SignableMessage`). When doing so, the doc str
may be different than ours, and the module may still refer to
the original package. This method steals the given class as-if
it were ours. Logic borrowed from starknet-py.
https://github.com/software-mansion/starknet.py/blob/0.10.1-alpha/starknet_py/utils/docs.py#L10-L24
Args:
cls_or_def (_MOD_T): The class or definition to borrow.
doc_str (str): Optionally change the doc string.
Returns:
The borrowed-version of the class or definition.
|
as_our_module
|
python
|
ApeWorX/ape
|
src/ape/utils/misc.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/misc.py
|
Apache-2.0
|
def derive_public_key(private_key: bytes) -> HexBytes:
    """
    Derive the public key for the given private key.

    Args:
        private_key (bytes): The private key.

    Returns:
        HexBytes: The public key.
    """
    key_obj = keys.PrivateKey(private_key)
    # The stringified public key is "0x"-prefixed; drop the prefix before
    # converting the remaining hex back to bytes.
    public_key_hex = str(key_obj.public_key)[2:]
    return HexBytes(bytes.fromhex(public_key_hex))
|
Derive the public key for the given private key.
Args:
private_key (bytes): The private key.
Returns:
HexBytes: The public key.
|
derive_public_key
|
python
|
ApeWorX/ape
|
src/ape/utils/misc.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/misc.py
|
Apache-2.0
|
def get_relative_path(target: Path, anchor: Path) -> Path:
    """
    Compute the relative path of ``target`` relative to ``anchor``,
    which may or may not share a common ancestor.

    **NOTE ON PERFORMANCE**: Both paths must be absolute to
    use this method. If you have to first call ``.absolute()`` on the
    paths, use ``target.relative_to(anchor)`` instead, as it will be
    faster in that case.

    Args:
        target (pathlib.Path): The path we are interested in.
        anchor (pathlib.Path): The path we are starting from.

    Returns:
        pathlib.Path: The new path to the target path from the anchor path.
    """
    # Count how many leading path components the two paths share.
    shared = 0
    for target_part, anchor_part in zip(target.parts, anchor.parts):
        if target_part != anchor_part:
            break

        shared += 1

    # Climb out of the unshared portion of ``anchor``, then descend into
    # the unshared portion of ``target``.
    climb = [".."] * (len(anchor.parts) - shared)
    return Path(*climb, *target.parts[shared:])
|
Compute the relative path of ``target`` relative to ``anchor``,
which may or may not share a common ancestor.
**NOTE ON PERFORMANCE**: Both paths must be absolute to
use this method. If you know both methods are absolute,
this method is a performance boost. If you have to first
call `.absolute()` on the paths, use
`target.relative_to(anchor)` instead; as it will be
faster in that case.
Args:
target (pathlib.Path): The path we are interested in.
anchor (pathlib.Path): The path we are starting from.
Returns:
pathlib.Path: The new path to the target path from the anchor path.
|
get_relative_path
|
python
|
ApeWorX/ape
|
src/ape/utils/os.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/os.py
|
Apache-2.0
|
def get_all_files_in_directory(
    path: Path, pattern: Optional[Union[Pattern, str]] = None, max_files: Optional[int] = None
) -> list[Path]:
    """
    Returns all the files in a directory structure (recursive).
    For example, given a directory structure like::

        dir_a: dir_b, file_a, file_b
        dir_b: file_c

    and you provide the path to ``dir_a``, it will return a list containing
    the paths to ``file_a``, ``file_b`` and ``file_c``.

    Args:
        path (pathlib.Path): A directory containing files of interest.
        pattern (Optional[Union[Pattern, str]]): Optionally provide a regex
          pattern to match.
        max_files (Optional[int]): Optionally set a max file count. This is useful
          because huge file structures will be very slow.

    Returns:
        list[pathlib.Path]: A list of files in the given directory.
    """
    if path.is_file():
        return [path]

    elif not path.is_dir():
        return []

    pattern_obj: Optional[Pattern] = None
    if isinstance(pattern, str):
        pattern_obj = re.compile(pattern)
    elif pattern is not None:
        pattern_obj = pattern

    result: list[Path] = []
    append_result = result.append  # Local variable for faster access
    # BUGFIX: glob "*" rather than "*.*" so files without an extension
    # (like ``file_a`` in the docstring example) are included.
    for file in path.rglob("*"):
        if not file.is_file() or (pattern_obj is not None and not pattern_obj.match(file.name)):
            continue

        append_result(file)
        if max_files is not None and len(result) >= max_files:
            break

    return result
|
Returns all the files in a directory structure (recursive).
For example, given a directory structure like::
dir_a: dir_b, file_a, file_b
dir_b: file_c
and you provide the path to ``dir_a``, it will return a list containing
the paths to ``file_a``, ``file_b`` and ``file_c``.
Args:
path (pathlib.Path): A directory containing files of interest.
pattern (Optional[Union[Pattern, str]]): Optionally provide a regex
pattern to match.
max_files (Optional[int]): Optionally set a max file count. This is useful
because huge file structures will be very slow.
Returns:
list[pathlib.Path]: A list of files in the given directory.
|
get_all_files_in_directory
|
python
|
ApeWorX/ape
|
src/ape/utils/os.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/os.py
|
Apache-2.0
|
def get_full_extension(path: Union[Path, str]) -> str:
    """
    For a path like ``Path("Contract.t.sol")``,
    returns ``.t.sol``, unlike the regular Path
    property ``.suffix`` which returns ``.sol``.

    Args:
        path (Path | str): The path with an extension.

    Returns:
        str: The full suffix
    """
    if not path:
        return ""

    path_obj = Path(path)
    if path_obj.is_dir() or not path_obj.suffix:
        return ""

    file_name = path_obj.name
    pieces = file_name.split(".")
    if file_name.startswith(".") and len(pieces) > 2:
        # Hidden file (e.g. ``.hidden.tar.gz``): skip the empty leading
        # piece and the base name.
        return f".{'.'.join(pieces[2:])}"

    elif len(pieces) > 1:
        return f".{'.'.join(pieces[1:])}"

    return ""
|
For a path like ``Path("Contract.t.sol")``,
returns ``.t.sol``, unlike the regular Path
property ``.suffix`` which returns ``.sol``.
Args:
path (Path | str): The path with an extension.
Returns:
str: The full suffix
|
get_full_extension
|
python
|
ApeWorX/ape
|
src/ape/utils/os.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/os.py
|
Apache-2.0
|
def create_tempdir(name: Optional[str] = None) -> Iterator[Path]:
    """
    Create a temporary directory. Differs from ``TemporaryDirectory()``
    context-call alone because it automatically resolves the path.

    Args:
        name (Optional[str]): Optionally provide a name of the directory.
          Else, defaults to root of ``tempfile.TemporaryDirectory()``
          (resolved).

    Returns:
        Iterator[Path]: Context managing the temporary directory.
    """
    with TemporaryDirectory() as temp_dir:
        root = Path(temp_dir).resolve()
        if not name:
            yield root
        else:
            # Create the requested sub-directory inside the temp root.
            named_dir = root / name
            named_dir.mkdir()
            yield named_dir
|
Create a temporary directory. Differs from ``TemporaryDirectory()``
context-call alone because it automatically resolves the path.
Args:
name (Optional[str]): Optional provide a name of the directory.
Else, defaults to root of ``tempfile.TemporaryDirectory()``
(resolved).
Returns:
Iterator[Path]: Context managing the temporary directory.
|
create_tempdir
|
python
|
ApeWorX/ape
|
src/ape/utils/os.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/os.py
|
Apache-2.0
|
def run_in_tempdir(
    fn: Callable[[Path], Any],
    name: Optional[str] = None,
):
    """
    Run the given function in a temporary directory with its path
    resolved.

    Args:
        fn (Callable): A function that takes a path. It gets called
          with the resolved path to the temporary directory.
        name (Optional[str]): Optionally name the temporary directory.

    Returns:
        Any: The result of the function call.
    """
    with create_tempdir(name=name) as temp_dir:
        result = fn(temp_dir)

    return result
|
Run the given function in a temporary directory with its path
resolved.
Args:
fn (Callable): A function that takes a path. It gets called
with the resolved path to the temporary directory.
name (Optional[str]): Optionally name the temporary directory.
Returns:
Any: The result of the function call.
|
run_in_tempdir
|
python
|
ApeWorX/ape
|
src/ape/utils/os.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/os.py
|
Apache-2.0
|
def in_tempdir(path: Path) -> bool:
    """
    Returns ``True`` when the given path is in a temporary directory.

    Args:
        path (Path): The path to check.

    Returns:
        bool
    """
    # Compare normalized string forms so separators and symlinked temp
    # roots are handled consistently.
    temp_root = os.path.normpath(str(Path(gettempdir()).resolve()))
    return os.path.normpath(path).startswith(temp_root)
|
Returns ``True`` when the given path is in a temporary directory.
Args:
path (Path): The path to check.
Returns:
bool
|
in_tempdir
|
python
|
ApeWorX/ape
|
src/ape/utils/os.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/os.py
|
Apache-2.0
|
def path_match(path: Union[str, Path], *exclusions: str) -> bool:
    """
    A better glob-matching function. For example:

    >>> from pathlib import Path
    >>> p = Path("test/to/.build/me/2/file.json")
    >>> p.match("**/.build/**")
    False
    >>> from ape.utils.os import path_match
    >>> path_match(p, "**/.build/**")
    True
    """
    as_str = str(path)
    as_path = Path(path)

    for exclusion in exclusions:
        # Try the whole path first, then just the file name.
        if fnmatch(as_str, exclusion) or fnmatch(as_path.name, exclusion):
            return True

        # If the exclusion is the full name of any of the parents
        # (e.g. ".cache"), it is a match.
        for parent in as_path.parents:
            if parent.name == exclusion:
                return True

            # Walk the path recursively, re-matching the remainder of the
            # path relative to each parent.
            remainder = as_str.replace(str(parent), "").strip(os.path.sep)
            if fnmatch(remainder, exclusion):
                return True

    return False
|
A better glob-matching function. For example:
>>> from pathlib import Path
>>> p = Path("test/to/.build/me/2/file.json")
>>> p.match("**/.build/**")
False
>>> from ape.utils.os import path_match
>>> path_match(p, "**/.build/**")
True
|
path_match
|
python
|
ApeWorX/ape
|
src/ape/utils/os.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/os.py
|
Apache-2.0
|
def clean_path(path: Path) -> str:
    """
    Replace the home directory with key ``$HOME`` and return
    the path as a str. This is used for outputting paths
    with less doxxing.

    Args:
        path (Path): The path to sanitize.

    Returns:
        str: A sanitized path-str.
    """
    home_dir = Path.home()
    if not path.is_relative_to(home_dir):
        # Path is outside the home directory; nothing to redact.
        return f"{path}"

    return f"$HOME{os.path.sep}{path.relative_to(home_dir)}"
|
Replace the home directory with key ``$HOME`` and return
the path as a str. This is used for outputting paths
with less doxxing.
Args:
path (Path): The path to sanitize.
Returns:
str: A sanitized path-str.
|
clean_path
|
python
|
ApeWorX/ape
|
src/ape/utils/os.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/os.py
|
Apache-2.0
|
def get_package_path(package_name: str) -> Path:
    """
    Get the path to a package from site-packages.

    Args:
        package_name (str): The name of the package.

    Returns:
        Path
    """
    try:
        dist = distribution(package_name)
    except PackageNotFoundError as err:
        raise ValueError(f"Package '{package_name}' not found in site-packages.") from err

    # Resolve the distribution's root, then the package folder inside it.
    site_packages_root = Path(str(dist.locate_file("")))
    package_path = site_packages_root / package_name
    if not package_path.exists():
        raise ValueError(f"Package '{package_name}' not found in site-packages.")

    return package_path
|
Get the path to a package from site-packages.
Args:
package_name (str): The name of the package.
Returns:
Path
|
get_package_path
|
python
|
ApeWorX/ape
|
src/ape/utils/os.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/os.py
|
Apache-2.0
|
def extract_archive(archive_file: Path, destination: Optional[Path] = None):
    """
    Extract an archive file. Supports ``.zip`` or ``.tar.gz``.
    If the archive has a single shared top-level directory, it is
    stripped so the contents land directly in ``destination``.

    Args:
        archive_file (Path): The file-path to the archive.
        destination (Optional[Path]): Optionally provide a destination.
          Defaults to the parent directory of the archive file.

    Raises:
        ValueError: When the archive format is unsupported.
    """
    destination = destination or archive_file.parent
    if archive_file.suffix == ".zip":
        with zipfile.ZipFile(archive_file, "r") as zip_ref:
            zip_members = zip_ref.namelist()
            if top_level_dir := _get_top_level_dir(zip_members):
                for zip_member in zip_members:
                    # Modify the member name to remove the top-level directory.
                    relative_path = Path(zip_member).relative_to(top_level_dir)
                    target_path = destination / relative_path
                    # BUGFIX: ask the *archive* whether the entry is a directory
                    # (trailing "/") instead of checking the local filesystem,
                    # which was unrelated to the archive's contents.
                    if zip_ref.getinfo(zip_member).is_dir():
                        target_path.mkdir(parents=True, exist_ok=True)
                    else:
                        target_path.parent.mkdir(parents=True, exist_ok=True)
                        with zip_ref.open(zip_member) as source:
                            target_path.write_bytes(source.read())
            else:
                zip_ref.extractall(f"{destination}")

    elif archive_file.name.endswith(".tar.gz"):
        with tarfile.open(archive_file, "r:gz") as tar_ref:
            tar_members = tar_ref.getmembers()
            if top_level_dir := _get_top_level_dir([m.name for m in tar_members]):
                for tar_member in tar_members:
                    # Modify the member name to remove the top-level directory.
                    tar_member.name = os.path.relpath(tar_member.name, top_level_dir)
                    tar_ref.extract(tar_member, path=destination)
            else:
                tar_ref.extractall(path=f"{destination}")

    else:
        raise ValueError(f"Unsupported zip format: '{archive_file.suffix}'.")


def _get_top_level_dir(member_names: list) -> str:
    """Return the members' shared top-level directory, or "" when there is none."""
    if not member_names:
        return ""
    elif len(member_names) == 1:
        # BUGFIX: ``commonpath`` of a single name is the name itself, which
        # made single-file archives resolve to an empty relative path; use
        # the member's parent directory instead.
        return os.path.dirname(member_names[0].rstrip("/"))

    return os.path.commonpath(member_names)
|
Extract an archive file. Supports ``.zip`` or ``.tar.gz``.
Args:
archive_file (Path): The file-path to the archive.
destination (Optional[Path]): Optionally provide a destination.
Defaults to the parent directory of the archive file.
|
extract_archive
|
python
|
ApeWorX/ape
|
src/ape/utils/os.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/os.py
|
Apache-2.0
|
def within_directory(directory: Path):
    """
    A context-manager for changing the cwd to the given path.

    Args:
        directory (Path): The directory to change.
    """
    original = Path.cwd()
    if original != directory:
        os.chdir(directory)

    try:
        yield
    finally:
        # Restore the original working directory if anything changed it.
        if Path.cwd() != original:
            os.chdir(original)
|
A context-manager for changing the cwd to the given path.
Args:
directory (Path): The directory to change.
|
within_directory
|
python
|
ApeWorX/ape
|
src/ape/utils/os.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/os.py
|
Apache-2.0
|
def spawn(target, *args, **kwargs):
    """
    Spawn a new daemon thread. Borrowed from the ``py-geth`` library.

    Args:
        target (Callable): The function to run in the thread.
        *args: Positional arguments forwarded to ``target``.
        **kwargs: Keyword arguments forwarded to ``target``.

    Returns:
        threading.Thread: The started daemon thread.
    """
    # Daemon threads do not block interpreter shutdown.
    worker = threading.Thread(target=target, args=args, kwargs=kwargs, daemon=True)
    worker.start()
    return worker
|
Spawn a new daemon thread. Borrowed from the ``py-geth`` library.
|
spawn
|
python
|
ApeWorX/ape
|
src/ape/utils/process.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/process.py
|
Apache-2.0
|
def allow_disconnected(fn: Callable):
    """
    A decorator that instead of raising :class:`~ape.exceptions.ProviderNotConnectedError`
    warns and returns ``None``.

    Usage example::

        from typing import Optional
        from ape.types import SnapshotID
        from ape.utils import return_none_when_disconnected

        @allow_disconnected
        def try_snapshot(self) -> Optional[SnapshotID]:
            return self.chain.snapshot()

    Args:
        fn (Callable): The function to wrap.

    Returns:
        Callable: The wrapped function.
    """
    # Local import keeps module import-time behavior unchanged.
    from functools import wraps

    # BUGFIX: preserve the wrapped function's name/doc/signature metadata,
    # which the bare inner function was clobbering.
    @wraps(fn)
    def inner(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except ProviderNotConnectedError:
            logger.warning("Provider is not connected.")
            return None

    return inner
|
A decorator that instead of raising :class:`~ape.exceptions.ProviderNotConnectedError`
warns and returns ``None``.
Usage example::
from typing import Optional
from ape.types import SnapshotID
from ape.utils import return_none_when_disconnected
@allow_disconnected
def try_snapshot(self) -> Optional[SnapshotID]:
return self.chain.snapshot()
|
allow_disconnected
|
python
|
ApeWorX/ape
|
src/ape/utils/rpc.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/rpc.py
|
Apache-2.0
|
def stream_response(download_url: str, progress_bar_description: str = "Downloading") -> bytes:
    """
    Download HTTP content by streaming and returning the bytes.
    Progress bar will be displayed in the CLI.

    Args:
        download_url (str): String to get files to download.
        progress_bar_description (str): Downloading word.

    Returns:
        bytes: Content in bytes to show the progress.
    """
    response = requests.get(download_url, stream=True)
    response.raise_for_status()
    total_size = int(response.headers.get("content-length", 0))
    progress_bar = tqdm(total=total_size, unit="iB", unit_scale=True, leave=False)
    progress_bar.set_description(progress_bar_description)
    # BUGFIX: do not pass ``decode_unicode=True`` — it can make
    # ``iter_content`` yield ``str`` chunks for text responses, breaking
    # the ``bytes`` contract (and the former ``bytes += str`` concat).
    chunks = []
    for data in response.iter_content(1024):
        progress_bar.update(len(data))
        chunks.append(data)

    progress_bar.close()
    # Join once rather than quadratic ``bytes`` concatenation.
    return b"".join(chunks)
|
Download HTTP content by streaming and returning the bytes.
Progress bar will be displayed in the CLI.
Args:
download_url (str): String to get files to download.
progress_bar_description (str): Downloading word.
Returns:
bytes: Content in bytes to show the progress.
|
stream_response
|
python
|
ApeWorX/ape
|
src/ape/utils/rpc.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/rpc.py
|
Apache-2.0
|
def generate_dev_accounts(
    mnemonic: str = DEFAULT_TEST_MNEMONIC,
    number_of_accounts: int = DEFAULT_NUMBER_OF_TEST_ACCOUNTS,
    hd_path: str = DEFAULT_TEST_HD_PATH,
    start_index: int = 0,
) -> list[GeneratedDevAccount]:
    """
    Create accounts from the given test mnemonic.
    Use these accounts (or the mnemonic) in chain-genesis
    for testing providers.

    Args:
        mnemonic (str): mnemonic phrase or seed words.
        number_of_accounts (int): Number of accounts. Defaults to ``10``.
        hd_path (str): Hard Wallets/HD Keys derivation path format.
          Defaults to ``"m/44'/60'/0'/0"``.
        start_index (int): The index to start from in the path. Defaults
          to 0.

    Returns:
        list[:class:`~ape.utils.GeneratedDevAccount`]: List of development accounts.
    """
    # perf: lazy imports so module loads faster.
    from eth_account.hdaccount.mnemonic import Mnemonic

    seed = Mnemonic.to_seed(mnemonic)

    # Ensure the derivation path contains a format placeholder for the
    # account index; append one when missing.
    if "{}" in hd_path or "{0}" in hd_path:
        hd_path_format = hd_path
    else:
        hd_path_format = f"{hd_path.rstrip('/')}/{{}}"

    index_range = range(start_index, start_index + number_of_accounts)
    return [_generate_dev_account(hd_path_format, index, seed) for index in index_range]
|
Create accounts from the given test mnemonic.
Use these accounts (or the mnemonic) in chain-genesis
for testing providers.
Args:
mnemonic (str): mnemonic phrase or seed words.
number_of_accounts (int): Number of accounts. Defaults to ``10``.
hd_path(str): Hard Wallets/HD Keys derivation path format.
Defaults to ``"m/44'/60'/0'/0"``.
start_index (int): The index to start from in the path. Defaults
to 0.
Returns:
list[:class:`~ape.utils.GeneratedDevAccount`]: List of development accounts.
|
generate_dev_accounts
|
python
|
ApeWorX/ape
|
src/ape/utils/testing.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/testing.py
|
Apache-2.0
|
def _validate_account_passphrase(passphrase: str) -> str:
    """
    Make sure given passphrase is valid for account encryption.

    Args:
        passphrase (str): The passphrase to validate.

    Returns:
        str: The validated passphrase (unchanged).
    """
    # A missing or non-string passphrase is a hard error.
    if not isinstance(passphrase, str) or not passphrase:
        raise AccountsError("Account file encryption passphrase must be provided.")

    # Weaknesses only warn; the caller may proceed anyway.
    if len(passphrase) < MIN_PASSPHRASE_LENGTH:
        warn("Passphrase length is extremely short. Consider using something longer.")

    if not (_has_num(passphrase) or _has_special(passphrase)):
        warn("Passphrase complexity is simple. Consider using numbers and special characters.")

    return passphrase
|
Make sure given passphrase is valid for account encryption
|
_validate_account_passphrase
|
python
|
ApeWorX/ape
|
src/ape/utils/validators.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape/utils/validators.py
|
Apache-2.0
|
def set_autosign(self, enabled: bool, passphrase: Optional[str] = None):
    """
    Allow this account to automatically sign messages and transactions.

    Args:
        enabled (bool): ``True`` to enable, ``False`` to disable.
        passphrase (Optional[str]): Optionally provide the passphrase.
          If not provided, you will be prompted to enter it.
    """
    if enabled:
        # Unlock first (may prompt for the passphrase when not given).
        self.unlock(passphrase=passphrase)
        logger.warning("Danger! This account will now sign any transaction it's given.")

    self.__autosign = enabled
    if not enabled:
        # Re-lock if was turning off
        self.locked = True
        # Drop the cached key so the passphrase is required again.
        self.__cached_key = None
|
Allow this account to automatically sign messages and transactions.
Args:
enabled (bool): ``True`` to enable, ``False`` to disable.
passphrase (Optional[str]): Optionally provide the passphrase.
If not provided, you will be prompted to enter it.
|
set_autosign
|
python
|
ApeWorX/ape
|
src/ape_accounts/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_accounts/accounts.py
|
Apache-2.0
|
def _write_and_return_account(
    alias: str, passphrase: str, account: "LocalAccount"
) -> KeyfileAccount:
    """Write an account to disk and return an Ape KeyfileAccount"""
    accounts_folder = ManagerAccessMixin.account_manager.containers["accounts"].data_folder
    keyfile_path = accounts_folder.joinpath(f"{alias}.json")
    # Encrypt the private key with the passphrase before persisting.
    encrypted_key = EthAccount.encrypt(account.key, passphrase)
    keyfile_path.write_text(json.dumps(encrypted_key), encoding="utf8")
    return KeyfileAccount(keyfile_path=keyfile_path)
|
Write an account to disk and return an Ape KeyfileAccount
|
_write_and_return_account
|
python
|
ApeWorX/ape
|
src/ape_accounts/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_accounts/accounts.py
|
Apache-2.0
|
def generate_account(
    alias: str, passphrase: str, hd_path: str = ETHEREUM_DEFAULT_PATH, word_count: int = 12
) -> tuple[KeyfileAccount, str]:
    """
    Generate a new account.

    Args:
        alias (str): The alias name of the account.
        passphrase (str): Passphrase used to encrypt the account storage file.
        hd_path (str): The hierarchical deterministic path to use when generating the account.
          Defaults to `m/44'/60'/0'/0/0`.
        word_count (int): The amount of words to use in the generated mnemonic.

    Returns:
        Tuple of :class:`~ape_accounts.accounts.KeyfileAccount` and mnemonic for the generated
        account.
    """
    # Mnemonic-based features require this opt-in.
    EthAccount.enable_unaudited_hdwallet_features()
    validated_alias = _validate_account_alias(alias)
    validated_passphrase = _validate_account_passphrase(passphrase)
    account, mnemonic = EthAccount.create_with_mnemonic(num_words=word_count, account_path=hd_path)
    keyfile_account = _write_and_return_account(validated_alias, validated_passphrase, account)
    return keyfile_account, mnemonic
|
Generate a new account.
Args:
alias (str): The alias name of the account.
passphrase (str): Passphrase used to encrypt the account storage file.
hd_path (str): The hierarchical deterministic path to use when generating the account.
Defaults to `m/44'/60'/0'/0/0`.
word_count (int): The amount of words to use in the generated mnemonic.
Returns:
Tuple of :class:`~ape_accounts.accounts.KeyfileAccount` and mnemonic for the generated
account.
|
generate_account
|
python
|
ApeWorX/ape
|
src/ape_accounts/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_accounts/accounts.py
|
Apache-2.0
|
def import_account_from_mnemonic(
    alias: str, passphrase: str, mnemonic: str, hd_path: str = ETHEREUM_DEFAULT_PATH
) -> KeyfileAccount:
    """
    Import a new account from a mnemonic seed phrase.

    Args:
        alias (str): The alias name of the account.
        passphrase (str): Passphrase used to encrypt the account storage file.
        mnemonic (str): List of space-separated words representing the mnemonic seed phrase.
        hd_path (str): The hierarchical deterministic path to use when generating the account.
          Defaults to `m/44'/60'/0'/0/0`.

    Returns:
        :class:`~ape_accounts.accounts.KeyfileAccount`: The imported keyfile account.
    """
    # Mnemonic-based features require this opt-in.
    EthAccount.enable_unaudited_hdwallet_features()
    alias = _validate_account_alias(alias)
    passphrase = _validate_account_passphrase(passphrase)
    account = EthAccount.from_mnemonic(mnemonic, account_path=hd_path)
    return _write_and_return_account(alias, passphrase, account)
|
Import a new account from a mnemonic seed phrase.
Args:
alias (str): The alias name of the account.
passphrase (str): Passphrase used to encrypt the account storage file.
mnemonic (str): List of space-separated words representing the mnemonic seed phrase.
hd_path (str): The hierarchical deterministic path to use when generating the account.
Defaults to `m/44'/60'/0'/0/0`.
Returns:
Tuple of AccountAPI and mnemonic for the generated account.
|
import_account_from_mnemonic
|
python
|
ApeWorX/ape
|
src/ape_accounts/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_accounts/accounts.py
|
Apache-2.0
|
def import_account_from_private_key(
    alias: str, passphrase: str, private_key: str
) -> KeyfileAccount:
    """
    Import a new account from a private key.

    Args:
        alias (str): The alias name of the account.
        passphrase (str): Passphrase used to encrypt the account storage file.
        private_key (str): Hex string private key to import.

    Returns:
        :class:`~ape_accounts.accounts.KeyfileAccount`: The imported keyfile account.
    """
    alias = _validate_account_alias(alias)
    passphrase = _validate_account_passphrase(passphrase)
    account = EthAccount.from_key(to_bytes(hexstr=private_key))
    return _write_and_return_account(alias, passphrase, account)
|
Import a new account from a mnemonic seed phrase.
Args:
alias (str): The alias name of the account.
passphrase (str): Passphrase used to encrypt the account storage file.
private_key (str): Hex string private key to import.
Returns:
Tuple of AccountAPI and mnemonic for the generated account.
|
import_account_from_private_key
|
python
|
ApeWorX/ape
|
src/ape_accounts/accounts.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_accounts/accounts.py
|
Apache-2.0
|
def cli():
    """
    Command-line helper for managing local accounts. You can unlock local accounts from
    scripts or the console using the accounts.load() method.
    """
    # NOTE: Intentionally empty — presumably a `click` group entry point whose
    #   decorator lives outside this view; subcommands do the work. TODO confirm.
|
Command-line helper for managing local accounts. You can unlock local accounts from
scripts or the console using the accounts.load() method.
|
cli
|
python
|
ApeWorX/ape
|
src/ape_accounts/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_accounts/_cli.py
|
Apache-2.0
|
def show_delegate(account):
    """Show if an existing delegate is authorized for account"""
    delegate = account.delegate
    if delegate:
        click.echo(f"{account.address} is delegated to {delegate.address}")
    else:
        # Red output signals that no delegation is configured.
        click.secho(f"{account.address} has no delegate", fg="red")
|
Show if an existing delegate is authorized for account
|
show_delegate
|
python
|
ApeWorX/ape
|
src/ape_accounts/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_accounts/_cli.py
|
Apache-2.0
|
def authorize_delegate(account, receiver, data, gas_limit, contract):
    """Authorize and set delegate for account"""
    # Delegation happens through the account API; echo confirmation after.
    account.set_delegate(contract, receiver=receiver, data=data, gas_limit=gas_limit)
    click.echo(f"{account.address} is now delegated to {contract}")
|
Authorize and set delegate for account
|
authorize_delegate
|
python
|
ApeWorX/ape
|
src/ape_accounts/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_accounts/_cli.py
|
Apache-2.0
|
def _get_database_file(self, ecosystem_name: str, network_name: str) -> Path:
    """
    Allows us to figure out what the file *will be*, mostly used for database management.

    Args:
        ecosystem_name (str): Name of the ecosystem to store data for (ex: ethereum)
        network_name (str): name of the network to store data for (ex: mainnet)

    Raises:
        :class:`~ape.exceptions.QueryEngineError`: If a local network is provided.

    Returns:
        Path: Where the cache database file lives (or will live).
    """
    if network_name == LOCAL_NETWORK_NAME:
        # NOTE: no need to cache local network, no use for data
        raise QueryEngineError("Cannot cache local data")

    # NOTE: send query to pull from upstream
    resolved_network = (
        network_name.replace("-fork", "") if "-fork" in network_name else network_name
    )
    return self.config_manager.DATA_FOLDER / ecosystem_name / resolved_network / "cache.db"
|
Allows us to figure out what the file *will be*, mostly used for database management.
Args:
ecosystem_name (str): Name of the ecosystem to store data for (ex: ethereum)
network_name (str): name of the network to store data for (ex: mainnet)
Raises:
:class:`~ape.exceptions.QueryEngineError`: If a local network is provided.
|
_get_database_file
|
python
|
ApeWorX/ape
|
src/ape_cache/query.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_cache/query.py
|
Apache-2.0
|
def init_database(self, ecosystem_name: str, network_name: str):
    """
    Initialize the SQLite database for caching of provider data.

    Args:
        ecosystem_name (str): Name of the ecosystem to store data for (ex: ethereum)
        network_name (str): name of the network to store data for (ex: mainnet)

    Raises:
        :class:`~ape.exceptions.QueryEngineError`: When the database has already
          been initialized (the file exists), or when a local network is given.
    """
    database_file = self._get_database_file(ecosystem_name, network_name)
    if database_file.is_file():
        raise QueryEngineError("Database has already been initialized")

    # NOTE: Make sure database folder location has been created
    database_file.parent.mkdir(exist_ok=True, parents=True)
    models.Base.metadata.create_all(  # type: ignore
        bind=create_engine(self._get_sqlite_uri(database_file), pool_pre_ping=True)
    )
|
Initialize the SQLite database for caching of provider data.
Args:
ecosystem_name (str): Name of the ecosystem to store data for (ex: ethereum)
network_name (str): name of the network to store data for (ex: mainnet)
Raises:
:class:`~ape.exceptions.QueryEngineError`: When the database has not been initialized
|
init_database
|
python
|
ApeWorX/ape
|
src/ape_cache/query.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_cache/query.py
|
Apache-2.0
|
def purge_database(self, ecosystem_name: str, network_name: str):
    """
    Removes the SQLite database file from disk.

    Args:
        ecosystem_name (str): Name of the ecosystem to store data for (ex: ethereum)
        network_name (str): name of the network to store data for (ex: mainnet)

    Raises:
        :class:`~ape.exceptions.QueryEngineError`: When the database has not been initialized
    """
    db_file = self._get_database_file(ecosystem_name, network_name)
    if not db_file.is_file():
        # Nothing to delete means the cache was never initialized.
        raise QueryEngineError("Database must be initialized")

    db_file.unlink()
|
Removes the SQLite database file from disk.
Args:
ecosystem_name (str): Name of the ecosystem to store data for (ex: ethereum)
network_name (str): name of the network to store data for (ex: mainnet)
Raises:
:class:`~ape.exceptions.QueryEngineError`: When the database has not been initialized
|
purge_database
|
python
|
ApeWorX/ape
|
src/ape_cache/query.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_cache/query.py
|
Apache-2.0
|
def database_connection(self):
    """
    Returns a connection for the currently active network.

    **NOTE**: Does NOT create the database. When the database file does not
    exist, this logs a hint, sets ``database_bypass``, and returns ``None``
    — use ``init_database`` to create it.

    Raises:
        :class:`~ape.exceptions.QueryEngineError`: If you are not connected to a provider.

    Returns:
        Optional[`sqlalchemy.engine.Connection`]: ``None`` for local networks,
          a missing database, or an unusable engine.
    """
    if self.provider.network.is_local:
        # Local data is never cached (see `_get_database_file`).
        return None

    if not self.network_manager.connected:
        raise QueryEngineError("Not connected to a provider")

    database_file = self._get_database_file(
        self.provider.network.ecosystem.name, self.provider.network.name
    )
    if not database_file.is_file():
        # NOTE: Raising `info` here hints user that they can initialize the cache db
        logger.info("`ape-cache` database has not been initialized")
        self.database_bypass = True
        return None

    try:
        sqlite_uri = self._get_sqlite_uri(database_file)
        return create_engine(sqlite_uri, pool_pre_ping=True).connect()
    except QueryEngineError as e:
        logger.debug(f"Exception when querying:\n{e}")
        return None
    except Exception as e:
        # Unknown failure: warn and bypass the cache for the rest of the session.
        logger.warning(f"Unhandled exception when querying:\n{e}")
        self.database_bypass = True
        return None
|
Returns a connection for the currently active network.
**NOTE**: Creates a database if it doesn't exist.
Raises:
:class:`~ape.exceptions.QueryEngineError`: If you are not connected to a provider,
or if the database has not been initialized.
Returns:
Optional[`sqlalchemy.engine.Connection`]
|
database_connection
|
python
|
ApeWorX/ape
|
src/ape_cache/query.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_cache/query.py
|
Apache-2.0
|
def _estimate_query_clause(self, query: QueryType) -> Select:
    """
    A singledispatchmethod that returns a select statement.

    Args:
        query (QueryType): Choice of query type to perform a
          check of the number of rows that match the clause.

    Raises:
        :class:`~ape.exceptions.QueryEngineError`: When given an
          incompatible QueryType.

    Returns:
        `sqlalchemy.sql.expression.Select`
    """
    # Fallback for unregistered query types. Use the same single-line
    # message format as the other dispatch fallbacks in this engine
    # (`perform_query`, `_cache_update_clause`) instead of a triple-quoted
    # string that leaks raw indentation/newlines into the error text.
    raise QueryEngineError(
        "Not a compatible QueryType. For more details see our docs "
        "https://docs.apeworx.io/ape/stable/methoddocs/"
        "exceptions.html#ape.exceptions.QueryEngineError"
    )
|
A singledispatchmethod that returns a select statement.
Args:
query (QueryType): Choice of query type to perform a
check of the number of rows that match the clause.
Raises:
:class:`~ape.exceptions.QueryEngineError`: When given an
incompatible QueryType.
Returns:
`sqlalchemy.sql.expression.Select`
|
_estimate_query_clause
|
python
|
ApeWorX/ape
|
src/ape_cache/query.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_cache/query.py
|
Apache-2.0
|
def estimate_query(self, query: QueryType) -> Optional[int]:
    """
    Method called by the client to return a query time estimate.

    Args:
        query (QueryType): Choice of query type to perform a
          check of the number of rows that match the clause.

    Returns:
        Optional[int]: An estimate, or ``None`` when the cache cannot
        service the query.
    """
    # NOTE: Because of Python shortcircuiting, the first time `database_connection` is missing
    # this will lock the class var `database_bypass` in place for the rest of the session
    if self.database_bypass or self.database_connection is None:
        # No database, or some other issue
        return None
    # NOTE(review): `database_connection` appears to be a property and is
    # evaluated twice here (the check above and the `with` below), which
    # would open two connections -- confirm this is intended.
    try:
        with self.database_connection as conn:
            result = conn.execute(self._estimate_query_clause(query))
            if not result:
                return None
            return self._compute_estimate(query, result)
    except QueryEngineError as err:
        logger.debug(f"Bypassing cache database: {err}")
        # Note: The reason we return None instead of failing is that we want
        # a failure of the query to bypass the query logic so that the
        # estimation phase does not fail in `QueryManager`.
        return None
|
Method called by the client to return a query time estimate.
Args:
query (QueryType): Choice of query type to perform a
check of the number of rows that match the clause.
Returns:
Optional[int]
|
estimate_query
|
python
|
ApeWorX/ape
|
src/ape_cache/query.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_cache/query.py
|
Apache-2.0
|
def perform_query(self, query: QueryType) -> Iterator:  # type: ignore
    """
    Performs the requested query from cache.

    Args:
        query (QueryType): Choice of query type to perform a
          check of the number of rows that match the clause.

    Raises:
        :class:`~ape.exceptions.QueryEngineError`: When given an
          incompatible QueryType, or encounters some sort of error
          in the database or estimation logic.

    Returns:
        Iterator
    """
    # Fallback implementation -- presumably the base of a singledispatchmethod
    # (like `_estimate_query_clause`), with registered overloads handling the
    # supported query types. TODO confirm the dispatch registration.
    raise QueryEngineError(
        "Not a compatible QueryType. For more details see our docs "
        "https://docs.apeworx.io/ape/stable/methoddocs/"
        "exceptions.html#ape.exceptions.QueryEngineError"
    )
|
Performs the requested query from cache.
Args:
query (QueryType): Choice of query type to perform a
check of the number of rows that match the clause.
Raises:
:class:`~ape.exceptions.QueryEngineError`: When given an
incompatible QueryType, or encounters some sort of error
in the database or estimation logic.
Returns:
Iterator
|
perform_query
|
python
|
ApeWorX/ape
|
src/ape_cache/query.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_cache/query.py
|
Apache-2.0
|
def _cache_update_clause(self, query: QueryType) -> Insert:
    """
    Update cache database Insert statement.

    Args:
        query (QueryType): Choice of query type to perform a
          check of the number of rows that match the clause.

    Raises:
        :class:`~ape.exceptions.QueryEngineError`: When given an
          incompatible QueryType, or encounters some sort of error
          in the database or estimation logic.

    Returns:
        `sqlalchemy.sql.Expression.Insert`
    """
    # Can't cache this query
    # (fallback for query types with no registered insert clause).
    raise QueryEngineError(
        "Not a compatible QueryType. For more details see our docs "
        "https://docs.apeworx.io/ape/stable/methoddocs/"
        "exceptions.html#ape.exceptions.QueryEngineError"
    )
|
Update cache database Insert statement.
Args:
query (QueryType): Choice of query type to perform a
check of the number of rows that match the clause.
Raises:
:class:`~ape.exceptions.QueryEngineError`: When given an
incompatible QueryType, or encounters some sort of error
in the database or estimation logic.
Returns:
`sqlalchemy.sql.Expression.Insert`
|
_cache_update_clause
|
python
|
ApeWorX/ape
|
src/ape_cache/query.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_cache/query.py
|
Apache-2.0
|
def init(ecosystem, network):
    """
    Initialize the SQLite cache database file for a given network.

    Ape cannot store local-network data in this database, so both an
    ecosystem name and a network name are required.
    """
    ecosystem_name = ecosystem.name
    network_name = network.name
    engine = get_engine()
    engine.init_database(ecosystem_name, network_name)
    logger.success(f"Caching database initialized for {ecosystem_name}:{network_name}.")
|
Initializes an SQLite database and creates a file to store data
from the provider.
Note that ape cannot store local data in this database. You have to
give an ecosystem name and a network name to initialize the database.
|
init
|
python
|
ApeWorX/ape
|
src/ape_cache/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_cache/_cli.py
|
Apache-2.0
|
def query(query_str):
    """
    Allows for a query of the database from an SQL statement.
    Note that without an SQL statement, this method will not return
    any data from the caching database.
    Also note that an ecosystem name and a network name are required
    to make the correct connection to the database.
    """
    # NOTE(review): passing a raw SQL string to `Connection.execute` relies on
    # SQLAlchemy 1.x auto-coercion; SQLAlchemy 2.x requires wrapping with
    # `sqlalchemy.text(query_str)` -- confirm against the pinned version.
    with get_engine().database_connection as conn:
        results = conn.execute(query_str).fetchall()
        if results:
            # Lazy-import pandas for display only when there are rows to show.
            pd = import_module("pandas")
            click.echo(pd.DataFrame(results))
|
Allows for a query of the database from an SQL statement.
Note that without an SQL statement, this method will not return
any data from the caching database.
Also note that an ecosystem name and a network name are required
to make the correct connection to the database.
|
query
|
python
|
ApeWorX/ape
|
src/ape_cache/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_cache/_cli.py
|
Apache-2.0
|
def purge(ecosystem, network):
    """
    Destructively purge the selected cache database.

    This removes the database file from disk; re-run ``init`` afterwards
    to store data again. An ecosystem name and network name are required
    to select the database to purge.
    """
    eco_name = network.ecosystem.name
    net_name = network.name
    engine = get_engine()
    engine.purge_database(eco_name, net_name)
    logger.success(f"Caching database purged for {eco_name}:{net_name}.")
|
Purges data from the selected database instance.
Note that this is a destructive purge, and will remove the database file from disk.
If you want to store data in the caching system, you will have to
re-initiate the database following a purge.
Note that an ecosystem name and network name are required to
purge the database of choice.
|
purge
|
python
|
ApeWorX/ape
|
src/ape_cache/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_cache/_cli.py
|
Apache-2.0
|
def serialize_exclude(self, exclude, info):
    """
    Serialize the ``exclude`` collection, restoring the ``r"..."`` prefix
    on compiled regular expressions so values round-trip to-and-from config.
    """
    return [
        f'r"{item.pattern}"' if isinstance(item, Pattern) else item
        for item in exclude
    ]
|
Exclude is put back with the weird r-prefix so we can
go to-and-from.
|
serialize_exclude
|
python
|
ApeWorX/ape
|
src/ape_compile/config.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_compile/config.py
|
Apache-2.0
|
def cli(
    cli_ctx,
    project,
    file_paths: set[Path],
    use_cache: bool,
    display_size: bool,
    include_dependencies,
    excluded_compilers: list[str],
    config_override,
):
    """
    Compiles the manifest for this project and saves the results
    back to the manifest.

    Note that ape automatically recompiles any changed contracts each time
    a project is loaded. You do not have to manually trigger a recompile.
    """
    # Track overall outcome so we can warn (nothing compiled) or exit non-zero (errors).
    compiled = False
    errored = False
    if cfg := config_override:
        project.reconfigure(**cfg)
    # Compile the explicitly-requested source files, if any.
    if file_paths:
        contracts = {
            k: v.contract_type
            for k, v in project.load_contracts(
                *file_paths, use_cache=use_cache, excluded_compilers=excluded_compilers
            ).items()
        }
        cli_ctx.logger.success("'local project' compiled.")
        compiled = True
        if display_size:
            _display_byte_code_sizes(cli_ctx, contracts)
    # Optionally compile dependencies (CLI flag or config setting).
    if (include_dependencies or project.config.compile.include_dependencies) and len(
        project.dependencies
    ) > 0:
        for dependency in project.dependencies:
            if use_cache and dependency.compiled:
                continue
            # Even if compiling we failed, we at least tried,
            # and so we don't need to warn "Nothing to compile".
            compiled = True
            try:
                contract_types: dict[str, ContractType] = {
                    c.contract_type.name: c.contract_type
                    for c in dependency.compile(use_cache=use_cache, allow_install=True).values()
                }
            except Exception as err:
                # A failing dependency does not abort the rest of the loop.
                msg = f"Dependency '{dependency.name}' not installed. Reason: {err}"
                cli_ctx.logger.error(msg)
                errored = True
                continue
            cli_ctx.logger.success(f"'{dependency.project.name}' compiled.")
            if display_size:
                _display_byte_code_sizes(cli_ctx, contract_types)
    if not compiled:
        from ape.utils.os import clean_path  # perf: lazy import

        folder = clean_path(project.contracts_folder)
        cli_ctx.logger.warning(f"Nothing to compile ({folder}).")
    if errored:
        # Ensure exit code.
        sys.exit(1)
|
Compiles the manifest for this project and saves the results
back to the manifest.
Note that ape automatically recompiles any changed contracts each time
a project is loaded. You do not have to manually trigger a recompile.
|
cli
|
python
|
ApeWorX/ape
|
src/ape_compile/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_compile/_cli.py
|
Apache-2.0
|
def ape(self, line: str = ""):
    """
    Run Ape CLI commands within an ``ape console`` session.

    Usage example::

        %ape accounts list
    """
    tokens = (token.strip("\"' \t\n") for token in shlex.split(line))
    if any(token == "console" for token in tokens):
        # Prevent running console within console because usually bad
        # stuff happens when you try to do this.
        raise ValueError("Unable to run `console` within a console.")
    result = CliRunner().invoke(cli, line)
    if output := result.output:
        click.echo(output)
    return result
|
Run Ape CLI commands within an ``ape console`` session.
Usage example::
%ape accounts list
|
ape
|
python
|
ApeWorX/ape
|
src/ape_console/plugin.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_console/plugin.py
|
Apache-2.0
|
def bal(self, line: str = ""):
    """
    Show an account balance in human-readable form.

    Usage example::

        account = accounts.load("me")
        %bal account
    """
    if not line:
        raise ValueError("Missing argument.")
    provider = ape.networks.provider
    ecosystem = provider.network.ecosystem
    # SECURITY NOTE: `eval` of console input is intentional -- this is an
    # interactive IPython magic, so the user already has code execution.
    result = eval(line, self.ipython.user_global_ns, self.ipython.user_ns)
    if isinstance(result, str) and result.startswith("0x"):
        address = result
    else:
        # Handles accounts, ENS, integers, BaseAddress, and aliases.
        address = ManagerAccessMixin.account_manager.resolve_address(result) or f"{result}"
    decimals = ecosystem.fee_token_decimals
    symbol = ecosystem.fee_token_symbol
    balance = provider.get_balance(address)
    # e.g. "1.23456789 ETH" -- rounded to 8 decimal places.
    return f"{round(balance / 10**decimals, 8)} {symbol}"
|
Show an account balance in human-readable form.
Usage example::
account = accounts.load("me")
%bal account
|
bal
|
python
|
ApeWorX/ape
|
src/ape_console/plugin.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_console/plugin.py
|
Apache-2.0
|
def cli(cli_ctx, project, code):
    """Opens a console for the local project."""
    # Enable verbose console output only when the logger is at DEBUG level.
    return console(
        project=project,
        verbose=cli_ctx.logger.level == logging.DEBUG,
        code=code,
    )
|
Opens a console for the local project.
|
cli
|
python
|
ApeWorX/ape
|
src/ape_console/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_console/_cli.py
|
Apache-2.0
|
def create_transaction(self, **kwargs) -> "TransactionAPI":
    """
    Returns a transaction using the given constructor kwargs.

    **NOTE**: This generally should not be called by the user since this API method is used as a
    hook for Ecosystems to customize how transactions are created.

    Accepts many key aliases (e.g. ``gas_limit``/``gasLimit``/``gas``) and
    deduces the transaction type from which keys are present when ``type``
    is not given explicitly.

    Returns:
        :class:`~ape.api.transactions.TransactionAPI`
    """
    # Handle all aliases.
    tx_data = dict(kwargs)
    tx_data = _correct_key(
        "max_priority_fee",
        tx_data,
        ("max_priority_fee_per_gas", "maxPriorityFeePerGas", "maxPriorityFee"),
    )
    tx_data = _correct_key("max_fee", tx_data, ("max_fee_per_gas", "maxFeePerGas", "maxFee"))
    tx_data = _correct_key("gas", tx_data, ("gas_limit", "gasLimit"))
    tx_data = _correct_key("gas_price", tx_data, ("gasPrice",))
    tx_data = _correct_key(
        "type",
        tx_data,
        ("txType", "tx_type", "txnType", "txn_type", "transactionType", "transaction_type"),
    )
    tx_data = _correct_key("maxFeePerBlobGas", tx_data, ("max_fee_per_blob_gas",))
    tx_data = _correct_key("blobVersionedHashes", tx_data, ("blob_versioned_hashes",))
    # Handle unique value specifications, such as "1 ether".
    if "value" in tx_data and not isinstance(tx_data["value"], int):
        value = tx_data["value"] or 0  # Convert None to 0.
        tx_data["value"] = self.conversion_manager.convert(value, int)
    # None is not allowed, the user likely means `b""`.
    if "data" in tx_data and tx_data["data"] is None:
        tx_data["data"] = b""
    # Deduce the transaction type.
    transaction_types: dict[TransactionType, type[TransactionAPI]] = {
        TransactionType.STATIC: StaticFeeTransaction,
        TransactionType.ACCESS_LIST: AccessListTransaction,
        TransactionType.DYNAMIC: DynamicFeeTransaction,
        TransactionType.SHARED_BLOB: SharedBlobTransaction,
        TransactionType.SET_CODE: SetCodeTransaction,
    }
    if "type" in tx_data:
        # May be None in data.
        if tx_data["type"] is None:
            # Explicit `None` means used default.
            version = self.default_transaction_type
        elif isinstance(tx_data["type"], TransactionType):
            version = tx_data["type"]
        elif isinstance(tx_data["type"], int):
            version = TransactionType(tx_data["type"])
        else:
            # Using hex values or alike.
            version = TransactionType(self.conversion_manager.convert(tx_data["type"], int))
    # NOTE: Determine these in reverse order
    # (most specific / newest transaction types are checked first).
    elif "authorizationList" in tx_data:
        version = TransactionType.SET_CODE
    elif "maxFeePerBlobGas" in tx_data or "blobVersionedHashes" in tx_data:
        version = TransactionType.SHARED_BLOB
    elif "max_fee" in tx_data or "max_priority_fee" in tx_data:
        version = TransactionType.DYNAMIC
    elif "access_list" in tx_data or "accessList" in tx_data:
        version = TransactionType.ACCESS_LIST
    elif "gas_price" in tx_data:
        version = TransactionType.STATIC
    else:
        version = self.default_transaction_type
    tx_data["type"] = version.value
    # This causes problems in pydantic for some reason.
    # NOTE: This must happen after deducing the tx type!
    if "gas_price" in tx_data and tx_data["gas_price"] is None:
        del tx_data["gas_price"]
    txn_class = transaction_types[version]
    if "required_confirmations" not in tx_data or tx_data["required_confirmations"] is None:
        # Attempt to use default required-confirmations from `ape-config.yaml`.
        required_confirmations = 0
        active_provider = self.network_manager.active_provider
        if active_provider:
            required_confirmations = active_provider.network.required_confirmations
        tx_data["required_confirmations"] = required_confirmations
    if isinstance(tx_data.get("chainId"), str):
        # Hex-string chain ID, e.g. "0x1".
        tx_data["chainId"] = int(tx_data["chainId"], 16)
    elif (
        "chainId" not in tx_data or tx_data["chainId"] is None
    ) and self.network_manager.active_provider is not None:
        tx_data["chainId"] = self.provider.chain_id
    if "input" in tx_data:
        # `input` is an alias for `data`.
        tx_data["data"] = tx_data.pop("input")
    if all(field in tx_data for field in ("v", "r", "s")):
        # Pre-signed transaction: carry the signature through.
        tx_data["signature"] = TransactionSignature(
            v=tx_data["v"],
            r=bytes(tx_data["r"]),
            s=bytes(tx_data["s"]),
        )
    if "gas" not in tx_data:
        tx_data["gas"] = None
    return txn_class.model_validate(tx_data)
|
Returns a transaction using the given constructor kwargs.
**NOTE**: This generally should not be called by the user since this API method is used as a
hook for Ecosystems to customize how transactions are created.
Returns:
:class:`~ape.api.transactions.TransactionAPI`
|
create_transaction
|
python
|
ApeWorX/ape
|
src/ape_ethereum/ecosystem.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/ecosystem.py
|
Apache-2.0
|
def get_deployment_address(self, address: AddressType, nonce: int) -> AddressType:
    """
    Calculate the deployment address of a contract before it is deployed,
    which is useful when the address is an argument to another contract's
    deployment and the first contract is not yet deployed.
    """
    # EVM CREATE rule: keccak256(rlp([sender, nonce]))[12:].
    rlp_payload = rlp.encode([to_bytes(hexstr=address), nonce])
    return self.decode_address(keccak(rlp_payload)[12:])
|
Calculate the deployment address of a contract before it is deployed.
This is useful if the address is an argument to another contract's deployment
and you have not yet deployed the first contract yet.
|
get_deployment_address
|
python
|
ApeWorX/ape
|
src/ape_ethereum/ecosystem.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/ecosystem.py
|
Apache-2.0
|
def _sanitize_web3_url(msg: str) -> str:
    """
    Sanitize the RPC URI embedded in a web3 log message so credentials
    in the URL are not leaked into logs.

    Args:
        msg (str): The raw log message, possibly containing ``"URI: <url>"``.

    Returns:
        str: The message with the URI sanitized, or unchanged when no
        concrete URI is present.
    """
    # `auto` used by some providers to figure it out automatically
    if "URI: " not in msg or "URI: auto" in msg:
        return msg
    # Split only on the FIRST occurrence so any later text (even another
    # "URI: ") is preserved; a plain split() dropped everything past parts[1].
    prefix, _, remainder = msg.partition("URI: ")
    rest = remainder.split(" ")
    # * To remove the `,` from the url http://127.0.0.1:8545,
    url = rest[0].rstrip(",")
    sanitized_url = sanitize_url(url)
    suffix = " ".join(rest[1:])
    # rstrip avoids a dangling trailing space when there is no suffix.
    return f"{prefix.strip()} URI: {sanitized_url} {suffix}".rstrip()
|
Sanitize RPC URI from given log string
|
_sanitize_web3_url
|
python
|
ApeWorX/ape
|
src/ape_ethereum/provider.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/provider.py
|
Apache-2.0
|
def assert_web3_provider_uri_env_var_not_set():
    """
    Ensure Web3.py's $WEB3_PROVIDER_URI environment variable is not set.

    That variable makes Web3.py ignore Ape's network selection, so
    connecting is refused while it exists.

    Raises:
        :class:`~ape.exceptions.ProviderError`: If environment variable
          WEB3_PROVIDER_URI exists in ``os.environ``.
    """
    if WEB3_PROVIDER_URI_ENV_VAR_NAME in os.environ:
        # NOTE: This was the source of confusion for user when they noticed
        # Ape would only connect to RPC URL set by an environment variable
        # named $WEB3_PROVIDER_URI instead of whatever network they were telling Ape.
        raise ProviderError(
            "Ape does not support Web3.py's environment variable "
            f"${WEB3_PROVIDER_URI_ENV_VAR_NAME}. If you are using this environment "
            "variable name incidentally, please use a different name. If you are "
            "trying to set the network in Web3.py, please use Ape's `ape-config.yaml` "
            "or `--network` option instead."
        )
|
Environment variable $WEB3_PROVIDER_URI causes problems
when used with Ape (ignores Ape's networks). Use
this validator to eliminate the concern.
Raises:
:class:`~ape.exceptions.ProviderError`: If environment variable
WEB3_PROVIDER_URI exists in ``os.environ``.
|
assert_web3_provider_uri_env_var_not_set
|
python
|
ApeWorX/ape
|
src/ape_ethereum/provider.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/provider.py
|
Apache-2.0
|
def web3(self) -> Web3:
    """
    Access to the ``web3`` object as if you did ``Web3(HTTPProvider(uri))``.
    """
    instance = self._web3
    if not instance:
        # No connection has been established yet.
        raise ProviderNotConnectedError()
    return instance
|
Access to the ``web3`` object as if you did ``Web3(HTTPProvider(uri))``.
|
web3
|
python
|
ApeWorX/ape
|
src/ape_ethereum/provider.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/provider.py
|
Apache-2.0
|
def _configured_rpc(self) -> Optional[str]:
"""
First of URI, HTTP_URI, WS_URI, IPC_PATH
found in the provider_settings or config.
"""
# NOTE: Even though this only returns 1 value,
# each configured URI is passed in to web3 and
# will be used as each specific types of data
# is requested.
if rpc := self._configured_uri:
# The user specifically configured "uri:"
return rpc
elif rpc := self._configured_http_uri:
# Use their configured HTTP URI.
return rpc
elif rpc := self._configured_ws_uri:
# Use their configured WS URI.
return rpc
elif rpc := self._configured_ipc_path:
return rpc
return None
|
First of URI, HTTP_URI, WS_URI, IPC_PATH
found in the provider_settings or config.
|
_configured_rpc
|
python
|
ApeWorX/ape
|
src/ape_ethereum/provider.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/provider.py
|
Apache-2.0
|
def _get_connected_rpc(self, validator: Callable[[str], bool]) -> Optional[str]:
"""
The connected HTTP URI. If using providers
like `ape-node`, configure your URI and that will
be returned here instead.
"""
if web3 := self._web3:
if endpoint_uri := getattr(web3.provider, "endpoint_uri", None):
if isinstance(endpoint_uri, str) and validator(endpoint_uri):
return endpoint_uri
return None
|
The connected HTTP URI. If using providers
like `ape-node`, configure your URI and that will
be returned here instead.
|
_get_connected_rpc
|
python
|
ApeWorX/ape
|
src/ape_ethereum/provider.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/provider.py
|
Apache-2.0
|
def call_trace_approach(self) -> Optional[TraceApproach]:
    """
    The default tracing approach to use when building up a call-tree.
    By default, Ape attempts to use the faster approach: if the
    geth-call-tracer or parity are available, one of those is used
    instead of building a call-trace entirely from struct-logs.
    """
    # Prefer an explicitly-set approach; otherwise fall back to settings.
    return self._call_trace_approach or self.settings.get("call_trace_approach")
|
The default tracing approach to use when building up a call-tree.
By default, Ape attempts to use the faster approach. Meaning, if
geth-call-tracer or parity are available, Ape will use one of those
instead of building a call-trace entirely from struct-logs.
|
call_trace_approach
|
python
|
ApeWorX/ape
|
src/ape_ethereum/provider.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/provider.py
|
Apache-2.0
|
def create_access_list(
    self, transaction: TransactionAPI, block_id: Optional["BlockID"] = None
) -> list[AccessList]:
    """
    Get the access list for a transaction using ``eth_createAccessList``.

    Args:
        transaction (:class:`~ape.api.transactions.TransactionAPI`): The
          transaction to check.
        block_id (:class:`~ape.types.BlockID`): Optionally specify a block
          ID. Defaults to using the latest block.

    Returns:
        list[:class:`~ape_ethereum.transactions.AccessList`]
    """
    # NOTE: Using JSON mode since used in request data.
    tx_dict = transaction.model_dump(by_alias=True, mode="json", exclude={"chain_id"})
    tx_dict_converted = {}
    for key, val in tx_dict.items():
        if isinstance(val, int):
            # This RPC requires hex-str values.
            if val > 0:
                tx_dict_converted[key] = to_hex(val)
            # else: 0-values cause problems.
        else:
            tx_dict_converted[key] = val
    if not tx_dict_converted.get("to") and tx_dict.get("data") in (None, "0x"):
        # Contract creation with no data, can skip.
        return []
    arguments: list = [tx_dict_converted]
    if block_id is not None:
        arguments.append(block_id)
    result = self.make_request("eth_createAccessList", arguments)
    # The RPC returns {"accessList": [...]}; validate each entry.
    return [AccessList.model_validate(x) for x in result.get("accessList", [])]
|
Get the access list for a transaction use ``eth_createAccessList``.
Args:
transaction (:class:`~ape.api.transactions.TransactionAPI`): The
transaction to check.
block_id (:class:`~ape.types.BlockID`): Optionally specify a block
ID. Defaults to using the latest block.
Returns:
list[:class:`~ape_ethereum.transactions.AccessList`]
|
create_access_list
|
python
|
ApeWorX/ape
|
src/ape_ethereum/provider.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/provider.py
|
Apache-2.0
|
def has_poa_history(self) -> bool:
    """
    ``True`` if detected any PoA history. If the chain was _ever_ PoA, the special
    middleware is needed for web3.py. Provider plugins use this property when
    creating Web3 instances.
    """
    findings = False
    # Check both ends of the chain: a network may have changed consensus
    # somewhere in its history, so either block can carry PoA markers.
    for option in ("earliest", "latest"):
        try:
            block = self.web3.eth.get_block(option)  # type: ignore[arg-type]
        except ExtraDataLengthError:
            # web3.py itself determined the extraData is PoA-styled.
            findings = True
            break
        except Exception:
            # Some chains are "light" and we may not be able to detect
            # if it needs PoA middleware.
            continue
        else:
            # Block fetched cleanly: inspect it for PoA markers manually.
            findings = (
                "proofOfAuthorityData" in block
                or len(block.get("extraData", "")) > MAX_EXTRADATA_LENGTH
            )
            if findings:
                break
    return findings
|
``True`` if detected any PoA history. If the chain was _ever_ PoA, the special
middleware is needed for web3.py. Provider plugins use this property when
creating Web3 instances.
|
has_poa_history
|
python
|
ApeWorX/ape
|
src/ape_ethereum/provider.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/provider.py
|
Apache-2.0
|
def perform_contract_creation_query(
    self, query: ContractCreationQuery
) -> Iterator[ContractCreation]:
    """
    Find when a contract was deployed using binary search and block tracing.

    Args:
        query (ContractCreationQuery): Identifies the contract address to look up.

    Yields:
        ContractCreation results for the deployment, when found.
    """
    # skip the search if there is still no code at address at head
    if not self.chain_manager.get_code(query.contract):
        return None

    def find_creation_block(lo, hi):
        # perform a binary search to find the block when the contract was deployed.
        # takes log2(height), doesn't work with contracts that have been reinit.
        while hi - lo > 1:
            mid = (lo + hi) // 2
            code = self.chain_manager.get_code(query.contract, block_id=mid)
            if not code:
                lo = mid
            else:
                hi = mid
        if self.chain_manager.get_code(query.contract, block_id=hi):
            return hi
        return None

    try:
        block = find_creation_block(0, self.chain_manager.blocks.height)
    except ProviderError:
        # Provider cannot serve historical state; remember for next time.
        self.supports_contract_creation = False
        return None
    # iterate over block transaction traces to find the deployment call
    # this method also supports contracts created by factories
    try:
        if "geth" in self.provider.client_version.lower():
            yield from self._find_creation_in_block_via_geth(block, query.contract)
        else:
            yield from self._find_creation_in_block_via_parity(block, query.contract)
    except (ProviderError, APINotImplementedError):
        # Tracing unsupported; remember so future queries skip this engine.
        self.supports_contract_creation = False
        return None
    self.supports_contract_creation = True
|
Find when a contract was deployed using binary search and block tracing.
|
perform_contract_creation_query
|
python
|
ApeWorX/ape
|
src/ape_ethereum/query.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/query.py
|
Apache-2.0
|
def transaction(self) -> dict:
    """
    The transaction data (obtained differently on
    calls versus transactions).

    Returns:
        dict: The raw transaction payload for this trace.
    """
|
The transaction data (obtained differently on
calls versus transactions).
|
transaction
|
python
|
ApeWorX/ape
|
src/ape_ethereum/trace.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/trace.py
|
Apache-2.0
|
def get_calltree(self) -> CallTreeNode:
    """
    Get an un-enriched call-tree node.

    Returns:
        ``CallTreeNode``: The root of the raw (un-enriched) call tree.
    """
|
Get an un-enriched call-tree node.
|
get_calltree
|
python
|
ApeWorX/ape
|
src/ape_ethereum/trace.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/trace.py
|
Apache-2.0
|
def raw_trace_frames(self) -> Iterator[dict]:
    """
    The raw trace ``"structLogs"`` from ``debug_traceTransaction``
    for deeper investigation. Frames are memoized on first iteration.
    """
    if not self._frames:
        # First pass: stream from the provider, caching each frame as we go.
        for struct_log in self._stream_struct_logs():
            self._frames.append(struct_log)
            yield struct_log
        return
    # Subsequent passes replay the cached frames.
    yield from self._frames
|
The raw trace ``"structLogs"`` from ``debug_traceTransaction``
for deeper investigation.
|
raw_trace_frames
|
python
|
ApeWorX/ape
|
src/ape_ethereum/trace.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/trace.py
|
Apache-2.0
|
def debug_logs_typed(self) -> list[tuple[Any]]:
    """
    Extract messages to console outputted by contracts via print() or console.log() statements
    """
    try:
        call_trace = self.trace
    except NotImplementedError:
        # Some providers do not implement tracing; nothing to extract.
        logger.debug("Call tree not available, skipping debug log extraction")
        return []
    if call_trace is None or not isinstance(call_trace, Trace):
        # Without a usable trace object there are no logs available.
        return []
    return list(call_trace.debug_logs)
|
Extract messages to console outputted by contracts via print() or console.log() statements
|
debug_logs_typed
|
python
|
ApeWorX/ape
|
src/ape_ethereum/transactions.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/transactions.py
|
Apache-2.0
|
def is_console_log(call: "CallTreeNode") -> "TypeGuard[CallTreeNode]":
    """Determine if a call is a standard console.log() call"""
    if call.address != HexBytes(CONSOLE_ADDRESS):
        return False
    # The first four calldata bytes are the method selector.
    selector = to_hex(call.calldata[:4])
    return selector in console_contract.identifier_lookup
|
Determine if a call is a standard console.log() call
|
is_console_log
|
python
|
ApeWorX/ape
|
src/ape_ethereum/_print.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/_print.py
|
Apache-2.0
|
def is_vyper_print(call: "CallTreeNode") -> "TypeGuard[CallTreeNode]":
    """Determine if a call is a standard Vyper print() call"""
    is_print_call = (
        call.address == HexBytes(CONSOLE_ADDRESS)
        and call.calldata[:4] == VYPER_PRINT_METHOD_ID
    )
    if not is_print_call:
        return False
    schema, _ = decode(["string", "bytes"], call.calldata[4:])
    arg_types = schema.strip("()").split(",")
    if not arg_types:
        return False
    # Look at the first arg to try and determine if it's an ABI signature.
    # TODO: Tighten this up. This is not entirely accurate, but should mostly get us there.
    first = arg_types[0]
    return first.startswith(("uint", "int", "bytes")) or first == "string"
|
Determine if a call is a standard Vyper print() call
|
is_vyper_print
|
python
|
ApeWorX/ape
|
src/ape_ethereum/_print.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/_print.py
|
Apache-2.0
|
def console_log(method_abi: MethodABI, calldata: str) -> tuple[Any]:
    """Return logged data for console.log() calls"""
    decoded = ape.networks.ethereum.decode_calldata(method_abi, decode_hex(calldata))
    return tuple(decoded.values())
|
Return logged data for console.log() calls
|
console_log
|
python
|
ApeWorX/ape
|
src/ape_ethereum/_print.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/_print.py
|
Apache-2.0
|
def vyper_print(calldata: str) -> tuple[Any]:
    """Return logged data for print() calls"""
    # The payload is ABI-encoded as (schema-string, packed-args-bytes).
    schema, payload = decode(["string", "bytes"], HexBytes(calldata))
    arg_types = schema.strip("()").split(",")
    return tuple(decode(arg_types, payload))
|
Return logged data for print() calls
|
vyper_print
|
python
|
ApeWorX/ape
|
src/ape_ethereum/_print.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/_print.py
|
Apache-2.0
|
def extract_debug_logs(call: "CallTreeNode") -> Iterable[tuple[Any]]:
    """Filter calls to console.log() and print() from a transactions call tree"""
    if is_vyper_print(call) and call.calldata is not None:
        # Vyper print(): strip the 4-byte selector and decode the payload.
        yield vyper_print(add_0x_prefix(to_hex(call.calldata[4:])))
    elif is_console_log(call) and call.calldata is not None:
        # Hardhat-style console.log(): look up the method by selector.
        method_abi = console_contract.identifier_lookup.get(to_hex(call.calldata[:4]))
        if isinstance(method_abi, MethodABI):
            yield console_log(method_abi, to_hex(call.calldata[4:]))
    elif call.calls is not None:
        # NOTE: Because of the elif chain, sub-calls of a print/console.log
        # call are NOT traversed -- only non-console calls recurse.
        for sub_call in call.calls:
            yield from extract_debug_logs(sub_call)
|
Filter calls to console.log() and print() from a transactions call tree
|
extract_debug_logs
|
python
|
ApeWorX/ape
|
src/ape_ethereum/_print.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/_print.py
|
Apache-2.0
|
def __init__(
    self,
    address: "AddressType" = MULTICALL3_ADDRESS,
    supported_chains: Optional[list[int]] = None,
) -> None:
    """
    Initialize a new Multicall session object; the call list starts empty.
    """
    self.address = address
    # An empty list of calls to start; populate via `.add(...)`.
    self.calls: list[dict] = []
    self.supported_chains = supported_chains or SUPPORTED_CHAINS
|
Initialize a new Multicall session object. By default, there are no calls to make.
|
__init__
|
python
|
ApeWorX/ape
|
src/ape_ethereum/multicall/handlers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/multicall/handlers.py
|
Apache-2.0
|
def inject(cls) -> ModuleType:
    """
    Create the multicall module contract on-chain, so we can use it.
    Must use a provider that supports ``debug_setCode``.

    Usage example::

        from ape_ethereum import multicall

        @pytest.fixture(scope="session")
        def use_multicall():
            # NOTE: use this fixture any test where you want to use a multicall
            return multicall.BaseMulticall.inject()
    """
    from ape_ethereum import multicall

    provider = cls.network_manager.provider
    # Write the known multicall3 bytecode directly at its canonical address.
    provider.set_code(MULTICALL3_ADDRESS, MULTICALL3_CODE)
    chain_id = provider.chain_id
    if chain_id not in SUPPORTED_CHAINS:
        SUPPORTED_CHAINS.append(chain_id)
    return multicall
|
Create the multicall module contract on-chain, so we can use it.
Must use a provider that supports ``debug_setCode``.
Usage example::
from ape_ethereum import multicall
@pytest.fixture(scope="session")
def use_multicall():
# NOTE: use this fixture any test where you want to use a multicall
return multicall.BaseMulticall.inject()
|
inject
|
python
|
ApeWorX/ape
|
src/ape_ethereum/multicall/handlers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/multicall/handlers.py
|
Apache-2.0
|
def add(
self,
call: ContractMethodHandler,
*args,
allowFailure: bool = True,
value: int = 0,
) -> "BaseMulticall":
"""
Adds a call to the Multicall session object.
Raises:
:class:`~ape_ethereum.multicall.exceptions.InvalidOption`: If one
of the kwarg modifiers is not able to be used.
Args:
call (:class:`~ape_ethereum.multicall.handlers.ContractMethodHandler`):
The method to call.
*args: The arguments to invoke the method with.
allowFailure (bool): Whether the call is allowed to fail.
value (int): The amount of ether to forward with the call.
Returns:
:class:`~ape_ethereum.multicall.handlers.BaseMulticall`: returns itself
to emulate a builder pattern.
"""
# Append call dict to the list
# NOTE: Depending upon `_handler_method_abi` at time when `__call__` is triggered,
# some of these properties will be unused
self.calls.append(
{
"target": call.contract.address,
"allowFailure": allowFailure,
"value": value,
"callData": call.encode_input(*args),
}
)
return self
|
Adds a call to the Multicall session object.
Raises:
:class:`~ape_ethereum.multicall.exceptions.InvalidOption`: If one
of the kwarg modifiers is not able to be used.
Args:
call (:class:`~ape_ethereum.multicall.handlers.ContractMethodHandler`):
The method to call.
*args: The arguments to invoke the method with.
allowFailure (bool): Whether the call is allowed to fail.
value (int): The amount of ether to forward with the call.
Returns:
:class:`~ape_ethereum.multicall.handlers.BaseMulticall`: returns itself
to emulate a builder pattern.
|
add
|
python
|
ApeWorX/ape
|
src/ape_ethereum/multicall/handlers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/multicall/handlers.py
|
Apache-2.0
|
def __call__(self, **txn_kwargs) -> "ReceiptAPI":
"""
Execute the Multicall transaction. The transaction will broadcast again every time
the ``Transaction`` object is called.
Raises:
:class:`UnsupportedChain`: If there is not an instance of Multicall3 deployed
on the current chain at the expected address.
Args:
**txn_kwargs: the kwargs to pass through to the transaction handler.
Returns:
:class:`~ape.api.transactions.ReceiptAPI`
"""
self._validate_calls(**txn_kwargs)
return self.handler(self.calls, **txn_kwargs)
|
Execute the Multicall transaction. The transaction will broadcast again every time
the ``Transaction`` object is called.
Raises:
:class:`UnsupportedChain`: If there is not an instance of Multicall3 deployed
on the current chain at the expected address.
Args:
**txn_kwargs: the kwargs to pass through to the transaction handler.
Returns:
:class:`~ape.api.transactions.ReceiptAPI`
|
__call__
|
python
|
ApeWorX/ape
|
src/ape_ethereum/multicall/handlers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/multicall/handlers.py
|
Apache-2.0
|
def as_transaction(self, **txn_kwargs) -> "TransactionAPI":
"""
Encode the Multicall transaction as a ``TransactionAPI`` object, but do not execute it.
Returns:
:class:`~ape.api.transactions.TransactionAPI`
"""
self._validate_calls(**txn_kwargs)
return self.handler.as_transaction(self.calls, **txn_kwargs)
|
Encode the Multicall transaction as a ``TransactionAPI`` object, but do not execute it.
Returns:
:class:`~ape.api.transactions.TransactionAPI`
|
as_transaction
|
python
|
ApeWorX/ape
|
src/ape_ethereum/multicall/handlers.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_ethereum/multicall/handlers.py
|
Apache-2.0
|
def cli(cli_ctx, github, project_name):
"""
``ape init`` allows the user to create an ape project with
default folders and ape-config.yaml.
"""
if github:
from ape.utils._github import github_client
org, repo = github
github_client.clone_repo(org, repo, Path.cwd())
shutil.rmtree(Path.cwd() / ".git", ignore_errors=True)
else:
project_folder = Path.cwd()
for folder_name in ("contracts", "tests", "scripts"):
# Create target Directory
folder = project_folder / folder_name
if folder.exists():
cli_ctx.logger.warning(f"'{folder}' exists")
else:
folder.mkdir()
git_ignore_path = project_folder / ".gitignore"
if git_ignore_path.exists():
cli_ctx.logger.warning(f"Unable to create .gitignore: '{git_ignore_path}' file exists.")
else:
git_ignore_path.touch()
git_ignore_path.write_text(GITIGNORE_CONTENT.lstrip(), encoding="utf8")
ape_config = project_folder / "ape-config.yaml"
if ape_config.exists():
cli_ctx.logger.warning(f"'{ape_config}' exists")
else:
ape_config.write_text(f"name: {project_name}\n", encoding="utf8")
cli_ctx.logger.success(f"{project_name} is written in ape-config.yaml")
|
``ape init`` allows the user to create an ape project with
default folders and ape-config.yaml.
|
cli
|
python
|
ApeWorX/ape
|
src/ape_init/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_init/_cli.py
|
Apache-2.0
|
def cli():
"""
Command-line helper for managing networks.
"""
|
Command-line helper for managing networks.
|
cli
|
python
|
ApeWorX/ape
|
src/ape_networks/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_networks/_cli.py
|
Apache-2.0
|
def _list(cli_ctx, output_format, ecosystem_filter, network_filter, provider_filter, running):
"""
List all the registered ecosystems, networks, and providers.
"""
if running:
# TODO: Honor filter args.
_print_running_networks(cli_ctx)
return
network_data = cli_ctx.network_manager.get_network_data(
ecosystem_filter=ecosystem_filter,
network_filter=network_filter,
provider_filter=provider_filter,
)
if output_format == OutputFormat.TREE:
default_suffix = "[dim default] (default)"
ecosystems = network_data["ecosystems"]
ecosystems = sorted(ecosystems, key=lambda e: e["name"])
def make_sub_tree(data: dict, create_tree: Callable) -> Tree:
name = f"[bold green]{data['name']}"
if data.get("isDefault"):
name += default_suffix
sub_tree = create_tree(name)
return sub_tree
for ecosystem in ecosystems:
ecosystem_tree = make_sub_tree(ecosystem, Tree)
_networks = {n["name"]: n for n in ecosystem["networks"]}
_networks = {n: _networks[n] for n in sorted(_networks)}
for network_name, network in _networks.items():
providers = network["providers"]
if providers:
network_tree = make_sub_tree(network, ecosystem_tree.add)
providers = sorted(providers, key=lambda p: p["name"])
for provider in providers:
make_sub_tree(provider, network_tree.add)
if _networks and ecosystem_tree.children:
echo_rich_text(ecosystem_tree)
elif output_format == OutputFormat.YAML:
if not isinstance(network_data, dict):
raise TypeError(
f"Unexpected network data type: {type(network_data)}. "
f"Expecting dict. YAML dump will fail."
)
try:
click.echo(yaml.safe_dump(network_data, sort_keys=True).strip())
except ValueError as err:
try:
data_str = json.dumps(network_data)
except Exception:
data_str = str(network_data)
raise NetworkError(
f"Network data did not dump to YAML: {data_str}\nActual err: {err}"
) from err
|
List all the registered ecosystems, networks, and providers.
|
_list
|
python
|
ApeWorX/ape
|
src/ape_networks/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_networks/_cli.py
|
Apache-2.0
|
def run(cli_ctx, provider, block_time, background):
"""
Start a subprocess node as if running independently
and stream stdout and stderr.
"""
from ape.api.providers import SubprocessProvider
# Ignore extra loggers, such as web3 loggers.
cli_ctx.logger._extra_loggers = {}
if not isinstance(provider, SubprocessProvider):
cli_ctx.abort(
f"`ape networks run` requires a provider that manages a process, not '{provider.name}'."
)
elif provider.is_connected:
cli_ctx.abort("Process already running.")
# Set block time if provided
if block_time is not None:
provider.provider_settings.update({"block_time": block_time})
# Start showing process logs.
original_level = cli_ctx.logger.level
original_format = cli_ctx.logger.fmt
cli_ctx.logger.set_level(LogLevel.DEBUG)
# Change format to exclude log level (since it is always just DEBUG)
cli_ctx.logger.format(fmt="%(message)s")
try:
_run(cli_ctx, provider, background=background)
finally:
cli_ctx.logger.set_level(original_level)
cli_ctx.logger.format(fmt=original_format)
|
Start a subprocess node as if running independently
and stream stdout and stderr.
|
run
|
python
|
ApeWorX/ape
|
src/ape_networks/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_networks/_cli.py
|
Apache-2.0
|
def create_genesis_data(alloc: Alloc, chain_id: int) -> "GenesisDataTypedDict":
"""
A wrapper around genesis data for py-geth that
fills in more defaults.
"""
return {
"alloc": alloc,
"config": {
"arrowGlacierBlock": 0,
"berlinBlock": 0,
"byzantiumBlock": 0,
"cancunTime": 0,
"chainId": chain_id,
"constantinopleBlock": 0,
"daoForkBlock": 0,
"daoForkSupport": True,
"eip150Block": 0,
"eip155Block": 0,
"eip158Block": 0,
"ethash": {},
"grayGlacierBlock": 0,
"homesteadBlock": 0,
"istanbulBlock": 0,
"londonBlock": 0,
"petersburgBlock": 0,
"shanghaiTime": 0,
"terminalTotalDifficulty": 0,
"terminalTotalDifficultyPassed": True,
"blobSchedule": {
"cancun": {"target": 3, "max": 6, "baseFeeUpdateFraction": 3338477},
"prague": {"target": 6, "max": 9, "baseFeeUpdateFraction": 5007716},
},
},
"coinbase": ZERO_ADDRESS,
"difficulty": "0x0",
"gasLimit": "0x0",
"extraData": "0x",
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"nonce": "0x0",
"timestamp": "0x0",
"parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"baseFeePerGas": "0x0",
}
|
A wrapper around genesis data for py-geth that
fills in more defaults.
|
create_genesis_data
|
python
|
ApeWorX/ape
|
src/ape_node/provider.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_node/provider.py
|
Apache-2.0
|
def cli():
"""
Command-line helper for managing plugins.
"""
|
Command-line helper for managing plugins.
|
cli
|
python
|
ApeWorX/ape
|
src/ape_plugins/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_plugins/_cli.py
|
Apache-2.0
|
def plugins_argument():
"""
An argument that is either the given list of plugins
or plugins loaded from the local config file.
"""
def load_from_file(ctx, file_path: Path) -> list["PluginMetadata"]:
from ape.plugins._utils import PluginMetadata
from ape.utils.misc import load_config
if file_path.is_dir():
name_options = (
"ape-config.yaml",
"ape-config.yml",
"ape-config.json",
"pyproject.toml",
)
for option in name_options:
if (file_path / option).is_file():
file_path = file_path / option
break
if file_path.is_file():
config = load_config(file_path)
if plugins := config.get("plugins"):
return [PluginMetadata.model_validate(d) for d in plugins]
ctx.obj.logger.warning(f"No plugins found at '{file_path}'.")
return []
def callback(ctx, param, value: tuple[str]):
from ape.plugins._utils import PluginMetadata
res = []
if not value:
ctx.obj.abort("You must give at least one requirement to install.")
elif len(value) == 1:
# User passed in a path to a file.
file_path = Path(value[0]).expanduser().resolve()
res = (
load_from_file(ctx, file_path)
if file_path.exists()
else [PluginMetadata(name=v) for v in value[0].split(" ")]
)
else:
res = [PluginMetadata(name=v) for v in value]
return res
return click.argument(
"plugins",
callback=callback,
nargs=-1,
metavar="PLUGIN-NAMES or path/to/project-dir",
)
|
An argument that is either the given list of plugins
or plugins loaded from the local config file.
|
plugins_argument
|
python
|
ApeWorX/ape
|
src/ape_plugins/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_plugins/_cli.py
|
Apache-2.0
|
def _install(name, spec, exit_on_fail: bool = True) -> int:
"""
Helper function to install or update a Python package using pip.
Args:
name (str): The package name.
spec (str): Version specifier, e.g., '==1.0.0', '>=1.0.0', etc.
exit_on_fail (bool): Set to ``False`` to not exit on fail.
Returns:
The process return-code.
"""
from ape.plugins._utils import PIP_COMMAND
arguments = [*PIP_COMMAND, "install", f"{name}{spec}", "--quiet"]
# Run the installation process and capture output for error checking
completed_process = subprocess.run(
arguments,
capture_output=True,
text=True, # Output as string
check=False, # Allow manual error handling
)
# Check for installation errors
if completed_process.returncode != 0:
message = f"Failed to install/update {name}"
if completed_process.stdout:
message += f": {completed_process.stdout}"
if completed_process.stderr:
message += f": {completed_process.stderr}"
logger.error(message)
if exit_on_fail:
sys.exit(completed_process.returncode)
else:
logger.info(f"Successfully installed/updated {name}")
return completed_process.returncode
|
Helper function to install or update a Python package using pip.
Args:
name (str): The package name.
spec (str): Version specifier, e.g., '==1.0.0', '>=1.0.0', etc.
exit_on_fail (bool): Set to ``False`` to not exit on fail.
Returns:
The process return-code.
|
_install
|
python
|
ApeWorX/ape
|
src/ape_plugins/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_plugins/_cli.py
|
Apache-2.0
|
def version_from_project_package_json(self) -> Optional[str]:
"""
The version from your project's package.json, if exists.
"""
return _get_version_from_package_json(
self.local_project.path, dict_path=("dependencies", self.package_id)
)
|
The version from your project's package.json, if exists.
|
version_from_project_package_json
|
python
|
ApeWorX/ape
|
src/ape_pm/dependency.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_pm/dependency.py
|
Apache-2.0
|
def uninstall(cli_ctx, name, versions, yes):
"""
Uninstall a package
This command removes a package from the installed packages.
If specific versions are provided, only those versions of the package will be
removed. If no versions are provided, the command will prompt you to choose
versions to remove. You can also choose to remove all versions of the package.
Examples:\n
- Remove specific versions: ape pm uninstall <PackageName> "1.0.0" "2.0.0"\n
- Prompt to choose versions: ape pm uninstall <PackageName>\n
- Remove all versions: ape pm uninstall <PackageName> -y
"""
pm = cli_ctx.local_project
# NOTE: Purposely don't call `get_dependency` or anything so we for sure
# are only checking the installed.
installed = {d for d in pm.dependencies.installed}
did_error = False
did_find = False
if not name or name == ".":
if versions:
cli_ctx.abort("Cannot specify version when uninstalling from config.")
# Uninstall all dependencies from the config.
for cfg in pm.config.dependencies:
api = pm.dependencies.decode_dependency(**cfg)
for dependency in installed:
if dependency.name != api.name or dependency.version != api.version_id:
continue
did_find = True
res = _uninstall(dependency, yes=yes)
if res is False:
did_error = True
else:
deps_to_remove = {
d
for d in installed
if (d.name == name or d.package_id == name)
and (d.version in versions if versions else True)
}
for dependency in deps_to_remove:
did_find = True
res = _uninstall(dependency, yes=yes)
if res is False:
did_error = True
if not did_find:
if name:
name = ", ".join([f"{name}={v}" for v in versions]) if versions else name
cli_ctx.logger.error(f"Package(s) '{name}' not installed.")
else:
cli_ctx.logger.error(
"No package(s) installed in local project. "
"Please specify a package to uninstall or go to a local project."
)
did_error = True
sys.exit(int(did_error))
|
Uninstall a package
This command removes a package from the installed packages.
If specific versions are provided, only those versions of the package will be
removed. If no versions are provided, the command will prompt you to choose
versions to remove. You can also choose to remove all versions of the package.
Examples:
- Remove specific versions: ape pm uninstall <PackageName> "1.0.0" "2.0.0"
- Prompt to choose versions: ape pm uninstall <PackageName>
- Remove all versions: ape pm uninstall <PackageName> -y
|
uninstall
|
python
|
ApeWorX/ape
|
src/ape_pm/_cli.py
|
https://github.com/ApeWorX/ape/blob/master/src/ape_pm/_cli.py
|
Apache-2.0
|
def skip_if_plugin_installed(*plugin_names: str):
"""
A simple decorator for skipping a test if a plugin is installed.
**NOTE**: For performance reasons, this method is not very good.
It only works for common ApeWorX supported plugins and is only
meant for assisting testing in Core (NOT a public utility).
"""
names = [n.lower().replace("-", "_").replace("ape_", "") for n in plugin_names]
msg_f = "Cannot run this test when plugin '{}' installed."
def wrapper(fn):
for name in names:
# Compilers
if name in ("solidity", "vyper"):
compiler = ape.compilers.get_compiler(name)
if compiler:
def test_skip_from_compiler(*args, name=name, **kwargs):
pytest.mark.skip(msg_f.format(name))
# NOTE: By returning a function, we avoid a collection warning.
return test_skip_from_compiler
# Converters
elif name in ("ens",):
address_converters = [
type(n).__name__ for n in ape.chain.conversion_manager._converters[AddressType]
]
if any(x.startswith(name.upper()) for x in address_converters):
def test_skip_from_converter(name=name):
pytest.mark.skip(msg_f.format(name))
return test_skip_from_converter
# noop
return fn
return wrapper
|
A simple decorator for skipping a test if a plugin is installed.
**NOTE**: For performance reasons, this method is not very good.
It only works for common ApeWorX supported plugins and is only
meant for assisting testing in Core (NOT a public utility).
|
skip_if_plugin_installed
|
python
|
ApeWorX/ape
|
tests/conftest.py
|
https://github.com/ApeWorX/ape/blob/master/tests/conftest.py
|
Apache-2.0
|
def disable_fork_providers(ethereum):
"""
When ape-hardhat or ape-foundry is installed,
this tricks the test into thinking they are not
(only uses sepolia-fork).
"""
actual = ethereum.sepolia_fork.__dict__.pop("providers", {})
ethereum.sepolia_fork.__dict__["providers"] = {}
yield
if actual:
ethereum.sepolia_fork.__dict__["providers"] = actual
|
When ape-hardhat or ape-foundry is installed,
this tricks the test into thinking they are not
(only uses sepolia-fork).
|
disable_fork_providers
|
python
|
ApeWorX/ape
|
tests/functional/conftest.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/conftest.py
|
Apache-2.0
|
def mock_fork_provider(mocker, ethereum, mock_sepolia):
"""
A fake provider representing something like ape-foundry
that can fork networks (only uses sepolia-fork).
"""
initial_providers = ethereum.sepolia_fork.__dict__.pop("providers", {})
initial_default = ethereum.sepolia_fork._default_provider
mock_provider = mocker.MagicMock()
mock_provider.name = "mock"
mock_provider.network = ethereum.sepolia_fork
# Have to do this because providers are partials.
def fake_partial(*args, **kwargs):
mock_provider.partial_call = (args, kwargs)
return mock_provider
ethereum.sepolia_fork._default_provider = "mock"
ethereum.sepolia_fork.__dict__["providers"] = {"mock": fake_partial}
yield mock_provider
if initial_providers:
ethereum.sepolia_fork.__dict__["providers"] = initial_providers
if initial_default:
ethereum.sepolia_fork._default_provider = initial_default
|
A fake provider representing something like ape-foundry
that can fork networks (only uses sepolia-fork).
|
mock_fork_provider
|
python
|
ApeWorX/ape
|
tests/functional/conftest.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/conftest.py
|
Apache-2.0
|
def test_transfer_value_of_0(sender, receiver):
"""
There was a bug where this failed, thinking there was no value.
"""
initial_balance = receiver.balance
sender.transfer(receiver, 0)
assert receiver.balance == initial_balance
# Also show conversion works.
sender.transfer(receiver, "0 wei")
assert receiver.balance == initial_balance
|
There was a bug where this failed, thinking there was no value.
|
test_transfer_value_of_0
|
python
|
ApeWorX/ape
|
tests/functional/test_accounts.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_accounts.py
|
Apache-2.0
|
def test_transfer_mixed_up_sender_and_value(sender, receiver):
"""
Testing the case where the user mixes up the argument order,
it should show a nicer error than it was previously, as this is
a common and easy mistake.
"""
expected = (
r"Cannot use integer-type for the `receiver` "
r"argument in the `\.transfer\(\)` method \(this "
r"protects against accidentally passing the "
r"`value` as the `receiver`\)."
)
with pytest.raises(AccountsError, match=expected):
sender.transfer(123, receiver)
# Similarly show using currency-str (may fail for different error).
expected = r"Invalid `receiver` value: '123 wei'\."
with pytest.raises(AccountsError, match=expected):
sender.transfer("123 wei", receiver)
|
Testing the case where the user mixes up the argument order,
it should show a nicer error than it was previously, as this is
a common and easy mistake.
|
test_transfer_mixed_up_sender_and_value
|
python
|
ApeWorX/ape
|
tests/functional/test_accounts.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_accounts.py
|
Apache-2.0
|
def test_deploy_instance(owner, vyper_contract_instance):
"""
Tests against a confusing scenario where you would get a SignatureError when
trying to deploy a ContractInstance because Ape would attempt to create a tx
by calling the contract's default handler.
"""
expected = (
r"contract argument must be a ContractContainer type, "
r"such as 'project\.MyContract' where 'MyContract' is the "
r"name of a contract in your project\."
)
with pytest.raises(TypeError, match=expected):
owner.deploy(vyper_contract_instance)
|
Tests against a confusing scenario where you would get a SignatureError when
trying to deploy a ContractInstance because Ape would attempt to create a tx
by calling the contract's default handler.
|
test_deploy_instance
|
python
|
ApeWorX/ape
|
tests/functional/test_accounts.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_accounts.py
|
Apache-2.0
|
def test_deploy_no_deployment_bytecode(owner, bytecode):
"""
https://github.com/ApeWorX/ape/issues/1904
"""
expected = (
r"Cannot deploy: contract 'Apes' has no deployment-bytecode\. "
r"Are you attempting to deploy an interface\?"
)
contract_type = ContractType.model_validate(
{"abi": [], "contractName": "Apes", "deploymentBytecode": bytecode}
)
contract = ContractContainer(contract_type)
with pytest.raises(MissingDeploymentBytecodeError, match=expected):
owner.deploy(contract)
| ERROR: type should be string, got "\n https://github.com/ApeWorX/ape/issues/1904\n " |
test_deploy_no_deployment_bytecode
|
python
|
ApeWorX/ape
|
tests/functional/test_accounts.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_accounts.py
|
Apache-2.0
|
def test_unlock_and_reload(runner, account_manager, keyfile_account, message):
"""
Tests against a condition where reloading after unlocking
would not honor unlocked state.
"""
keyfile_account.unlock(passphrase=PASSPHRASE)
reloaded_account = account_manager.load(keyfile_account.alias)
# y: yes, sign (note: unlocking makes the key available but is not the same as autosign).
with runner.isolation(input="y\n"):
signature = reloaded_account.sign_message(message)
assert keyfile_account.check_signature(message, signature)
|
Tests against a condition where reloading after unlocking
would not honor unlocked state.
|
test_unlock_and_reload
|
python
|
ApeWorX/ape
|
tests/functional/test_accounts.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_accounts.py
|
Apache-2.0
|
def test_repr(account_manager):
"""
NOTE: __repr__ should be simple and fast!
Previously, we showed the repr of all the accounts.
That was a bad idea, as that can be very unnecessarily slow.
Hence, this test exists to ensure care is taken.
"""
actual = repr(account_manager)
assert actual == "<AccountManager>"
|
NOTE: __repr__ should be simple and fast!
Previously, we showed the repr of all the accounts.
That was a bad idea, as that can be very unnecessarily slow.
Hence, this test exists to ensure care is taken.
|
test_repr
|
python
|
ApeWorX/ape
|
tests/functional/test_accounts.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_accounts.py
|
Apache-2.0
|
def test_ipython_integration_defaults(manager, fn_name):
"""
Test default behavior for IPython integration methods.
The base-manager short-circuits to NotImplementedError to avoid
dealing with any custom `__getattr__` logic entirely. This prevents
side-effects such as unnecessary compiling in the ProjectManager.
"""
with pytest.raises(APINotImplementedError):
fn = getattr(manager, fn_name)
fn()
|
Test default behavior for IPython integration methods.
The base-manager short-circuits to NotImplementedError to avoid
dealing with any custom `__getattr__` logic entirely. This prevents
side-effects such as unnecessary compiling in the ProjectManager.
|
test_ipython_integration_defaults
|
python
|
ApeWorX/ape
|
tests/functional/test_base_manager.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_base_manager.py
|
Apache-2.0
|
def test_model_validate_web3_block():
"""
Show we have good compatibility with web3.py native types.
"""
data = BlockData(number=123, timestamp=123, gasLimit=123, gasUsed=100) # type: ignore
actual = Block.model_validate(data)
assert actual.number == 123
|
Show we have good compatibility with web3.py native types.
|
test_model_validate_web3_block
|
python
|
ApeWorX/ape
|
tests/functional/test_block.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_block.py
|
Apache-2.0
|
def test_snapshot_and_restore_switched_chains(networks, chain):
"""
Ensuring things work as expected when we switch chains after snapshotting
and before restoring.
"""
snapshot = chain.snapshot()
# Switch chains.
with networks.ethereum.local.use_provider(
"test", provider_settings={"chain_id": 11191919191991918223773}
):
with pytest.raises(UnknownSnapshotError):
chain.restore(snapshot)
|
Ensuring things work as expected when we switch chains after snapshotting
and before restoring.
|
test_snapshot_and_restore_switched_chains
|
python
|
ApeWorX/ape
|
tests/functional/test_chain.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_chain.py
|
Apache-2.0
|
def test_network_option_with_other_option(runner):
"""
To prove can use the `@network_option` with other options
in the same command (was issue during production where could not!).
"""
# Scenario: Using network_option but not using the value in the command callback.
# (Potentially handling independently).
@click.command()
@network_option()
@other_option
def solo_option(other):
click.echo(other)
# Scenario: Using the network option with another option.
# This use-case is way more common than the one above.
@click.command()
@network_option()
@other_option
def with_net(network, other):
click.echo(network.name)
click.echo(other)
def run(cmd, fail_msg=None):
res = runner.invoke(cmd, (), catch_exceptions=False)
fail_msg = f"{fail_msg}\n{res.output}" if fail_msg else res.output
assert res.exit_code == 0, fail_msg
assert OTHER_OPTION_VALUE in res.output, fail_msg
return res
run(solo_option, fail_msg="Failed when used without network kwargs")
result = run(with_net, fail_msg="Failed when used with network kwargs")
assert "local" in result.output
|
To prove can use the `@network_option` with other options
in the same command (was issue during production where could not!).
|
test_network_option_with_other_option
|
python
|
ApeWorX/ape
|
tests/functional/test_cli.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_cli.py
|
Apache-2.0
|
def test_account_option_uses_single_account_as_default(runner, one_account):
"""
When there is only 1 test account, that is the default
when no option is given.
"""
@click.command()
@account_option(account_type=[one_account])
def cmd(account):
_expected = get_expected_account_str(account)
click.echo(_expected)
expected = get_expected_account_str(one_account)
result = runner.invoke(cmd, ())
assert expected in result.output
|
When there is only 1 test account, that is the default
when no option is given.
|
test_account_option_uses_single_account_as_default
|
python
|
ApeWorX/ape
|
tests/functional/test_cli.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_cli.py
|
Apache-2.0
|
def test_prompt_choice(runner, opt):
"""
This demonstrates how to use ``PromptChoice``,
as it is a little confusing, requiring a callback.
"""
def choice_callback(ctx, param, value):
return param.type.select()
choice = PromptChoice(["foo", "bar"])
assert hasattr(choice, "name")
choice = PromptChoice(["foo", "bar"], name="choice")
assert choice.name == "choice"
@click.command()
@click.option(
"--choice",
type=choice,
callback=choice_callback,
)
def cmd(choice):
click.echo(f"__expected_{choice}")
result = runner.invoke(cmd, [], input=f"{opt}\n")
assert "Select one of the following:" in result.output
assert "__expected_foo" in result.output
|
This demonstrates how to use ``PromptChoice``,
as it is a little confusing, requiring a callback.
|
test_prompt_choice
|
python
|
ApeWorX/ape
|
tests/functional/test_cli.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_cli.py
|
Apache-2.0
|
def test_account_prompt_name():
"""
It is very important for this class to have the `name` attribute,
even though it is not used. That is because some click internals
expect this property to exist, and we skip the super() constructor.
"""
option = AccountAliasPromptChoice()
assert option.name == "account"
option = AccountAliasPromptChoice(name="account_z")
assert option.name == "account_z"
|
It is very important for this class to have the `name` attribute,
even though it is not used. That is because some click internals
expect this property to exist, and we skip the super() constructor.
|
test_account_prompt_name
|
python
|
ApeWorX/ape
|
tests/functional/test_cli.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_cli.py
|
Apache-2.0
|
def test_contract_file_paths_argument_given_directory_and_file(
project_with_contract, runner, contracts_paths_cmd
):
"""
Tests against a bug where if given a directory AND a file together,
only the directory resolved and the file was lost.
"""
pm = project_with_contract
src_stem = next(x for x in pm.sources if Path(x).suffix == ".json").split(".")[0]
arguments = ("subdir", src_stem, "--project", f"{pm.path}")
result = runner.invoke(contracts_paths_cmd, arguments)
paths = sorted(pm.sources.paths)
all_paths = ", ".join(x.name for x in paths if x.parent.name == "subdir")
assert f"{all_paths}" in result.output
assert f"{src_stem.split('/')[-1]}" in result.output
|
Tests against a bug where if given a directory AND a file together,
only the directory resolved and the file was lost.
|
test_contract_file_paths_argument_given_directory_and_file
|
python
|
ApeWorX/ape
|
tests/functional/test_cli.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_cli.py
|
Apache-2.0
|
def test_connected_provider_command_use_custom_options(runner):
    """
    Ensure custom options work when using `ConnectedProviderCommand`.
    (There was an issue during development where we could not).
    """
    network_spec = ("--network", "ethereum:local:test")

    # Scenario: custom option alongside the injected network object.
    @click.command(cls=ConnectedProviderCommand)
    @other_option
    def cmd_with_network(network, other):
        click.echo(network.name)
        click.echo(other)

    # Scenario: only the custom option.
    @click.command(cls=ConnectedProviderCommand)
    @other_option
    def cmd_only_option(other):
        click.echo(other)

    # Scenario: custom option plus an argument and the injected provider.
    @click.command(cls=ConnectedProviderCommand)
    @click.argument("other_arg")
    @other_option
    def cmd_with_argument(other_arg, other, provider):
        click.echo(other)
        click.echo(provider.name)
        click.echo(other_arg)

    def invoke(command, *extra):
        outcome = runner.invoke(
            command, [*network_spec, *extra], catch_exceptions=False
        )
        assert outcome.exit_code == 0, outcome.output
        assert OTHER_OPTION_VALUE in outcome.output
        return outcome

    output = invoke(cmd_with_network).output
    assert "local" in output, output  # Echoes the network object's name.

    output = invoke(cmd_only_option).output
    assert "local" not in output, output

    extra_value = "_extra_"
    output = invoke(cmd_with_argument, extra_value).output
    assert "test" in output  # The provider's name.
    assert extra_value in output
|
Ensure custom options work when using `ConnectedProviderCommand`.
(There was an issue during development where we could not).
|
test_connected_provider_command_use_custom_options
|
python
|
ApeWorX/ape
|
tests/functional/test_cli.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_cli.py
|
Apache-2.0
|
def test_flatten_contract(compilers, project_with_contract):
    """
    Positive tests exist in compiler plugins that implement this behavior.
    Here we only verify the failure modes: a registered compiler that does
    not implement flattening raises ``APINotImplementedError``, and a file
    extension with no registered compiler raises ``CompilerError``.
    """
    # Path to a real, compilable source in the project (its compiler does
    # not implement flattening).
    source_id = project_with_contract.ApeContract0.contract_type.source_id
    path = project_with_contract.contracts_folder / source_id
    with pytest.raises(APINotImplementedError):
        compilers.flatten_contract(path)
    # '.foo' has no registered compiler at all.
    expected = r"Unable to flatten contract\. Missing compiler for '.foo'\."
    with pytest.raises(CompilerError, match=expected):
        compilers.flatten_contract(Path("contract.foo"))
|
Positive tests exist in compiler plugins that implement this behavior.
|
test_flatten_contract
|
python
|
ApeWorX/ape
|
tests/functional/test_compilers.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_compilers.py
|
Apache-2.0
|
def test_compile(compilers, project_with_contract, factory):
    """
    Testing both stringified paths and path-object paths.
    """
    # `factory` turns the path into either a `str` or a `Path` argument.
    source_path = next(iter(project_with_contract.sources.paths))
    compiled = compilers.compile(
        (factory(source_path),), project=project_with_contract
    )
    compiled_names = {contract.name for contract in compiled}
    assert source_path.stem in compiled_names
|
Testing both stringified paths and path-object paths.
|
test_compile
|
python
|
ApeWorX/ape
|
tests/functional/test_compilers.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_compilers.py
|
Apache-2.0
|
def test_compile_multiple_errors(
    mock_compiler, make_mock_compiler, compilers, project_with_contract
):
    """
    Simulating getting errors from multiple compilers.
    We should get all the errors.
    """
    second_mock_compiler = make_mock_compiler("mock2")

    # One source file per mock compiler, each carrying that compiler's extension.
    source_0 = project_with_contract.path / f"AMockContract{mock_compiler.ext}"
    source_0.write_text("foobar", encoding="utf8")
    source_1 = project_with_contract.path / f"AMockContract{second_mock_compiler.ext}"
    source_1.write_text("foobar2", encoding="utf8")

    message_0 = "this is expected message 0"
    message_1 = "this is expected message 1"

    class MyCustomCompilerError0(CompilerError):
        def __init__(self):
            super().__init__(message_0)

    class MyCustomCompilerError1(CompilerError):
        def __init__(self):
            super().__init__(message_1)

    mock_compiler.compile.side_effect = MyCustomCompilerError0
    second_mock_compiler.compile.side_effect = MyCustomCompilerError1

    _ = compilers.registered_compilers  # Ensures cached property is set.
    # Hack in our mock compilers.
    registered = compilers.__dict__["registered_compilers"]
    registered[mock_compiler.ext] = mock_compiler
    registered[second_mock_compiler.ext] = second_mock_compiler
    try:
        # Both messages must surface, joined together.
        with pytest.raises(CompilerError, match=rf"{message_0}\n\n{message_1}"):
            list(
                compilers.compile([source_0, source_1], project=project_with_contract)
            )
    finally:
        # Undo the hack so other tests see the real registry.
        for ext in (mock_compiler.ext, second_mock_compiler.ext):
            compilers.__dict__.get("registered_compilers", {}).pop(ext, None)
|
Simulating getting errors from multiple compilers.
We should get all the errors.
|
test_compile_multiple_errors
|
python
|
ApeWorX/ape
|
tests/functional/test_compilers.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_compilers.py
|
Apache-2.0
|
def test_compile_in_project_where_source_id_matches_local_project(project, compilers):
    """
    Tests against a bug where if you had two projects with the same source IDs but
    different content, it always compiled the local project's source.
    """
    retrieve_abi = {
        "inputs": [],
        "name": "retrieve",
        "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
        "stateMutability": "view",
        "type": "function",
    }
    replacement_content = json.dumps([retrieve_abi])
    with project.isolate_in_tempdir() as temp_project:
        assert compilers.local_project.path != temp_project.path, (
            "Cannot be same as local for this test"
        )
        contract = temp_project.load_contracts()["Interface"]
        source_file = temp_project.sources.lookup(contract.source_id)
        # Swap in different content under the same source ID.
        source_file.unlink(missing_ok=True)
        source_file.write_text(replacement_content, encoding="utf8")
        # NOTE: Another condition for this bug is that the given path
        # must be in source-ID form, meaning it relative to the project
        # (but not _necessarily_ a relative path, e.g. no `./` prefix).
        source_id_path = Path(contract.source_id)
        # Compile the file with the same name but different content.
        matches = [
            x
            for x in compilers.compile([source_id_path], project=temp_project)
            if x.name == contract.name
        ]
        # It should reflect the new content and not the one with the same
        # source ID from the local project.
        assert "retrieve" in matches[0].methods
|
Tests against a bug where if you had two projects with the same source IDs but
different content, it always compiled the local project's source.
|
test_compile_in_project_where_source_id_matches_local_project
|
python
|
ApeWorX/ape
|
tests/functional/test_compilers.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_compilers.py
|
Apache-2.0
|
def test_config_exclude_regex_serialize():
    """
    Show we can to-and-fro with exclude regexes.
    """
    raw_value = 'r"FooBar"'

    def compiled_patterns(config):
        # The exclude entries the model parsed into compiled regex objects.
        return [entry for entry in config.exclude if isinstance(entry, Pattern)]

    original = Config(exclude=[raw_value])
    patterns = compiled_patterns(original)
    assert len(patterns) == 1
    assert patterns[0].pattern == "FooBar"

    # NOTE: Use json mode to ensure we can go from most minimum value back.
    dumped = original.model_dump(mode="json", by_alias=True)
    assert raw_value in dumped.get("exclude", [])

    restored = Config.model_validate(original.model_dump(mode="json", by_alias=True))
    patterns = compiled_patterns(restored)
    assert len(patterns) == 1
    assert patterns[0].pattern == "FooBar"
|
Show we can to-and-fro with exclude regexes.
|
test_config_exclude_regex_serialize
|
python
|
ApeWorX/ape
|
tests/functional/test_compilers.py
|
https://github.com/ApeWorX/ape/blob/master/tests/functional/test_compilers.py
|
Apache-2.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.