Dataset columns (name, type, observed range):

  body                     string, length 26 to 98.2k
  body_hash                int64, -9,222,864,604,528,158,000 to 9,221,803,474B
  docstring                string, length 1 to 16.8k
  path                     string, length 5 to 230
  name                     string, length 1 to 96
  repository_name          string, length 7 to 89
  lang                     string, 1 class
  body_without_docstring   string, length 20 to 98.2k

Each record below lists, in order: body, body_hash, docstring, path, name, repository_name, lang, body_without_docstring.
@property def mode(self): 'Mode of representation.\n\n This indicates the type of value returned\n by :meth:`fetch` and :meth:`__getitem__`.\n :class:`tuple`, :class:`dict`, and :obj:`None` are supported.\n ' raise NotImplementedError
6,481,424,975,716,489,000
Mode of representation. This indicates the type of value returned by :meth:`fetch` and :meth:`__getitem__`. :class:`tuple`, :class:`dict`, and :obj:`None` are supported.
pytorch_pfn_extras/dataset/tabular/tabular_dataset.py
mode
HiroakiMikami/pytorch-pfn-extras
python
@property def mode(self): 'Mode of representation.\n\n This indicates the type of value returned\n by :meth:`fetch` and :meth:`__getitem__`.\n :class:`tuple`, :class:`dict`, and :obj:`None` are supported.\n ' raise NotImplementedError
def get_examples(self, indices, key_indices): 'Return a part of data.\n\n Args:\n indices (list of ints or slice): Indices of requested rows.\n If this argument is :obj:`None`, it indicates all rows.\n key_indices (tuple of ints): Indices of requested columns.\n If this argument is :obj:`None`, it indicates all columns.\n\n Returns:\n tuple of lists/arrays\n ' raise NotImplementedError
-9,071,879,419,892,109,000
Return a part of data. Args: indices (list of ints or slice): Indices of requested rows. If this argument is :obj:`None`, it indicates all rows. key_indices (tuple of ints): Indices of requested columns. If this argument is :obj:`None`, it indicates all columns. Returns: tuple of lists/arrays
pytorch_pfn_extras/dataset/tabular/tabular_dataset.py
get_examples
HiroakiMikami/pytorch-pfn-extras
python
def get_examples(self, indices, key_indices): 'Return a part of data.\n\n Args:\n indices (list of ints or slice): Indices of requested rows.\n If this argument is :obj:`None`, it indicates all rows.\n key_indices (tuple of ints): Indices of requested columns.\n If this argument is :obj:`None`, it indicates all columns.\n\n Returns:\n tuple of lists/arrays\n ' raise NotImplementedError
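The two abstract members above, :attr:`mode` and :meth:`get_examples`, define the whole storage contract of a TabularDataset. A minimal sketch of a concrete in-memory implementation, assuming only the abstract API shown in this file (the class name InMemoryTabular and its dict-of-columns storage are illustrative, not part of the library):

from pytorch_pfn_extras.dataset.tabular import tabular_dataset

class InMemoryTabular(tabular_dataset.TabularDataset):
    # Hypothetical column-major dataset backed by a dict of equal-length lists.
    def __init__(self, columns):
        self._columns = columns              # e.g. {'a': [0, 1], 'b': [2, 3]}
        self._keys = tuple(columns)

    def __len__(self):
        return len(next(iter(self._columns.values())))

    @property
    def keys(self):
        return self._keys

    @property
    def mode(self):
        return dict                          # fetch()/__getitem__ return dicts

    def get_examples(self, indices, key_indices):
        if indices is None:
            indices = range(len(self))
        elif isinstance(indices, slice):
            indices = range(*indices.indices(len(self)))
        if key_indices is None:
            key_indices = range(len(self._keys))
        # One list per requested column, restricted to the requested rows.
        return tuple([self._columns[self._keys[k]][i] for i in indices]
                     for k in key_indices)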
@property def slice(self): 'Get a slice of dataset.\n\n Args:\n indices (list/array of ints/bools or slice): Requested rows.\n keys (tuple of ints/strs or int or str): Requested columns.\n\n Returns:\n A view of specified range.\n ' return ppe.dataset.tabular._slice._SliceHelper(self)
3,540,850,432,536,848,400
Get a slice of dataset. Args: indices (list/array of ints/bools or slice): Requested rows. keys (tuple of ints/strs or int or str): Requested columns. Returns: A view of specified range.
pytorch_pfn_extras/dataset/tabular/tabular_dataset.py
slice
HiroakiMikami/pytorch-pfn-extras
python
@property def slice(self): 'Get a slice of dataset.\n\n Args:\n indices (list/array of ints/bools or slice): Requested rows.\n keys (tuple of ints/strs or int or str): Requested columns.\n\n Returns:\n A view of specified range.\n ' return ppe.dataset.tabular._slice._SliceHelper(self)
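A sketch of how the slice helper reads in practice, using the illustrative InMemoryTabular above; the helper returns a lazy view, so nothing is materialised until :meth:`fetch` is called:

ds = InMemoryTabular({'a': [0, 1, 2, 3], 'b': [4, 5, 6, 7]})
view = ds.slice[[0, 2], ('a',)]   # rows 0 and 2, column 'a' only
print(view.fetch())               # expected: {'a': [0, 2]}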
def fetch(self): "Fetch data.\n\n This method fetches all data of the dataset/view.\n Note that this method returns column-major data\n (i.e. :obj:`([a[0], ..., a[3]], ..., [c[0], ..., c[3]])`,\n :obj:`{'a': [a[0], ..., a[3]], ..., 'c': [c[0], ..., c[3]]}`, or\n :obj:`[a[0], ..., a[3]]`).\n\n Returns:\n If :attr:`mode` is :class:`tuple`,\n this method returns a tuple of lists/arrays.\n If :attr:`mode` is :class:`dict`,\n this method returns a dict of lists/arrays.\n If :attr:`mode` is :obj:`None`,\n this method returns a list/array.\n " examples = self.get_examples(None, None) if (self.mode is tuple): return examples elif (self.mode is dict): return dict(zip(self.keys, examples)) elif (self.mode is None): return examples[0]
4,498,244,487,840,596,000
Fetch data. This method fetches all data of the dataset/view. Note that this method returns column-major data (i.e. :obj:`([a[0], ..., a[3]], ..., [c[0], ..., c[3]])`, :obj:`{'a': [a[0], ..., a[3]], ..., 'c': [c[0], ..., c[3]]}`, or :obj:`[a[0], ..., a[3]]`). Returns: If :attr:`mode` is :class:`tuple`, this method returns a tuple of lists/arrays. If :attr:`mode` is :class:`dict`, this method returns a dict of lists/arrays. If :attr:`mode` is :obj:`None`, this method returns a list/array.
pytorch_pfn_extras/dataset/tabular/tabular_dataset.py
fetch
HiroakiMikami/pytorch-pfn-extras
python
def fetch(self): "Fetch data.\n\n This method fetches all data of the dataset/view.\n Note that this method returns column-major data\n (i.e. :obj:`([a[0], ..., a[3]], ..., [c[0], ..., c[3]])`,\n :obj:`{'a': [a[0], ..., a[3]], ..., 'c': [c[0], ..., c[3]]}`, or\n :obj:`[a[0], ..., a[3]]`).\n\n Returns:\n If :attr:`mode` is :class:`tuple`,\n this method returns a tuple of lists/arrays.\n If :attr:`mode` is :class:`dict`,\n this method returns a dict of lists/arrays.\n If :attr:`mode` is :obj:`None`,\n this method returns a list/array.\n " examples = self.get_examples(None, None) if (self.mode is tuple): return examples elif (self.mode is dict): return dict(zip(self.keys, examples)) elif (self.mode is None): return examples[0]
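To make the column-major shape concrete, a small sketch of what :meth:`fetch` returns under each mode (InMemoryTabular as above; :meth:`astuple` is documented just below):

ds = InMemoryTabular({'a': [0, 1], 'b': [2, 3]})
print(ds.fetch())            # {'a': [0, 1], 'b': [2, 3]}   (mode is dict)
print(ds.astuple().fetch())  # ([0, 1], [2, 3])             (mode is tuple)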
def convert(self, data): 'Convert fetched data.\n\n This method takes data fetched by :meth:`fetch` and\n pre-processes them before passing them to models.\n The default behaviour is converting each column into an ndarray.\n This behaviour can be overridden by :meth:`with_converter`.\n If the dataset is constructed by :meth:`concat` or :meth:`join`,\n the converter of the first dataset is used.\n\n Args:\n data (tuple or dict): Data from :meth:`fetch`.\n\n Returns:\n A tuple or dict.\n Each value is an ndarray.\n ' if isinstance(data, tuple): return tuple((_as_array(d) for d in data)) elif isinstance(data, dict): return {k: _as_array(v) for (k, v) in data.items()} else: return _as_array(data)
-8,245,612,111,549,184,000
Convert fetched data. This method takes data fetched by :meth:`fetch` and pre-processes them before passing them to models. The default behaviour is converting each column into an ndarray. This behaviour can be overridden by :meth:`with_converter`. If the dataset is constructed by :meth:`concat` or :meth:`join`, the converter of the first dataset is used. Args: data (tuple or dict): Data from :meth:`fetch`. Returns: A tuple or dict. Each value is an ndarray.
pytorch_pfn_extras/dataset/tabular/tabular_dataset.py
convert
HiroakiMikami/pytorch-pfn-extras
python
def convert(self, data): 'Convert fetched data.\n\n This method takes data fetched by :meth:`fetch` and\n pre-processes them before passing them to models.\n The default behaviour is converting each column into an ndarray.\n This behaviour can be overridden by :meth:`with_converter`.\n If the dataset is constructed by :meth:`concat` or :meth:`join`,\n the converter of the first dataset is used.\n\n Args:\n data (tuple or dict): Data from :meth:`fetch`.\n\n Returns:\n A tuple or dict.\n Each value is an ndarray.\n ' if isinstance(data, tuple): return tuple((_as_array(d) for d in data)) elif isinstance(data, dict): return {k: _as_array(v) for (k, v) in data.items()} else: return _as_array(data)
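A sketch of the default converter, and of swapping it out with :meth:`with_converter` (documented further down); InMemoryTabular as above, plain numpy only:

import numpy

ds = InMemoryTabular({'a': [0, 1], 'b': [2, 3]})
print(ds.convert(ds.fetch()))   # {'a': array([0, 1]), 'b': array([2, 3])}
stacked = ds.with_converter(lambda d: numpy.stack([d['a'], d['b']], axis=1))
print(stacked.convert(stacked.fetch()))   # 2x2 array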
def astuple(self): 'Return a view with tuple mode.\n\n Returns:\n A view whose :attr:`mode` is :class:`tuple`.\n ' return ppe.dataset.tabular._asmode._Astuple(self)
2,329,924,775,862,789,000
Return a view with tuple mode. Returns: A view whose :attr:`mode` is :class:`tuple`.
pytorch_pfn_extras/dataset/tabular/tabular_dataset.py
astuple
HiroakiMikami/pytorch-pfn-extras
python
def astuple(self): 'Return a view with tuple mode.\n\n Returns:\n A view whose :attr:`mode` is :class:`tuple`.\n ' return ppe.dataset.tabular._asmode._Astuple(self)
def asdict(self): 'Return a view with dict mode.\n\n Returns:\n A view whose :attr:`mode` is :class:`dict`.\n ' return ppe.dataset.tabular._asmode._Asdict(self)
-1,517,756,712,161,640,400
Return a view with dict mode. Returns: A view whose :attr:`mode` is :class:`dict`.
pytorch_pfn_extras/dataset/tabular/tabular_dataset.py
asdict
HiroakiMikami/pytorch-pfn-extras
python
def asdict(self): 'Return a view with dict mode.\n\n Returns:\n A view whose :attr:`mode` is :class:`dict`.\n ' return ppe.dataset.tabular._asmode._Asdict(self)
def concat(self, *datasets): 'Stack datasets along rows.\n\n Args:\n datasets (iterable of :class:`TabularDataset`):\n Datasets to be concatenated.\n All datasets must have the same :attr:`keys`.\n\n Returns:\n A concatenated dataset.\n ' return ppe.dataset.tabular._concat._Concat(self, *datasets)
-9,163,655,187,413,315,000
Stack datasets along rows. Args: datasets (iterable of :class:`TabularDataset`): Datasets to be concatenated. All datasets must have the same :attr:`keys`. Returns: A concatenated dataset.
pytorch_pfn_extras/dataset/tabular/tabular_dataset.py
concat
HiroakiMikami/pytorch-pfn-extras
python
def concat(self, *datasets): 'Stack datasets along rows.\n\n Args:\n datasets (iterable of :class:`TabularDataset`):\n Datasets to be concatenated.\n All datasets must have the same :attr:`keys`.\n\n Returns:\n A concatenated dataset.\n ' return ppe.dataset.tabular._concat._Concat(self, *datasets)
def join(self, *datasets): 'Stack datasets along columns.\n\n Args:\n datasets (iterable of :class:`TabularDataset`):\n Datasets to be joined.\n All datasets must have the same length.\n\n Returns:\n A joined dataset.\n ' return ppe.dataset.tabular._join._Join(self, *datasets)
4,727,159,130,820,419,000
Stack datasets along columns. Args: datasets (iterable of :class:`TabularDataset`): Datasets to be joined. All datasets must have the same length. Returns: A joined dataset.
pytorch_pfn_extras/dataset/tabular/tabular_dataset.py
join
HiroakiMikami/pytorch-pfn-extras
python
def join(self, *datasets): 'Stack datasets along columns.\n\n Args:\n datasets (iterable of :class:`TabularDataset`):\n Datasets to be joined.\n All datasets must have the same length.\n\n Returns:\n A joined dataset.\n ' return ppe.dataset.tabular._join._Join(self, *datasets)
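A sketch contrasting the two stacking directions (InMemoryTabular as above): concat requires matching keys and grows the rows, join requires matching length and grows the columns:

left = InMemoryTabular({'a': [0, 1]})
more = InMemoryTabular({'a': [2, 3]})
right = InMemoryTabular({'b': [4, 5]})
print(len(left.concat(more)))   # 4 rows, keys ('a',)
print(left.join(right).keys)    # ('a', 'b'), still 2 rows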
def transform(self, keys, transform): 'Apply a transform to each example.\n\n The transformations are a list where each element\n is a tuple that holds the transformation signature and\n a callable that is the transformation itself.\n\n The transformation signature is a tuple of 2 elements, with\n the first one being the keys of the dataset that are taken\n as inputs and the second one the outputs it produces for the\n transformation `keys` argument.\n\n When multiple transformations are specified, the outputs\n must be disjoint or a `ValueError` will be raised.\n\n Args:\n keys (tuple of strs): The keys of transformed examples.\n transform (list of tuples): A list where each element\n specifies a transformation with a tuple with the\n transformation signature and a callable that takes an example\n and returns a transformed example. :attr:`mode` of the\n transformed dataset is determined by the transformed\n examples.\n\n Returns:\n A transformed dataset.\n ' return ppe.dataset.tabular._transform._Transform(self, keys, transform)
8,413,242,104,095,821,000
Apply a transform to each example. The transformations are a list where each element is a tuple that holds the transformation signature and a callable that is the transformation itself. The transformation signature is a tuple of 2 elements, with the first one being the keys of the dataset that are taken as inputs and the second one the outputs it produces for the transformation `keys` argument. When multiple transformations are specified, the outputs must be disjoint or a `ValueError` will be raised. Args: keys (tuple of strs): The keys of transformed examples. transform (list of tuples): A list where each element specifies a transformation with a tuple with the transformation signature and a callable that takes an example and returns a transformed example. :attr:`mode` of the transformed dataset is determined by the transformed examples. Returns: A transformed dataset.
pytorch_pfn_extras/dataset/tabular/tabular_dataset.py
transform
HiroakiMikami/pytorch-pfn-extras
python
def transform(self, keys, transform): 'Apply a transform to each example.\n\n The transformations are a list where each element\n is a tuple that holds the transformation signature and\n a callable that is the transformation itself.\n\n The transformation signature is a tuple of 2 elements, with\n the first one being the keys of the dataset that are taken\n as inputs and the second one the outputs it produces for the\n transformation `keys` argument.\n\n When multiple transformations are specified, the outputs\n must be disjoint or a `ValueError` will be raised.\n\n Args:\n keys (tuple of strs): The keys of transformed examples.\n transform (list of tuples): A list where each element\n specifies a transformation with a tuple with the\n transformation signature and a callable that takes an example\n and returns a transformed example. :attr:`mode` of the\n transformed dataset is determined by the transformed\n examples.\n\n Returns:\n A transformed dataset.\n ' return ppe.dataset.tabular._transform._Transform(self, keys, transform)
def transform_batch(self, keys, transform_batch): 'Apply a transform to examples.\n\n The transformations are a list where each element\n is a tuple that holds the transformation signature and\n a callable that is the transformation itself.\n\n The transformation signature is a tuple of 2 elements, with\n the first one being the keys of the dataset that are taken\n as inputs and the second one the outputs it produces for the\n transformation `keys` argument.\n\n When multiple transformations are specified, the outputs\n must be disjoint or a `ValueError` will be raised.\n\n Args:\n keys (tuple of strs): The keys of transformed examples.\n transform_batch (list of tuples): A list where each element\n specifies a transformation with a tuple with the\n transformation signature and a callable that takes a\n batch of examples and returns a batch of transformed examples.\n :attr:`mode` of the transformed dataset is determined by\n the transformed examples.\n\n Returns:\n A transformed dataset.\n ' return ppe.dataset.tabular._transform._TransformBatch(self, keys, transform_batch)
7,518,417,866,828,272,000
Apply a transform to examples. The transformations are a list where each element is a tuple that holds the transformation signature and a callable that is the transformation itself. The transformation signature is a tuple of 2 elements, with the first one being the keys of the dataset that are taken as inputs and the second one the outputs it produces for the transformation `keys` argument. When multiple transformations are specified, the outputs must be disjoint or a `ValueError` will be raised. Args: keys (tuple of strs): The keys of transformed examples. transform_batch (list of tuples): A list where each element specifies a transformation with a tuple with the transformation signature and a callable that takes a batch of examples and returns a batch of transformed examples. :attr:`mode` of the transformed dataset is determined by the transformed examples. Returns: A transformed dataset.
pytorch_pfn_extras/dataset/tabular/tabular_dataset.py
transform_batch
HiroakiMikami/pytorch-pfn-extras
python
def transform_batch(self, keys, transform_batch): 'Apply a transform to examples.\n\n The transformations are a list where each element\n is a tuple that holds the transformation signature and\n a callable that is the transformation itself.\n\n The transformation signature is a tuple of 2 elements, with\n the first one being the keys of the dataset that are taken\n as inputs and the second one the outputs it produces for the\n transformation `keys` argument.\n\n When multiple transformations are specified, the outputs\n must be disjoint or a `ValueError` will be raised.\n\n Args:\n keys (tuple of strs): The keys of transformed examples.\n transform_batch (list of tuples): A list where each element\n specifies a transformation with a tuple with the\n transformation signature and a callable that takes a\n batch of examples and returns a batch of transformed examples.\n :attr:`mode` of the transformed dataset is determined by\n the transformed examples.\n\n Returns:\n A transformed dataset.\n ' return ppe.dataset.tabular._transform._TransformBatch(self, keys, transform_batch)
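A sketch of the list-of-tuples form described above, on InMemoryTabular; the exact convention for how the callable receives its input keys is an assumption here, not confirmed by this file:

ds = InMemoryTabular({'a': [0, 1], 'b': [2, 3]})
doubled = ds.transform(
    ('a2',),
    [((('a',), ('a2',)), lambda a: {'a2': a * 2})])  # input key 'a' -> output key 'a2'
print(doubled.fetch())   # expected: {'a2': [0, 2]}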
def with_converter(self, converter): 'Override the behaviour of :meth:`convert`.\n\n This method overrides :meth:`convert`.\n\n Args:\n converter (callable): A new converter.\n\n Returns:\n A dataset with the new converter.\n ' return ppe.dataset.tabular._with_converter._WithConverter(self, converter)
5,333,493,650,766,142,000
Override the behaviour of :meth:`convert`. This method overrides :meth:`convert`. Args: converter (callable): A new converter. Returns: A dataset with the new converter.
pytorch_pfn_extras/dataset/tabular/tabular_dataset.py
with_converter
HiroakiMikami/pytorch-pfn-extras
python
def with_converter(self, converter): 'Override the behaviour of :meth:`convert`.\n\n This method overrides :meth:`convert`.\n\n Args:\n converter (callable): A new converter.\n\n Returns:\n A dataset with the new converter.\n ' return ppe.dataset.tabular._with_converter._WithConverter(self, converter)
def __getitem__(self, index): 'Returns an example or a sequence of examples.\n It implements the standard Python indexing and one-dimensional integer\n array indexing. It uses the :meth:`get_example` method by default, but\n it may be overridden by the implementation to, for example, improve the\n slicing performance.\n Args:\n index (int, slice, list or numpy.ndarray): An index of an example\n or indexes of examples.\n Returns:\n If index is int, returns an example created by `get_example`.\n If index is either slice or one-dimensional list or numpy.ndarray,\n returns a list of examples created by `get_example`.\n ' if isinstance(index, slice): (current, stop, step) = index.indices(len(self)) return [self.get_example(i) for i in range(current, stop, step)] elif (isinstance(index, list) or isinstance(index, numpy.ndarray)): return [self.get_example(i) for i in index] else: return self.get_example(index)
6,519,675,880,091,241,000
Returns an example or a sequence of examples. It implements the standard Python indexing and one-dimensional integer array indexing. It uses the :meth:`get_example` method by default, but it may be overridden by the implementation to, for example, improve the slicing performance. Args: index (int, slice, list or numpy.ndarray): An index of an example or indexes of examples. Returns: If index is int, returns an example created by `get_example`. If index is either slice or one-dimensional list or numpy.ndarray, returns a list of examples created by `get_example`.
pytorch_pfn_extras/dataset/tabular/tabular_dataset.py
__getitem__
HiroakiMikami/pytorch-pfn-extras
python
def __getitem__(self, index): 'Returns an example or a sequence of examples.\n It implements the standard Python indexing and one-dimensional integer\n array indexing. It uses the :meth:`get_example` method by default, but\n it may be overridden by the implementation to, for example, improve the\n slicing performance.\n Args:\n index (int, slice, list or numpy.ndarray): An index of an example\n or indexes of examples.\n Returns:\n If index is int, returns an example created by `get_example`.\n If index is either slice or one-dimensional list or numpy.ndarray,\n returns a list of examples created by `get_example`.\n ' if isinstance(index, slice): (current, stop, step) = index.indices(len(self)) return [self.get_example(i) for i in range(current, stop, step)] elif (isinstance(index, list) or isinstance(index, numpy.ndarray)): return [self.get_example(i) for i in index] else: return self.get_example(index)
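The three indexing forms accepted by __getitem__, sketched on InMemoryTabular (assuming the base class derives get_example from get_examples, as in the Chainer lineage of this API; with mode dict, each example is a per-row dict):

ds = InMemoryTabular({'a': [0, 1, 2, 3]})
print(ds[1])        # {'a': 1}              (int index)
print(ds[1:3])      # [{'a': 1}, {'a': 2}]  (slice)
print(ds[[0, 3]])   # [{'a': 0}, {'a': 3}]  (list of ints)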
def _fix_sys_path(script_path): "Add the script's folder to the sys path.\n\n Python normally does this automatically, but since we exec the script\n ourselves we need to do it instead.\n " sys.path.insert(0, os.path.dirname(script_path))
7,402,760,777,040,301,000
Add the script's folder to the sys path. Python normally does this automatically, but since we exec the script ourselves we need to do it instead.
notebook/lib/python3.9/site-packages/streamlit/bootstrap.py
_fix_sys_path
Deepanjalkumar/Attacksurfacemanagement
python
def _fix_sys_path(script_path): "Add the script's folder to the sys path.\n\n Python normally does this automatically, but since we exec the script\n ourselves we need to do it instead.\n " sys.path.insert(0, os.path.dirname(script_path))
def _fix_matplotlib_crash(): "Set Matplotlib backend to avoid a crash.\n\n The default Matplotlib backend crashes Python on OSX when run on a thread\n that's not the main thread, so here we set a safer backend as a fix.\n Users can always disable this behavior by setting the config\n runner.fixMatplotlib = false.\n\n This fix is OS-independent. We didn't see a good reason to make this\n Mac-only. Consistency within Streamlit seemed more important.\n " if config.get_option('runner.fixMatplotlib'): try: import matplotlib matplotlib.use('Agg') except ImportError: pass
901,468,904,473,203,000
Set Matplotlib backend to avoid a crash. The default Matplotlib backend crashes Python on OSX when run on a thread that's not the main thread, so here we set a safer backend as a fix. Users can always disable this behavior by setting the config runner.fixMatplotlib = false. This fix is OS-independent. We didn't see a good reason to make this Mac-only. Consistency within Streamlit seemed more important.
notebook/lib/python3.9/site-packages/streamlit/bootstrap.py
_fix_matplotlib_crash
Deepanjalkumar/Attacksurfacemanagement
python
def _fix_matplotlib_crash(): "Set Matplotlib backend to avoid a crash.\n\n The default Matplotlib backend crashes Python on OSX when run on a thread\n that's not the main thread, so here we set a safer backend as a fix.\n Users can always disable this behavior by setting the config\n runner.fixMatplotlib = false.\n\n This fix is OS-independent. We didn't see a good reason to make this\n Mac-only. Consistency within Streamlit seemed more important.\n " if config.get_option('runner.fixMatplotlib'): try: import matplotlib matplotlib.use('Agg') except ImportError: pass
def _fix_tornado_crash(): 'Set default asyncio policy to be compatible with Tornado 6.\n\n Tornado 6 (at least) is not compatible with the default\n asyncio implementation on Windows. So here we\n pick the older SelectorEventLoopPolicy when the OS is Windows\n if the known-incompatible default policy is in use.\n\n This has to happen as early as possible to make it a low priority and\n overrideable\n\n See: https://github.com/tornadoweb/tornado/issues/2608\n\n FIXME: if/when tornado supports the defaults in asyncio,\n remove and bump tornado requirement for py38\n ' if (env_util.IS_WINDOWS and (sys.version_info >= (3, 8))): import asyncio try: from asyncio import WindowsProactorEventLoopPolicy, WindowsSelectorEventLoopPolicy except ImportError: pass else: if (type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy): asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())
4,316,309,139,046,760,400
Set default asyncio policy to be compatible with Tornado 6. Tornado 6 (at least) is not compatible with the default asyncio implementation on Windows. So here we pick the older SelectorEventLoopPolicy when the OS is Windows if the known-incompatible default policy is in use. This has to happen as early as possible to make it a low priority and overrideable See: https://github.com/tornadoweb/tornado/issues/2608 FIXME: if/when tornado supports the defaults in asyncio, remove and bump tornado requirement for py38
notebook/lib/python3.9/site-packages/streamlit/bootstrap.py
_fix_tornado_crash
Deepanjalkumar/Attacksurfacemanagement
python
def _fix_tornado_crash(): 'Set default asyncio policy to be compatible with Tornado 6.\n\n Tornado 6 (at least) is not compatible with the default\n asyncio implementation on Windows. So here we\n pick the older SelectorEventLoopPolicy when the OS is Windows\n if the known-incompatible default policy is in use.\n\n This has to happen as early as possible to make it a low priority and\n overrideable\n\n See: https://github.com/tornadoweb/tornado/issues/2608\n\n FIXME: if/when tornado supports the defaults in asyncio,\n remove and bump tornado requirement for py38\n ' if (env_util.IS_WINDOWS and (sys.version_info >= (3, 8))): import asyncio try: from asyncio import WindowsProactorEventLoopPolicy, WindowsSelectorEventLoopPolicy except ImportError: pass else: if (type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy): asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())
def _fix_sys_argv(script_path, args): "sys.argv needs to exclude streamlit arguments and parameters\n and be set to what a user's script may expect.\n " import sys sys.argv = ([script_path] + list(args))
-674,952,883,092,193,500
sys.argv needs to exclude streamlit arguments and parameters and be set to what a user's script may expect.
notebook/lib/python3.9/site-packages/streamlit/bootstrap.py
_fix_sys_argv
Deepanjalkumar/Attacksurfacemanagement
python
def _fix_sys_argv(script_path, args): "sys.argv needs to exclude streamlit arguments and parameters\n and be set to what a user's script may expect.\n " import sys sys.argv = ([script_path] + list(args))
def _fix_pydeck_mapbox_api_warning(): 'Set the MAPBOX_API_KEY environment variable needed by PyDeck; otherwise it will throw an exception.' os.environ['MAPBOX_API_KEY'] = config.get_option('mapbox.token')
2,014,555,495,941,573,000
Set the MAPBOX_API_KEY environment variable needed by PyDeck; otherwise it will throw an exception.
notebook/lib/python3.9/site-packages/streamlit/bootstrap.py
_fix_pydeck_mapbox_api_warning
Deepanjalkumar/Attacksurfacemanagement
python
def _fix_pydeck_mapbox_api_warning(): os.environ['MAPBOX_API_KEY'] = config.get_option('mapbox.token')
def _maybe_print_old_git_warning(script_path: str) -> None: "If our script is running in a Git repo, and we're running a very old\n Git version, print a warning that Git integration will be unavailable.\n " repo = GitRepo(script_path) if ((not repo.is_valid()) and (repo.git_version is not None) and (repo.git_version < MIN_GIT_VERSION)): git_version_string = '.'.join((str(val) for val in repo.git_version)) min_version_string = '.'.join((str(val) for val in MIN_GIT_VERSION)) click.secho('') click.secho(' Git integration is disabled.', fg='yellow', bold=True) click.secho('') click.secho(f' Streamlit requires Git {min_version_string} or later, but you have {git_version_string}.', fg='yellow') click.secho(' Git is used by Streamlit Sharing (https://streamlit.io/sharing).', fg='yellow') click.secho(' To enable this feature, please update Git.', fg='yellow')
-3,466,808,207,749,236,700
If our script is running in a Git repo, and we're running a very old Git version, print a warning that Git integration will be unavailable.
notebook/lib/python3.9/site-packages/streamlit/bootstrap.py
_maybe_print_old_git_warning
Deepanjalkumar/Attacksurfacemanagement
python
def _maybe_print_old_git_warning(script_path: str) -> None: "If our script is running in a Git repo, and we're running a very old\n Git version, print a warning that Git integration will be unavailable.\n " repo = GitRepo(script_path) if ((not repo.is_valid()) and (repo.git_version is not None) and (repo.git_version < MIN_GIT_VERSION)): git_version_string = '.'.join((str(val) for val in repo.git_version)) min_version_string = '.'.join((str(val) for val in MIN_GIT_VERSION)) click.secho() click.secho(' Git integration is disabled.', fg='yellow', bold=True) click.secho() click.secho(f' Streamlit requires Git {min_version_string} or later, but you have {git_version_string}.', fg='yellow') click.secho(' Git is used by Streamlit Sharing (https://streamlit.io/sharing).', fg='yellow') click.secho(' To enable this feature, please update Git.', fg='yellow')
def load_config_options(flag_options: Dict[(str, Any)]): 'Load config options from config.toml files, then overlay the ones set by\n flag_options.\n\n The "streamlit run" command supports passing Streamlit\'s config options\n as flags. This function reads through the config options set via flag,\n massages them, and passes them to get_config_options() so that they\n overwrite config option defaults and those loaded from config.toml files.\n\n Parameters\n ----------\n flag_options : Dict[str, Any]\n A dict of config options where the keys are the CLI flag version of the\n config option names.\n ' options_from_flags = {name.replace('_', '.'): val for (name, val) in flag_options.items() if (val is not None)} config.get_config_options(force_reparse=True, options_from_flags=options_from_flags)
4,179,268,150,167,291,400
Load config options from config.toml files, then overlay the ones set by flag_options. The "streamlit run" command supports passing Streamlit's config options as flags. This function reads through the config options set via flag, massages them, and passes them to get_config_options() so that they overwrite config option defaults and those loaded from config.toml files. Parameters ---------- flag_options : Dict[str, Any] A dict of config options where the keys are the CLI flag version of the config option names.
notebook/lib/python3.9/site-packages/streamlit/bootstrap.py
load_config_options
Deepanjalkumar/Attacksurfacemanagement
python
def load_config_options(flag_options: Dict[(str, Any)]): 'Load config options from config.toml files, then overlay the ones set by\n flag_options.\n\n The "streamlit run" command supports passing Streamlit\'s config options\n as flags. This function reads through the config options set via flag,\n massages them, and passes them to get_config_options() so that they\n overwrite config option defaults and those loaded from config.toml files.\n\n Parameters\n ----------\n flag_options : Dict[str, Any]\n A dict of config options where the keys are the CLI flag version of the\n config option names.\n ' options_from_flags = {name.replace('_', '.'): val for (name, val) in flag_options.items() if (val is not None)} config.get_config_options(force_reparse=True, options_from_flags=options_from_flags)
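The only massaging done above is the flag-name-to-option-name mapping plus dropping unset flags; the same transformation in isolation (the flag values are made up):

flag_options = {'server_port': 8502, 'server_headless': None}
options_from_flags = {name.replace('_', '.'): val
                      for (name, val) in flag_options.items()
                      if (val is not None)}
print(options_from_flags)   # {'server.port': 8502}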
def run(script_path, command_line, args, flag_options): 'Run a script in a separate thread and start a server for the app.\n\n This starts a blocking ioloop.\n\n Parameters\n ----------\n script_path : str\n command_line : str\n args : [str]\n flag_options : Dict[str, Any]\n ' _fix_sys_path(script_path) _fix_matplotlib_crash() _fix_tornado_crash() _fix_sys_argv(script_path, args) _fix_pydeck_mapbox_api_warning() _install_config_watchers(flag_options) _set_up_signal_handler() ioloop = tornado.ioloop.IOLoop.current() server = Server(ioloop, script_path, command_line) server.start(_on_server_start) server.add_preheated_report_session() ioloop.start()
1,975,938,148,645,434,000
Run a script in a separate thread and start a server for the app. This starts a blocking ioloop. Parameters ---------- script_path : str command_line : str args : [str] flag_options : Dict[str, Any]
notebook/lib/python3.9/site-packages/streamlit/bootstrap.py
run
Deepanjalkumar/Attacksurfacemanagement
python
def run(script_path, command_line, args, flag_options): 'Run a script in a separate thread and start a server for the app.\n\n This starts a blocking ioloop.\n\n Parameters\n ----------\n script_path : str\n command_line : str\n args : [str]\n flag_options : Dict[str, Any]\n ' _fix_sys_path(script_path) _fix_matplotlib_crash() _fix_tornado_crash() _fix_sys_argv(script_path, args) _fix_pydeck_mapbox_api_warning() _install_config_watchers(flag_options) _set_up_signal_handler() ioloop = tornado.ioloop.IOLoop.current() server = Server(ioloop, script_path, command_line) server.start(_on_server_start) server.add_preheated_report_session() ioloop.start()
def gradients(vals, func, releps=0.001, abseps=None, mineps=1e-09, reltol=1, epsscale=0.5): '\n Calculate the partial derivatives of a function at a set of values. The\n derivatives are calculated using the central difference, using an iterative\n method to check that the values converge as step size decreases.\n\n Parameters\n ----------\n vals: array_like\n A set of values that are passed to a function, at which to calculate\n the gradient of that function\n func:\n A function that takes in an array of values.\n releps: float, array_like, 1e-3\n The initial relative step size for calculating the derivative.\n abseps: float, array_like, None\n The initial absolute step size for calculating the derivative.\n This overrides `releps` if set; otherwise `releps` is used.\n mineps: float, 1e-9\n The minimum relative step size at which to stop iterations if no\n convergence is achieved.\n reltol: float, 1\n The relative tolerance between successive derivative estimates at\n which convergence is accepted.\n epsscale: float, 0.5\n The factor by which releps is scaled in each iteration.\n\n Returns\n -------\n grads: array_like\n An array of gradients for each non-fixed value.\n ' grads = np.zeros(len(vals)) flipflopmax = 10.0 if (abseps is None): if isinstance(releps, float): eps = (np.abs(vals) * releps) eps[(eps == 0.0)] = releps teps = (releps * np.ones(len(vals))) elif isinstance(releps, (list, np.ndarray)): if (len(releps) != len(vals)): raise ValueError('Problem with input relative step sizes') eps = np.multiply(np.abs(vals), releps) eps[(eps == 0.0)] = np.array(releps)[(eps == 0.0)] teps = releps else: raise RuntimeError('Relative step sizes are not a recognised type!') else: if isinstance(abseps, float): eps = (abseps * np.ones(len(vals))) elif isinstance(abseps, (list, np.ndarray)): if (len(abseps) != len(vals)): raise ValueError('Problem with input absolute step sizes') eps = np.array(abseps) else: raise RuntimeError('Absolute step sizes are not a recognised type!') teps = eps count = 0 for i in range(len(vals)): leps = eps[i] cureps = teps[i] flipflop = 0 fvals = np.copy(vals) bvals = np.copy(vals) fvals[i] += (0.5 * leps) bvals[i] -= (0.5 * leps) cdiff = ((func(fvals) - func(bvals)) / leps) while 1: fvals[i] -= (0.5 * leps) bvals[i] += (0.5 * leps) cureps *= epsscale if ((cureps < mineps) or (flipflop > flipflopmax)): logger.warning('Derivative calculation did not converge: setting flat derivative.') grads[count] = 0.0 break leps *= epsscale fvals[i] += (0.5 * leps) bvals[i] -= (0.5 * leps) cdiffnew = ((func(fvals) - func(bvals)) / leps) if (cdiffnew == cdiff): grads[count] = cdiff break rat = (cdiff / cdiffnew) if (np.isfinite(rat) and (rat > 0.0)): if (np.abs((1.0 - rat)) < reltol): grads[count] = cdiffnew break else: cdiff = cdiffnew continue else: cdiff = cdiffnew flipflop += 1 continue count += 1 return grads
-5,688,202,814,726,371,000
Calculate the partial derivatives of a function at a set of values. The derivatives are calculated using the central difference, using an iterative method to check that the values converge as step size decreases. Parameters ---------- vals: array_like A set of values that are passed to a function, at which to calculate the gradient of that function func: A function that takes in an array of values. releps: float, array_like, 1e-3 The initial relative step size for calculating the derivative. abseps: float, array_like, None The initial absolute step size for calculating the derivative. This overrides `releps` if set; otherwise `releps` is used. mineps: float, 1e-9 The minimum relative step size at which to stop iterations if no convergence is achieved. reltol: float, 1 The relative tolerance between successive derivative estimates at which convergence is accepted. epsscale: float, 0.5 The factor by which releps is scaled in each iteration. Returns ------- grads: array_like An array of gradients for each non-fixed value.
LoopStructural/probability/_gradient_calculator.py
gradients
Loop3D/LoopStructural
python
def gradients(vals, func, releps=0.001, abseps=None, mineps=1e-09, reltol=1, epsscale=0.5): '\n Calculate the partial derivatives of a function at a set of values. The\n derivatives are calculated using the central difference, using an iterative\n method to check that the values converge as step size decreases.\n\n Parameters\n ----------\n vals: array_like\n A set of values that are passed to a function, at which to calculate\n the gradient of that function\n func:\n A function that takes in an array of values.\n releps: float, array_like, 1e-3\n The initial relative step size for calculating the derivative.\n abseps: float, array_like, None\n The initial absolute step size for calculating the derivative.\n This overrides `releps` if set; otherwise `releps` is used.\n mineps: float, 1e-9\n The minimum relative step size at which to stop iterations if no\n convergence is achieved.\n reltol: float, 1\n The relative tolerance between successive derivative estimates at\n which convergence is accepted.\n epsscale: float, 0.5\n The factor by which releps is scaled in each iteration.\n\n Returns\n -------\n grads: array_like\n An array of gradients for each non-fixed value.\n ' grads = np.zeros(len(vals)) flipflopmax = 10.0 if (abseps is None): if isinstance(releps, float): eps = (np.abs(vals) * releps) eps[(eps == 0.0)] = releps teps = (releps * np.ones(len(vals))) elif isinstance(releps, (list, np.ndarray)): if (len(releps) != len(vals)): raise ValueError('Problem with input relative step sizes') eps = np.multiply(np.abs(vals), releps) eps[(eps == 0.0)] = np.array(releps)[(eps == 0.0)] teps = releps else: raise RuntimeError('Relative step sizes are not a recognised type!') else: if isinstance(abseps, float): eps = (abseps * np.ones(len(vals))) elif isinstance(abseps, (list, np.ndarray)): if (len(abseps) != len(vals)): raise ValueError('Problem with input absolute step sizes') eps = np.array(abseps) else: raise RuntimeError('Absolute step sizes are not a recognised type!') teps = eps count = 0 for i in range(len(vals)): leps = eps[i] cureps = teps[i] flipflop = 0 fvals = np.copy(vals) bvals = np.copy(vals) fvals[i] += (0.5 * leps) bvals[i] -= (0.5 * leps) cdiff = ((func(fvals) - func(bvals)) / leps) while 1: fvals[i] -= (0.5 * leps) bvals[i] += (0.5 * leps) cureps *= epsscale if ((cureps < mineps) or (flipflop > flipflopmax)): logger.warning('Derivative calculation did not converge: setting flat derivative.') grads[count] = 0.0 break leps *= epsscale fvals[i] += (0.5 * leps) bvals[i] -= (0.5 * leps) cdiffnew = ((func(fvals) - func(bvals)) / leps) if (cdiffnew == cdiff): grads[count] = cdiff break rat = (cdiff / cdiffnew) if (np.isfinite(rat) and (rat > 0.0)): if (np.abs((1.0 - rat)) < reltol): grads[count] = cdiffnew break else: cdiff = cdiffnew continue else: cdiff = cdiffnew flipflop += 1 continue count += 1 return grads
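A quick numerical check of gradients against a function with a known derivative; assumes numpy as np and a module-level logger are in scope, as in the source module:

import logging
import numpy as np

logger = logging.getLogger(__name__)

def f(x):
    return (x[0] ** 2) + (3.0 * x[1])

# d/dx0 = 2*x0 = 4 at x0=2; d/dx1 = 3 everywhere.
print(gradients(np.array([2.0, 5.0]), f))   # approximately [4.0, 3.0]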
def couple(lst1, lst2): "Return a list that contains lists with i-th elements of two sequences\n coupled together.\n >>> lst1 = [1, 2, 3]\n >>> lst2 = [4, 5, 6]\n >>> couple(lst1, lst2)\n [[1, 4], [2, 5], [3, 6]]\n >>> lst3 = ['c', 6]\n >>> lst4 = ['s', '1']\n >>> couple(lst3, lst4)\n [['c', 's'], [6, '1']]\n " assert (len(lst1) == len(lst2)) a = [] for i in range(len(lst1)): a.append([lst1[i], lst2[i]]) return a
6,645,344,682,957,890,000
Return a list that contains lists with i-th elements of two sequences coupled together. >>> lst1 = [1, 2, 3] >>> lst2 = [4, 5, 6] >>> couple(lst1, lst2) [[1, 4], [2, 5], [3, 6]] >>> lst3 = ['c', 6] >>> lst4 = ['s', '1'] >>> couple(lst3, lst4) [['c', 's'], [6, '1']]
lab/lab05/lab05.py
couple
weijiew/cs61a
python
def couple(lst1, lst2): "Return a list that contains lists with i-th elements of two sequences\n coupled together.\n >>> lst1 = [1, 2, 3]\n >>> lst2 = [4, 5, 6]\n >>> couple(lst1, lst2)\n [[1, 4], [2, 5], [3, 6]]\n >>> lst3 = ['c', 6]\n >>> lst4 = ['s', '1']\n >>> couple(lst3, lst4)\n [['c', 's'], [6, '1']]\n " assert (len(lst1) == len(lst2)) a = [] for i in range(len(lst1)): a.append([lst1[i], lst2[i]]) return a
def distance(city1, city2): "\n >>> city1 = make_city('city1', 0, 1)\n >>> city2 = make_city('city2', 0, 2)\n >>> distance(city1, city2)\n 1.0\n >>> city3 = make_city('city3', 6.5, 12)\n >>> city4 = make_city('city4', 2.5, 15)\n >>> distance(city3, city4)\n 5.0\n " (x1, y1) = (get_lat(city1), get_lon(city1)) (x2, y2) = (get_lat(city2), get_lon(city2)) return sqrt((((x1 - x2) ** 2) + ((y1 - y2) ** 2)))
5,752,097,178,087,292,000
>>> city1 = make_city('city1', 0, 1) >>> city2 = make_city('city2', 0, 2) >>> distance(city1, city2) 1.0 >>> city3 = make_city('city3', 6.5, 12) >>> city4 = make_city('city4', 2.5, 15) >>> distance(city3, city4) 5.0
lab/lab05/lab05.py
distance
weijiew/cs61a
python
def distance(city1, city2): "\n >>> city1 = make_city('city1', 0, 1)\n >>> city2 = make_city('city2', 0, 2)\n >>> distance(city1, city2)\n 1.0\n >>> city3 = make_city('city3', 6.5, 12)\n >>> city4 = make_city('city4', 2.5, 15)\n >>> distance(city3, city4)\n 5.0\n " (x1, y1) = (get_lat(city1), get_lon(city1)) (x2, y2) = (get_lat(city2), get_lon(city2)) return sqrt((((x1 - x2) ** 2) + ((y1 - y2) ** 2)))
def closer_city(lat, lon, city1, city2): "\n Returns the name of either city1 or city2, whichever is closest to\n coordinate (lat, lon).\n\n >>> berkeley = make_city('Berkeley', 37.87, 112.26)\n >>> stanford = make_city('Stanford', 34.05, 118.25)\n >>> closer_city(38.33, 121.44, berkeley, stanford)\n 'Stanford'\n >>> bucharest = make_city('Bucharest', 44.43, 26.10)\n >>> vienna = make_city('Vienna', 48.20, 16.37)\n >>> closer_city(41.29, 174.78, bucharest, vienna)\n 'Bucharest'\n " tmp = make_city('tmp', lat, lon) dis1 = distance(city1, tmp) dis2 = distance(city2, tmp) if (dis1 > dis2): return get_name(city2) else: return get_name(city1)
5,909,678,910,041,403,000
Returns the name of either city1 or city2, whichever is closest to coordinate (lat, lon). >>> berkeley = make_city('Berkeley', 37.87, 112.26) >>> stanford = make_city('Stanford', 34.05, 118.25) >>> closer_city(38.33, 121.44, berkeley, stanford) 'Stanford' >>> bucharest = make_city('Bucharest', 44.43, 26.10) >>> vienna = make_city('Vienna', 48.20, 16.37) >>> closer_city(41.29, 174.78, bucharest, vienna) 'Bucharest'
lab/lab05/lab05.py
closer_city
weijiew/cs61a
python
def closer_city(lat, lon, city1, city2): "\n Returns the name of either city1 or city2, whichever is closest to\n coordinate (lat, lon).\n\n >>> berkeley = make_city('Berkeley', 37.87, 112.26)\n >>> stanford = make_city('Stanford', 34.05, 118.25)\n >>> closer_city(38.33, 121.44, berkeley, stanford)\n 'Stanford'\n >>> bucharest = make_city('Bucharest', 44.43, 26.10)\n >>> vienna = make_city('Vienna', 48.20, 16.37)\n >>> closer_city(41.29, 174.78, bucharest, vienna)\n 'Bucharest'\n " tmp = make_city('tmp', lat, lon) dis1 = distance(city1, tmp) dis2 = distance(city2, tmp) if (dis1 > dis2): return get_name(city2) else: return get_name(city1)
def check_abstraction(): "\n There's nothing for you to do for this function, it's just here for the extra doctest\n >>> change_abstraction(True)\n >>> city1 = make_city('city1', 0, 1)\n >>> city2 = make_city('city2', 0, 2)\n >>> distance(city1, city2)\n 1.0\n >>> city3 = make_city('city3', 6.5, 12)\n >>> city4 = make_city('city4', 2.5, 15)\n >>> distance(city3, city4)\n 5.0\n >>> berkeley = make_city('Berkeley', 37.87, 112.26)\n >>> stanford = make_city('Stanford', 34.05, 118.25)\n >>> closer_city(38.33, 121.44, berkeley, stanford)\n 'Stanford'\n >>> bucharest = make_city('Bucharest', 44.43, 26.10)\n >>> vienna = make_city('Vienna', 48.20, 16.37)\n >>> closer_city(41.29, 174.78, bucharest, vienna)\n 'Bucharest'\n >>> change_abstraction(False)\n "
7,676,074,233,030,160,000
There's nothing for you to do for this function, it's just here for the extra doctest >>> change_abstraction(True) >>> city1 = make_city('city1', 0, 1) >>> city2 = make_city('city2', 0, 2) >>> distance(city1, city2) 1.0 >>> city3 = make_city('city3', 6.5, 12) >>> city4 = make_city('city4', 2.5, 15) >>> distance(city3, city4) 5.0 >>> berkeley = make_city('Berkeley', 37.87, 112.26) >>> stanford = make_city('Stanford', 34.05, 118.25) >>> closer_city(38.33, 121.44, berkeley, stanford) 'Stanford' >>> bucharest = make_city('Bucharest', 44.43, 26.10) >>> vienna = make_city('Vienna', 48.20, 16.37) >>> closer_city(41.29, 174.78, bucharest, vienna) 'Bucharest' >>> change_abstraction(False)
lab/lab05/lab05.py
check_abstraction
weijiew/cs61a
python
def check_abstraction(): "\n There's nothing for you to do for this function, it's just here for the extra doctest\n >>> change_abstraction(True)\n >>> city1 = make_city('city1', 0, 1)\n >>> city2 = make_city('city2', 0, 2)\n >>> distance(city1, city2)\n 1.0\n >>> city3 = make_city('city3', 6.5, 12)\n >>> city4 = make_city('city4', 2.5, 15)\n >>> distance(city3, city4)\n 5.0\n >>> berkeley = make_city('Berkeley', 37.87, 112.26)\n >>> stanford = make_city('Stanford', 34.05, 118.25)\n >>> closer_city(38.33, 121.44, berkeley, stanford)\n 'Stanford'\n >>> bucharest = make_city('Bucharest', 44.43, 26.10)\n >>> vienna = make_city('Vienna', 48.20, 16.37)\n >>> closer_city(41.29, 174.78, bucharest, vienna)\n 'Bucharest'\n >>> change_abstraction(False)\n "
def make_city(name, lat, lon): "\n >>> city = make_city('Berkeley', 0, 1)\n >>> get_name(city)\n 'Berkeley'\n >>> get_lat(city)\n 0\n >>> get_lon(city)\n 1\n " if change_abstraction.changed: return {'name': name, 'lat': lat, 'lon': lon} else: return [name, lat, lon]
4,689,566,000,169,289,000
>>> city = make_city('Berkeley', 0, 1) >>> get_name(city) 'Berkeley' >>> get_lat(city) 0 >>> get_lon(city) 1
lab/lab05/lab05.py
make_city
weijiew/cs61a
python
def make_city(name, lat, lon): "\n >>> city = make_city('Berkeley', 0, 1)\n >>> get_name(city)\n 'Berkeley'\n >>> get_lat(city)\n 0\n >>> get_lon(city)\n 1\n " if change_abstraction.changed: return {'name': name, 'lat': lat, 'lon': lon} else: return [name, lat, lon]
def get_name(city): "\n >>> city = make_city('Berkeley', 0, 1)\n >>> get_name(city)\n 'Berkeley'\n " if change_abstraction.changed: return city['name'] else: return city[0]
460,446,731,083,844,100
>>> city = make_city('Berkeley', 0, 1) >>> get_name(city) 'Berkeley'
lab/lab05/lab05.py
get_name
weijiew/cs61a
python
def get_name(city): "\n >>> city = make_city('Berkeley', 0, 1)\n >>> get_name(city)\n 'Berkeley'\n " if change_abstraction.changed: return city['name'] else: return city[0]
def get_lat(city): "\n >>> city = make_city('Berkeley', 0, 1)\n >>> get_lat(city)\n 0\n " if change_abstraction.changed: return city['lat'] else: return city[1]
-763,576,806,966,212,000
>>> city = make_city('Berkeley', 0, 1) >>> get_lat(city) 0
lab/lab05/lab05.py
get_lat
weijiew/cs61a
python
def get_lat(city): "\n >>> city = make_city('Berkeley', 0, 1)\n >>> get_lat(city)\n 0\n " if change_abstraction.changed: return city['lat'] else: return city[1]
def get_lon(city): "\n >>> city = make_city('Berkeley', 0, 1)\n >>> get_lon(city)\n 1\n " if change_abstraction.changed: return city['lon'] else: return city[2]
-1,493,669,833,233,132,000
>>> city = make_city('Berkeley', 0, 1) >>> get_lon(city) 1
lab/lab05/lab05.py
get_lon
weijiew/cs61a
python
def get_lon(city): "\n >>> city = make_city('Berkeley', 0, 1)\n >>> get_lon(city)\n 1\n " if change_abstraction.changed: return city['lon'] else: return city[2]
def nut_finder(t): "Returns True if t contains a node with the value 'nut' and\n False otherwise.\n\n >>> scrat = tree('nut')\n >>> nut_finder(scrat)\n True\n >>> sproul = tree('roots', [tree('branch1', [tree('leaf'), tree('nut')]), tree('branch2')])\n >>> nut_finder(sproul)\n True\n >>> numbers = tree(1, [tree(2), tree(3, [tree(4), tree(5)]), tree(6, [tree(7)])])\n >>> nut_finder(numbers)\n False\n >>> t = tree(1, [tree('nut',[tree('not nut')])])\n >>> nut_finder(t)\n True\n " if (label(t) == 'nut'): return True for node in branches(t): if nut_finder(node): return True return False
4,750,845,568,511,252,000
Returns True if t contains a node with the value 'nut' and False otherwise. >>> scrat = tree('nut') >>> nut_finder(scrat) True >>> sproul = tree('roots', [tree('branch1', [tree('leaf'), tree('nut')]), tree('branch2')]) >>> nut_finder(sproul) True >>> numbers = tree(1, [tree(2), tree(3, [tree(4), tree(5)]), tree(6, [tree(7)])]) >>> nut_finder(numbers) False >>> t = tree(1, [tree('nut',[tree('not nut')])]) >>> nut_finder(t) True
lab/lab05/lab05.py
nut_finder
weijiew/cs61a
python
def nut_finder(t): "Returns True if t contains a node with the value 'nut' and\n False otherwise.\n\n >>> scrat = tree('nut')\n >>> nut_finder(scrat)\n True\n >>> sproul = tree('roots', [tree('branch1', [tree('leaf'), tree('nut')]), tree('branch2')])\n >>> nut_finder(sproul)\n True\n >>> numbers = tree(1, [tree(2), tree(3, [tree(4), tree(5)]), tree(6, [tree(7)])])\n >>> nut_finder(numbers)\n False\n >>> t = tree(1, [tree('nut',[tree('not nut')])])\n >>> nut_finder(t)\n True\n " if (label(t) == 'nut'): return True for node in branches(t): if nut_finder(node): return True return False
def sprout_leaves(t, values): 'Sprout new leaves containing the data in values at each leaf in\n the original tree t and return the resulting tree.\n\n >>> t1 = tree(1, [tree(2), tree(3)])\n >>> print_tree(t1)\n 1\n 2\n 3\n >>> new1 = sprout_leaves(t1, [4, 5])\n >>> print_tree(new1)\n 1\n 2\n 4\n 5\n 3\n 4\n 5\n\n >>> t2 = tree(1, [tree(2, [tree(3)])])\n >>> print_tree(t2)\n 1\n 2\n 3\n >>> new2 = sprout_leaves(t2, [6, 1, 2])\n >>> print_tree(new2)\n 1\n 2\n 3\n 6\n 1\n 2\n ' if is_leaf(t): return tree(label(t), [tree(v) for v in values]) return tree(label(t), [sprout_leaves(b, values) for b in branches(t)])
-7,956,871,296,061,474,000
Sprout new leaves containing the data in values at each leaf in the original tree t and return the resulting tree. >>> t1 = tree(1, [tree(2), tree(3)]) >>> print_tree(t1) 1 2 3 >>> new1 = sprout_leaves(t1, [4, 5]) >>> print_tree(new1) 1 2 4 5 3 4 5 >>> t2 = tree(1, [tree(2, [tree(3)])]) >>> print_tree(t2) 1 2 3 >>> new2 = sprout_leaves(t2, [6, 1, 2]) >>> print_tree(new2) 1 2 3 6 1 2
lab/lab05/lab05.py
sprout_leaves
weijiew/cs61a
python
def sprout_leaves(t, values): 'Sprout new leaves containing the data in values at each leaf in\n the original tree t and return the resulting tree.\n\n >>> t1 = tree(1, [tree(2), tree(3)])\n >>> print_tree(t1)\n 1\n 2\n 3\n >>> new1 = sprout_leaves(t1, [4, 5])\n >>> print_tree(new1)\n 1\n 2\n 4\n 5\n 3\n 4\n 5\n\n >>> t2 = tree(1, [tree(2, [tree(3)])])\n >>> print_tree(t2)\n 1\n 2\n 3\n >>> new2 = sprout_leaves(t2, [6, 1, 2])\n >>> print_tree(new2)\n 1\n 2\n 3\n 6\n 1\n 2\n ' if is_leaf(t): return tree(label(t), [tree(v) for v in values]) return tree(label(t), [sprout_leaves(b, values) for b in branches(t)])
def tree(label, branches=[]): 'Construct a tree with the given label value and a list of branches.' for branch in branches: assert is_tree(branch), 'branches must be trees' return ([label] + list(branches))
2,953,853,968,114,559,000
Construct a tree with the given label value and a list of branches.
lab/lab05/lab05.py
tree
weijiew/cs61a
python
def tree(label, branches=[]): for branch in branches: assert is_tree(branch), 'branches must be trees' return ([label] + list(branches))
def label(tree): 'Return the label value of a tree.' return tree[0]
4,737,495,565,978,606,000
Return the label value of a tree.
lab/lab05/lab05.py
label
weijiew/cs61a
python
def label(tree): return tree[0]
def branches(tree): 'Return the list of branches of the given tree.' return tree[1:]
7,668,930,889,431,274,000
Return the list of branches of the given tree.
lab/lab05/lab05.py
branches
weijiew/cs61a
python
def branches(tree): return tree[1:]
def is_tree(tree): 'Returns True if the given tree is a tree, and False otherwise.' if ((type(tree) != list) or (len(tree) < 1)): return False for branch in branches(tree): if (not is_tree(branch)): return False return True
-5,480,379,214,850,312,000
Returns True if the given tree is a tree, and False otherwise.
lab/lab05/lab05.py
is_tree
weijiew/cs61a
python
def is_tree(tree): if ((type(tree) != list) or (len(tree) < 1)): return False for branch in branches(tree): if (not is_tree(branch)): return False return True
def is_leaf(tree): "Returns True if the given tree's list of branches is empty, and False\n otherwise.\n " return (not branches(tree))
-3,935,306,139,317,841,000
Returns True if the given tree's list of branches is empty, and False otherwise.
lab/lab05/lab05.py
is_leaf
weijiew/cs61a
python
def is_leaf(tree): "Returns True if the given tree's list of branches is empty, and False\n otherwise.\n " return (not branches(tree))
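Because tree stores a label followed by its branches, the underlying list shape is easy to inspect; a small sketch using the ADT above:

t = tree(1, [tree(2), tree(3, [tree(4)])])
print(t)                 # [1, [2], [3, [4]]]
print(is_tree(t))        # True
print(is_leaf(tree(5)))  # True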
def print_tree(t, indent=0): 'Print a representation of this tree in which each node is\n indented by two spaces times its depth from the root.\n\n >>> print_tree(tree(1))\n 1\n >>> print_tree(tree(1, [tree(2)]))\n 1\n 2\n >>> numbers = tree(1, [tree(2), tree(3, [tree(4), tree(5)]), tree(6, [tree(7)])])\n >>> print_tree(numbers)\n 1\n 2\n 3\n 4\n 5\n 6\n 7\n ' print(((' ' * indent) + str(label(t)))) for b in branches(t): print_tree(b, (indent + 1))
-5,713,328,670,943,918,000
Print a representation of this tree in which each node is indented by two spaces times its depth from the root. >>> print_tree(tree(1)) 1 >>> print_tree(tree(1, [tree(2)])) 1 2 >>> numbers = tree(1, [tree(2), tree(3, [tree(4), tree(5)]), tree(6, [tree(7)])]) >>> print_tree(numbers) 1 2 3 4 5 6 7
lab/lab05/lab05.py
print_tree
weijiew/cs61a
python
def print_tree(t, indent=0): 'Print a representation of this tree in which each node is\n indented by two spaces times its depth from the root.\n\n >>> print_tree(tree(1))\n 1\n >>> print_tree(tree(1, [tree(2)]))\n 1\n 2\n >>> numbers = tree(1, [tree(2), tree(3, [tree(4), tree(5)]), tree(6, [tree(7)])])\n >>> print_tree(numbers)\n 1\n 2\n 3\n 4\n 5\n 6\n 7\n ' print(((' ' * indent) + str(label(t)))) for b in branches(t): print_tree(b, (indent + 1))
def copy_tree(t): 'Returns a copy of t. Only for testing purposes.\n\n >>> t = tree(5)\n >>> copy = copy_tree(t)\n >>> t = tree(6)\n >>> print_tree(copy)\n 5\n ' return tree(label(t), [copy_tree(b) for b in branches(t)])
-3,218,421,863,158,620,700
Returns a copy of t. Only for testing purposes. >>> t = tree(5) >>> copy = copy_tree(t) >>> t = tree(6) >>> print_tree(copy) 5
lab/lab05/lab05.py
copy_tree
weijiew/cs61a
python
def copy_tree(t): 'Returns a copy of t. Only for testing purposes.\n\n >>> t = tree(5)\n >>> copy = copy_tree(t)\n >>> t = tree(6)\n >>> print_tree(copy)\n 5\n ' return tree(label(t), [copy_tree(b) for b in branches(t)])
def add_chars(w1, w2): '\n Return a string containing the characters you need to add to w1 to get w2.\n\n You may assume that w1 is a subsequence of w2.\n\n >>> add_chars("owl", "howl")\n \'h\'\n >>> add_chars("want", "wanton")\n \'on\'\n >>> add_chars("rat", "radiate")\n \'diae\'\n >>> add_chars("a", "prepare")\n \'prepre\'\n >>> add_chars("resin", "recursion")\n \'curo\'\n >>> add_chars("fin", "effusion")\n \'efuso\'\n >>> add_chars("coy", "cacophony")\n \'acphon\'\n >>> from construct_check import check\n >>> # ban iteration and sets\n >>> check(LAB_SOURCE_FILE, \'add_chars\',\n ... [\'For\', \'While\', \'Set\', \'SetComp\']) # Must use recursion\n True\n ' if (not w2): return '' if (w1 and (w1[0] == w2[0])): return add_chars(w1[1:], w2[1:]) return (w2[0] + add_chars(w1, w2[1:]))
8,587,130,409,299,641,000
Return a string containing the characters you need to add to w1 to get w2. You may assume that w1 is a subsequence of w2. >>> add_chars("owl", "howl") 'h' >>> add_chars("want", "wanton") 'on' >>> add_chars("rat", "radiate") 'diae' >>> add_chars("a", "prepare") 'prepre' >>> add_chars("resin", "recursion") 'curo' >>> add_chars("fin", "effusion") 'efuso' >>> add_chars("coy", "cacophony") 'acphon' >>> from construct_check import check >>> # ban iteration and sets >>> check(LAB_SOURCE_FILE, 'add_chars', ... ['For', 'While', 'Set', 'SetComp']) # Must use recursion True
lab/lab05/lab05.py
add_chars
weijiew/cs61a
python
def add_chars(w1, w2): '\n Return a string containing the characters you need to add to w1 to get w2.\n\n You may assume that w1 is a subsequence of w2.\n\n >>> add_chars("owl", "howl")\n \'h\'\n >>> add_chars("want", "wanton")\n \'on\'\n >>> add_chars("rat", "radiate")\n \'diae\'\n >>> add_chars("a", "prepare")\n \'prepre\'\n >>> add_chars("resin", "recursion")\n \'curo\'\n >>> add_chars("fin", "effusion")\n \'efuso\'\n >>> add_chars("coy", "cacophony")\n \'acphon\'\n >>> from construct_check import check\n >>> # ban iteration and sets\n >>> check(LAB_SOURCE_FILE, \'add_chars\',\n ... [\'For\', \'While\', \'Set\', \'SetComp\']) # Must use recursion\n True\n ' if (not w2): return '' if (w1 and (w1[0] == w2[0])): return add_chars(w1[1:], w2[1:]) return (w2[0] + add_chars(w1, w2[1:]))
def add_trees(t1, t2): '\n >>> numbers = tree(1,\n ... [tree(2,\n ... [tree(3),\n ... tree(4)]),\n ... tree(5,\n ... [tree(6,\n ... [tree(7)]),\n ... tree(8)])])\n >>> print_tree(add_trees(numbers, numbers))\n 2\n 4\n 6\n 8\n 10\n 12\n 14\n 16\n >>> print_tree(add_trees(tree(2), tree(3, [tree(4), tree(5)])))\n 5\n 4\n 5\n >>> print_tree(add_trees(tree(2, [tree(3)]), tree(2, [tree(3), tree(4)])))\n 4\n 6\n 4\n >>> print_tree(add_trees(tree(2, [tree(3, [tree(4), tree(5)])]), tree(2, [tree(3, [tree(4)]), tree(5)])))\n 4\n 6\n 8\n 5\n 5\n ' if (t1 is None): return t2 if (t2 is None): return t1 (t1_branches, t2_branches) = (branches(t1), branches(t2)) if (len(t1_branches) < len(t2_branches)): t1_branches += ([None] * (len(t2_branches) - len(t1_branches))) elif (len(t1_branches) > len(t2_branches)): t2_branches += ([None] * (len(t1_branches) - len(t2_branches))) return tree((label(t1) + label(t2)), [add_trees(b1, b2) for (b1, b2) in zip(t1_branches, t2_branches)])
-3,282,762,069,251,459,000
>>> numbers = tree(1, ... [tree(2, ... [tree(3), ... tree(4)]), ... tree(5, ... [tree(6, ... [tree(7)]), ... tree(8)])]) >>> print_tree(add_trees(numbers, numbers)) 2 4 6 8 10 12 14 16 >>> print_tree(add_trees(tree(2), tree(3, [tree(4), tree(5)]))) 5 4 5 >>> print_tree(add_trees(tree(2, [tree(3)]), tree(2, [tree(3), tree(4)]))) 4 6 4 >>> print_tree(add_trees(tree(2, [tree(3, [tree(4), tree(5)])]), tree(2, [tree(3, [tree(4)]), tree(5)]))) 4 6 8 5 5
lab/lab05/lab05.py
add_trees
weijiew/cs61a
python
def add_trees(t1, t2): '*** YOUR CODE HERE ***'
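A possible solution sketch for add_trees (again, not the official answer): add labels pointwise and recurse over paired branches, keeping any unpaired branches unchanged.

def add_trees(t1, t2):
    b1, b2 = branches(t1), branches(t2)
    new_branches = []
    for i in range(max(len(b1), len(b2))):
        if i >= len(b1):
            new_branches.append(b2[i])                    # only t2 has this branch
        elif i >= len(b2):
            new_branches.append(b1[i])                    # only t1 has this branch
        else:
            new_branches.append(add_trees(b1[i], b2[i]))  # add paired branches
    return tree(label(t1) + label(t2), new_branches)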
def build_successors_table(tokens): "Return a dictionary: keys are words; values are lists of successors.\n\n >>> text = ['We', 'came', 'to', 'investigate', ',', 'catch', 'bad', 'guys', 'and', 'to', 'eat', 'pie', '.']\n >>> table = build_successors_table(text)\n >>> sorted(table)\n [',', '.', 'We', 'and', 'bad', 'came', 'catch', 'eat', 'guys', 'investigate', 'pie', 'to']\n >>> table['to']\n ['investigate', 'eat']\n >>> table['pie']\n ['.']\n >>> table['.']\n ['We']\n " table = {} prev = '.' for word in tokens: if (prev not in table): '*** YOUR CODE HERE ***' '*** YOUR CODE HERE ***' prev = word return table
-2,718,429,813,578,603,000
Return a dictionary: keys are words; values are lists of successors. >>> text = ['We', 'came', 'to', 'investigate', ',', 'catch', 'bad', 'guys', 'and', 'to', 'eat', 'pie', '.'] >>> table = build_successors_table(text) >>> sorted(table) [',', '.', 'We', 'and', 'bad', 'came', 'catch', 'eat', 'guys', 'investigate', 'pie', 'to'] >>> table['to'] ['investigate', 'eat'] >>> table['pie'] ['.'] >>> table['.'] ['We']
lab/lab05/lab05.py
build_successors_table
weijiew/cs61a
python
def build_successors_table(tokens): table = {} prev = '.' for word in tokens: if (prev not in table): '*** YOUR CODE HERE ***' '*** YOUR CODE HERE ***' prev = word return table
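The two placeholders in build_successors_table can be filled as follows (one straightforward possibility): start an empty successor list the first time a word is seen as a predecessor, then record the current word as a successor of the previous one.

def build_successors_table(tokens):
    table = {}
    prev = '.'
    for word in tokens:
        if prev not in table:
            table[prev] = []         # first occurrence of prev as a predecessor
        table[prev].append(word)     # word follows prev in the text
        prev = word
    return table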
def construct_sent(word, table): "Prints a random sentence starting with word, sampling from\n table.\n\n >>> table = {'Wow': ['!'], 'Sentences': ['are'], 'are': ['cool'], 'cool': ['.']}\n >>> construct_sent('Wow', table)\n 'Wow!'\n >>> construct_sent('Sentences', table)\n 'Sentences are cool.'\n " import random result = '' while (word not in ['.', '!', '?']): '*** YOUR CODE HERE ***' return (result.strip() + word)
-3,923,192,067,224,500,000
Returns a random sentence starting with word, sampling from table. >>> table = {'Wow': ['!'], 'Sentences': ['are'], 'are': ['cool'], 'cool': ['.']} >>> construct_sent('Wow', table) 'Wow!' >>> construct_sent('Sentences', table) 'Sentences are cool.'
lab/lab05/lab05.py
construct_sent
weijiew/cs61a
python
def construct_sent(word, table): "Prints a random sentence starting with word, sampling from\n table.\n\n >>> table = {'Wow': ['!'], 'Sentences': ['are'], 'are': ['cool'], 'cool': ['.']}\n >>> construct_sent('Wow', table)\n 'Wow!'\n >>> construct_sent('Sentences', table)\n 'Sentences are cool.'\n " import random result = while (word not in ['.', '!', '?']): '*** YOUR CODE HERE ***' return (result.strip() + word)
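A possible completion of the loop body in construct_sent: append the current word, then sample its successor from the table.

def construct_sent(word, table):
    import random
    result = ''
    while word not in ['.', '!', '?']:
        result += word + ' '                # accumulate the sentence so far
        word = random.choice(table[word])   # sample a random successor
    return result.strip() + word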
def shakespeare_tokens(path='shakespeare.txt', url='http://composingprograms.com/shakespeare.txt'): "Return the words of Shakespeare's plays as a list." import os from urllib.request import urlopen if os.path.exists(path): return open(path, encoding='ascii').read().split() else: shakespeare = urlopen(url) return shakespeare.read().decode(encoding='ascii').split()
1,566,796,944,794,258,000
Return the words of Shakespeare's plays as a list.
lab/lab05/lab05.py
shakespeare_tokens
weijiew/cs61a
python
def shakespeare_tokens(path='shakespeare.txt', url='http://composingprograms.com/shakespeare.txt'): import os from urllib.request import urlopen if os.path.exists(path): return open(path, encoding='ascii').read().split() else: shakespeare = urlopen(url) return shakespeare.read().decode(encoding='ascii').split()
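Putting the three functions together, a sketch of generating one random sentence. Starting from a successor of '.' guarantees the start word is a table key; sampling can still raise KeyError if it reaches a word that only occurs as the corpus's final token.

import random

tokens = shakespeare_tokens()
table = build_successors_table(tokens)
start = random.choice(table['.'])    # words that begin sentences follow a '.'
print(construct_sent(start, table))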
def __init__(self, name: str, cloud_device_id: Optional[str], type_name: EntityType, fields: List[EntityField], is_reporting: bool, guid: Optional[str]=None, metadata: Optional[Dict[(str, str)]]=None): "Init.\n\n Args:\n name: the entity's name.\n cloud_device_id: Device id in IoT Core or any IoT application.\n type_name: DBO entity type stored in an EntityType instance.\n fields: List of standard field names.\n is_reporting: if an entity maps 1:1 to a reporting device, it is a\n reporting entity.\n guid: [Optional] Universally Unique identification code for an entity.\n metadata: Contextual metadata about an entity.\n " self.name = name self._guid = guid self.cloud_device_id = cloud_device_id self.type_name = type_name self._fields = fields self.is_reporting = is_reporting self.metadata = metadata
8,463,877,923,377,424,000
Init. Args: name: the entity's name. cloud_device_id: Device id in IoT Core or any IoT application. type_name: DBO entity type stored in an EntityType instance. fields: List of standard field names. is_reporting: if an entity maps 1:1 to a reporting device, it is a reporting entity. guid: [Optional] Universally Unique identification code for an entity. metadata: Contextual metadata about an entity.
tools/concrete_model/model/entity.py
__init__
ghairapetian/digitalbuildings
python
def __init__(self, name: str, cloud_device_id: Optional[str], type_name: EntityType, fields: List[EntityField], is_reporting: bool, guid: Optional[str]=None, metadata: Optional[Dict[(str, str)]]=None): self.name = name self._guid = guid self.cloud_device_id = cloud_device_id self.type_name = type_name self._fields = fields self.is_reporting = is_reporting self.metadata = metadata
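A purely illustrative construction of an Entity. The type_name and fields values below are hypothetical stand-ins, since EntityType and EntityField construction is not shown in these records; real code would pass proper instances as the annotations require.

entity = Entity(
    name='VAV-1',                  # hypothetical entity name
    cloud_device_id='device-123',  # hypothetical IoT device id
    type_name='VAV',               # stand-in; should be an EntityType instance
    fields=[],                     # stand-in; should hold EntityField instances
    is_reporting=True,
)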
@classmethod def FromDict(cls, entity_dict: Dict[(str, object)]): 'Class method to create an instance of Entity from a mapping of entity attributes to values.\n\n Args:\n entity_dict: dictionary mapping field attributes to values from a\n loadsheet or building configuration.\n\n Returns:\n An instance of the Entity class.\n '
-6,018,446,563,130,544,000
Class method to create an instance of Entity from a mapping of entity attributes to values. Args: entity_dict: dictionary mapping field attributes to values from a loadsheet or building configuration. Returns: An instance of the Entity class.
tools/concrete_model/model/entity.py
FromDict
ghairapetian/digitalbuildings
python
@classmethod def FromDict(cls, entity_dict: Dict[(str, object)]): ' '
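The FromDict stub could plausibly be implemented by forwarding dictionary values to __init__. The key names below are assumptions for illustration; the actual loadsheet and building-configuration column names are not shown in these records.

@classmethod
def FromDict(cls, entity_dict: Dict[str, object]):
    # Assumed keys; adjust to the real loadsheet/building-config schema.
    return cls(
        name=entity_dict['name'],
        cloud_device_id=entity_dict.get('cloud_device_id'),
        type_name=entity_dict['type_name'],
        fields=entity_dict['fields'],
        is_reporting=entity_dict['is_reporting'],
        guid=entity_dict.get('guid'),
        metadata=entity_dict.get('metadata'),
    )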
@property def fields(self) -> Dict[(str, EntityField)]: 'Returns a mapping of standard field names to EntityField instances associated with self.' return self._fields
1,322,555,290,743,449,300
Returns a mapping of standard field names to EntityField instances associated with self.
tools/concrete_model/model/entity.py
fields
ghairapetian/digitalbuildings
python
@property def fields(self) -> Dict[(str, EntityField)]: return self._fields
@fields.setter def fields(self, new_fields: Dict[(str, EntityField)]) -> None: 'Validates that each value of new_fields is an instance of the EntityField class and sets the attribute.\n\n Arguments:\n new_fields: A mapping of standard field names to EntityField instances.\n '
-1,757,987,839,824,743,700
Validates that each value of new_fields is an instance of the EntityField class and sets the attribute. Arguments: new_fields: A mapping of standard field names to EntityField instances.
tools/concrete_model/model/entity.py
fields
ghairapetian/digitalbuildings
python
@fields.setter def fields(self, new_fields: Dict[(str, EntityField)]) -> None: ' '
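A sketch of what the setter's docstring describes: validate every value, then assign to the private attribute that __init__ uses.

@fields.setter
def fields(self, new_fields: Dict[str, EntityField]) -> None:
    for field in new_fields.values():
        if not isinstance(field, EntityField):
            raise TypeError(f'{field} is not an EntityField instance')
    self._fields = new_fields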
@property def guid(self) -> str: 'Returns the GUID associated with self.' return self._guid
4,325,422,431,556,203,500
Returns the GUID associated with self.
tools/concrete_model/model/entity.py
guid
ghairapetian/digitalbuildings
python
@property def guid(self) -> str: return self._guid
@guid.setter def guid(self, guid: Optional[str]=None) -> None: 'If the guid argument is None, generate a new GUID and set it; otherwise set the given value.\n\n Args:\n guid: [Optional] A UUID string.\n '
4,728,215,185,178,931,000
If the guid argument is None, generate a new GUID and set it; otherwise set the given value. Args: guid: [Optional] A UUID string.
tools/concrete_model/model/entity.py
guid
ghairapetian/digitalbuildings
python
@guid.setter def guid(self, guid: Optional[str]=None) -> None: ' '
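A sketch matching the setter's contract, using the standard-library uuid module; whether the project generates GUIDs this way is an assumption.

import uuid

@guid.setter
def guid(self, guid: Optional[str] = None) -> None:
    # Generate a fresh GUID when none is supplied; otherwise keep the given one.
    self._guid = guid if guid is not None else str(uuid.uuid4())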
def safe_quotes(text, escape_single_quotes=False): 'htmlify string' if isinstance(text, str): safe_text = text.replace('"', '&quot;') if escape_single_quotes: safe_text = safe_text.replace("'", "&#92;'") return safe_text.replace('True', 'true') return text
-5,689,330,192,420,735,000
htmlify string
cartoframes/viz/html/utils.py
safe_quotes
CartoDB/cartoframes
python
def safe_quotes(text, escape_single_quotes=False): if isinstance(text, str): safe_text = text.replace('"', '&quot;') if escape_single_quotes: safe_text = safe_text.replace("'", "&#92;'") return safe_text.replace('True', 'true') return text
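Example behavior of safe_quotes, including the 'True' -> 'true' rewrite (apparently aimed at turning Python booleans rendered as text into JavaScript-style literals):

print(safe_quotes('say "hi"'))                               # say &quot;hi&quot;
print(safe_quotes("it's True", escape_single_quotes=True))   # it&#92;'s true
print(safe_quotes(42))                                       # 42 (non-strings pass through unchanged)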
def test_shard_genesis_fork_fork(self): ' Test shard forks at genesis blocks due to root chain fork at GENESIS.ROOT_HEIGHT' acc1 = Address.create_random_account(0) acc2 = Address.create_random_account(1) genesis_root_heights = {2: 0, 3: 1} with ClusterContext(2, acc1, chain_size=1, shard_size=2, genesis_root_heights=genesis_root_heights) as clusters: clusters[1].peer.close() master0 = clusters[0].master root0 = call_async(master0.get_next_block_to_mine(acc1, branch_value=None)) call_async(master0.add_root_block(root0)) genesis0 = clusters[0].get_shard_state((2 | 1)).db.get_minor_block_by_height(0) self.assertEqual(genesis0.header.hash_prev_root_block, root0.header.get_hash()) master1 = clusters[1].master root1 = call_async(master1.get_next_block_to_mine(acc2, branch_value=None)) self.assertNotEqual(root0.header.get_hash(), root1.header.get_hash()) call_async(master1.add_root_block(root1)) genesis1 = clusters[1].get_shard_state((2 | 1)).db.get_minor_block_by_height(0) self.assertEqual(genesis1.header.hash_prev_root_block, root1.header.get_hash()) self.assertNotEqual(genesis0.header.get_hash(), genesis1.header.get_hash()) root2 = call_async(master1.get_next_block_to_mine(acc2, branch_value=None)) call_async(master1.add_root_block(root2)) self.assertEqual(master1.root_state.tip.height, 2) call_async(clusters[1].network.connect('127.0.0.1', clusters[0].master.env.cluster_config.SIMPLE_NETWORK.BOOTSTRAP_PORT)) assert_true_with_timeout((lambda : (clusters[0].get_shard_state((2 | 1)).db.get_minor_block_by_height(0).header.get_hash() == genesis1.header.get_hash()))) self.assertTrue((clusters[0].get_shard_state((2 | 1)).root_tip == root2.header))
-2,042,348,012,184,034,800
Test shard forks at genesis blocks due to root chain fork at GENESIS.ROOT_HEIGHT
quarkchain/cluster/tests/test_cluster.py
test_shard_genesis_fork_fork
Belgarion/pyquarkchain_cuda
python
def test_shard_genesis_fork_fork(self): ' ' acc1 = Address.create_random_account(0) acc2 = Address.create_random_account(1) genesis_root_heights = {2: 0, 3: 1} with ClusterContext(2, acc1, chain_size=1, shard_size=2, genesis_root_heights=genesis_root_heights) as clusters: clusters[1].peer.close() master0 = clusters[0].master root0 = call_async(master0.get_next_block_to_mine(acc1, branch_value=None)) call_async(master0.add_root_block(root0)) genesis0 = clusters[0].get_shard_state((2 | 1)).db.get_minor_block_by_height(0) self.assertEqual(genesis0.header.hash_prev_root_block, root0.header.get_hash()) master1 = clusters[1].master root1 = call_async(master1.get_next_block_to_mine(acc2, branch_value=None)) self.assertNotEqual(root0.header.get_hash(), root1.header.get_hash()) call_async(master1.add_root_block(root1)) genesis1 = clusters[1].get_shard_state((2 | 1)).db.get_minor_block_by_height(0) self.assertEqual(genesis1.header.hash_prev_root_block, root1.header.get_hash()) self.assertNotEqual(genesis0.header.get_hash(), genesis1.header.get_hash()) root2 = call_async(master1.get_next_block_to_mine(acc2, branch_value=None)) call_async(master1.add_root_block(root2)) self.assertEqual(master1.root_state.tip.height, 2) call_async(clusters[1].network.connect('127.0.0.1', clusters[0].master.env.cluster_config.SIMPLE_NETWORK.BOOTSTRAP_PORT)) assert_true_with_timeout((lambda : (clusters[0].get_shard_state((2 | 1)).db.get_minor_block_by_height(0).header.get_hash() == genesis1.header.get_hash()))) self.assertTrue((clusters[0].get_shard_state((2 | 1)).root_tip == root2.header))
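The cluster tests above and below lean on assert_true_with_timeout. A minimal sketch of the polling behavior it presumably provides; the real helper lives in quarkchain's test utilities and its signature may differ.

import time

def assert_true_with_timeout(predicate, timeout=10):
    # Poll the predicate until it returns True or the deadline passes.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if predicate():
            return
        time.sleep(0.1)
    raise AssertionError('condition not met within {} seconds'.format(timeout))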
def test_broadcast_cross_shard_transactions(self): ' Test the cross shard transactions are broadcasted to the destination shards ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) acc3 = Address.create_random_account(full_shard_key=1) with ClusterContext(1, acc1) as clusters: master = clusters[0].master slaves = clusters[0].slave_list genesis_token = clusters[0].get_shard_state((2 | 0)).env.quark_chain_config.genesis_token root_block = call_async(master.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master.add_root_block(root_block)) tx1 = create_transfer_transaction(shard_state=clusters[0].get_shard_state((2 | 0)), key=id1.get_key(), from_address=acc1, to_address=acc3, value=54321, gas=(opcodes.GTXXSHARDCOST + opcodes.GTXCOST)) self.assertTrue(slaves[0].add_tx(tx1)) b1 = clusters[0].get_shard_state((2 | 0)).create_block_to_mine(address=acc1) b2 = clusters[0].get_shard_state((2 | 0)).create_block_to_mine(address=acc1) b2.header.create_time += 1 self.assertNotEqual(b1.header.get_hash(), b2.header.get_hash()) call_async(clusters[0].get_shard((2 | 0)).add_block(b1)) xshard_tx_list = clusters[0].get_shard_state((2 | 1)).db.get_minor_block_xshard_tx_list(b1.header.get_hash()) self.assertEqual(len(xshard_tx_list.tx_list), 1) self.assertEqual(xshard_tx_list.tx_list[0].tx_hash, tx1.get_hash()) self.assertEqual(xshard_tx_list.tx_list[0].from_address, acc1) self.assertEqual(xshard_tx_list.tx_list[0].to_address, acc3) self.assertEqual(xshard_tx_list.tx_list[0].value, 54321) call_async(clusters[0].get_shard((2 | 0)).add_block(b2)) self.assertEqual(clusters[0].get_shard_state((2 | 0)).header_tip, b1.header) xshard_tx_list = clusters[0].get_shard_state((2 | 1)).db.get_minor_block_xshard_tx_list(b2.header.get_hash()) self.assertEqual(len(xshard_tx_list.tx_list), 1) self.assertEqual(xshard_tx_list.tx_list[0].tx_hash, tx1.get_hash()) self.assertEqual(xshard_tx_list.tx_list[0].from_address, acc1) self.assertEqual(xshard_tx_list.tx_list[0].to_address, acc3) self.assertEqual(xshard_tx_list.tx_list[0].value, 54321) b3 = clusters[0].get_shard_state((2 | 1)).create_block_to_mine(address=acc1.address_in_shard(1)) call_async(master.add_raw_minor_block(b3.header.branch, b3.serialize())) root_block = call_async(master.get_next_block_to_mine(address=acc1, branch_value=None)) call_async(master.add_root_block(root_block)) b4 = clusters[0].get_shard_state((2 | 1)).create_block_to_mine(address=acc1.address_in_shard(1)) self.assertTrue(call_async(master.add_raw_minor_block(b1.header.branch, b1.serialize()))) self.assertTrue(call_async(master.add_raw_minor_block(b2.header.branch, b2.serialize()))) self.assertTrue(call_async(master.add_raw_minor_block(b3.header.branch, b3.serialize()))) self.assertTrue(call_async(master.add_raw_minor_block(b4.header.branch, b4.serialize()))) self.assertEqual(call_async(master.get_primary_account_data(acc3)).token_balances.balance_map, {genesis_token: 54321})
-8,231,601,325,534,709,000
Test the cross shard transactions are broadcasted to the destination shards
quarkchain/cluster/tests/test_cluster.py
test_broadcast_cross_shard_transactions
Belgarion/pyquarkchain_cuda
python
def test_broadcast_cross_shard_transactions(self): ' ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) acc3 = Address.create_random_account(full_shard_key=1) with ClusterContext(1, acc1) as clusters: master = clusters[0].master slaves = clusters[0].slave_list genesis_token = clusters[0].get_shard_state((2 | 0)).env.quark_chain_config.genesis_token root_block = call_async(master.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master.add_root_block(root_block)) tx1 = create_transfer_transaction(shard_state=clusters[0].get_shard_state((2 | 0)), key=id1.get_key(), from_address=acc1, to_address=acc3, value=54321, gas=(opcodes.GTXXSHARDCOST + opcodes.GTXCOST)) self.assertTrue(slaves[0].add_tx(tx1)) b1 = clusters[0].get_shard_state((2 | 0)).create_block_to_mine(address=acc1) b2 = clusters[0].get_shard_state((2 | 0)).create_block_to_mine(address=acc1) b2.header.create_time += 1 self.assertNotEqual(b1.header.get_hash(), b2.header.get_hash()) call_async(clusters[0].get_shard((2 | 0)).add_block(b1)) xshard_tx_list = clusters[0].get_shard_state((2 | 1)).db.get_minor_block_xshard_tx_list(b1.header.get_hash()) self.assertEqual(len(xshard_tx_list.tx_list), 1) self.assertEqual(xshard_tx_list.tx_list[0].tx_hash, tx1.get_hash()) self.assertEqual(xshard_tx_list.tx_list[0].from_address, acc1) self.assertEqual(xshard_tx_list.tx_list[0].to_address, acc3) self.assertEqual(xshard_tx_list.tx_list[0].value, 54321) call_async(clusters[0].get_shard((2 | 0)).add_block(b2)) self.assertEqual(clusters[0].get_shard_state((2 | 0)).header_tip, b1.header) xshard_tx_list = clusters[0].get_shard_state((2 | 1)).db.get_minor_block_xshard_tx_list(b2.header.get_hash()) self.assertEqual(len(xshard_tx_list.tx_list), 1) self.assertEqual(xshard_tx_list.tx_list[0].tx_hash, tx1.get_hash()) self.assertEqual(xshard_tx_list.tx_list[0].from_address, acc1) self.assertEqual(xshard_tx_list.tx_list[0].to_address, acc3) self.assertEqual(xshard_tx_list.tx_list[0].value, 54321) b3 = clusters[0].get_shard_state((2 | 1)).create_block_to_mine(address=acc1.address_in_shard(1)) call_async(master.add_raw_minor_block(b3.header.branch, b3.serialize())) root_block = call_async(master.get_next_block_to_mine(address=acc1, branch_value=None)) call_async(master.add_root_block(root_block)) b4 = clusters[0].get_shard_state((2 | 1)).create_block_to_mine(address=acc1.address_in_shard(1)) self.assertTrue(call_async(master.add_raw_minor_block(b1.header.branch, b1.serialize()))) self.assertTrue(call_async(master.add_raw_minor_block(b2.header.branch, b2.serialize()))) self.assertTrue(call_async(master.add_raw_minor_block(b3.header.branch, b3.serialize()))) self.assertTrue(call_async(master.add_raw_minor_block(b4.header.branch, b4.serialize()))) self.assertEqual(call_async(master.get_primary_account_data(acc3)).token_balances.balance_map, {genesis_token: 54321})
def test_broadcast_cross_shard_transactions_to_neighbor_only(self): ' Test the broadcast is only done to the neighbors ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(1, acc1, shard_size=64, num_slaves=4) as clusters: master = clusters[0].master root_block = call_async(master.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master.add_root_block(root_block)) b1 = clusters[0].get_shard_state(64).create_block_to_mine(address=acc1) self.assertTrue(call_async(master.add_raw_minor_block(b1.header.branch, b1.serialize()))) neighbor_shards = [(2 ** i) for i in range(6)] for shard_id in range(64): xshard_tx_list = clusters[0].get_shard_state((64 | shard_id)).db.get_minor_block_xshard_tx_list(b1.header.get_hash()) if (shard_id in neighbor_shards): self.assertIsNotNone(xshard_tx_list) else: self.assertIsNone(xshard_tx_list)
2,652,062,782,321,459,700
Test the broadcast is only done to the neighbors
quarkchain/cluster/tests/test_cluster.py
test_broadcast_cross_shard_transactions_to_neighbor_only
Belgarion/pyquarkchain_cuda
python
def test_broadcast_cross_shard_transactions_to_neighbor_only(self): ' ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(1, acc1, shard_size=64, num_slaves=4) as clusters: master = clusters[0].master root_block = call_async(master.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master.add_root_block(root_block)) b1 = clusters[0].get_shard_state(64).create_block_to_mine(address=acc1) self.assertTrue(call_async(master.add_raw_minor_block(b1.header.branch, b1.serialize()))) neighbor_shards = [(2 ** i) for i in range(6)] for shard_id in range(64): xshard_tx_list = clusters[0].get_shard_state((64 | shard_id)).db.get_minor_block_xshard_tx_list(b1.header.get_hash()) if (shard_id in neighbor_shards): self.assertIsNotNone(xshard_tx_list) else: self.assertIsNone(xshard_tx_list)
def test_get_root_block_headers_with_skip(self): ' Test the broadcast is only done to the neighbors ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1) as clusters: master = clusters[0].master root_block_header_list = [master.root_state.tip] for i in range(10): root_block = call_async(master.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master.add_root_block(root_block)) root_block_header_list.append(root_block.header) self.assertEqual(root_block_header_list[(- 1)].height, 10) assert_true_with_timeout((lambda : (clusters[1].master.root_state.tip.height == 10))) peer = clusters[1].peer (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_height(height=1, skip=1, limit=3, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], root_block_header_list[1]) self.assertEqual(resp.block_header_list[1], root_block_header_list[3]) self.assertEqual(resp.block_header_list[2], root_block_header_list[5]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_hash(hash=root_block_header_list[1].get_hash(), skip=1, limit=3, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], root_block_header_list[1]) self.assertEqual(resp.block_header_list[1], root_block_header_list[3]) self.assertEqual(resp.block_header_list[2], root_block_header_list[5]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_height(height=2, skip=2, limit=4, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], root_block_header_list[2]) self.assertEqual(resp.block_header_list[1], root_block_header_list[5]) self.assertEqual(resp.block_header_list[2], root_block_header_list[8]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_hash(hash=root_block_header_list[2].get_hash(), skip=2, limit=4, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], root_block_header_list[2]) self.assertEqual(resp.block_header_list[1], root_block_header_list[5]) self.assertEqual(resp.block_header_list[2], root_block_header_list[8]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_height(height=6, skip=0, limit=100, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], root_block_header_list[6]) self.assertEqual(resp.block_header_list[1], root_block_header_list[7]) self.assertEqual(resp.block_header_list[2], root_block_header_list[8]) self.assertEqual(resp.block_header_list[3], root_block_header_list[9]) self.assertEqual(resp.block_header_list[4], root_block_header_list[10]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, 
cmd=GetRootBlockHeaderListWithSkipRequest.create_for_hash(hash=root_block_header_list[6].get_hash(), skip=0, limit=100, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], root_block_header_list[6]) self.assertEqual(resp.block_header_list[1], root_block_header_list[7]) self.assertEqual(resp.block_header_list[2], root_block_header_list[8]) self.assertEqual(resp.block_header_list[3], root_block_header_list[9]) self.assertEqual(resp.block_header_list[4], root_block_header_list[10]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_height(height=2, skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 1) self.assertEqual(resp.block_header_list[0], root_block_header_list[2]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_hash(hash=root_block_header_list[2].get_hash(), skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 1) self.assertEqual(resp.block_header_list[0], root_block_header_list[2]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_height(height=11, skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 0) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_hash(hash=bytes(32), skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 0) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_height(height=8, skip=1, limit=5, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], root_block_header_list[8]) self.assertEqual(resp.block_header_list[1], root_block_header_list[6]) self.assertEqual(resp.block_header_list[2], root_block_header_list[4]) self.assertEqual(resp.block_header_list[3], root_block_header_list[2]) self.assertEqual(resp.block_header_list[4], root_block_header_list[0]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_hash(hash=root_block_header_list[8].get_hash(), skip=1, limit=5, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], root_block_header_list[8]) self.assertEqual(resp.block_header_list[1], root_block_header_list[6]) self.assertEqual(resp.block_header_list[2], root_block_header_list[4]) self.assertEqual(resp.block_header_list[3], root_block_header_list[2]) self.assertEqual(resp.block_header_list[4], root_block_header_list[0])
-1,659,318,232,538,599,400
Test getting root block headers with skip and limit in both TIP and GENESIS directions
quarkchain/cluster/tests/test_cluster.py
test_get_root_block_headers_with_skip
Belgarion/pyquarkchain_cuda
python
def test_get_root_block_headers_with_skip(self): ' ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1) as clusters: master = clusters[0].master root_block_header_list = [master.root_state.tip] for i in range(10): root_block = call_async(master.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master.add_root_block(root_block)) root_block_header_list.append(root_block.header) self.assertEqual(root_block_header_list[(- 1)].height, 10) assert_true_with_timeout((lambda : (clusters[1].master.root_state.tip.height == 10))) peer = clusters[1].peer (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_height(height=1, skip=1, limit=3, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], root_block_header_list[1]) self.assertEqual(resp.block_header_list[1], root_block_header_list[3]) self.assertEqual(resp.block_header_list[2], root_block_header_list[5]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_hash(hash=root_block_header_list[1].get_hash(), skip=1, limit=3, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], root_block_header_list[1]) self.assertEqual(resp.block_header_list[1], root_block_header_list[3]) self.assertEqual(resp.block_header_list[2], root_block_header_list[5]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_height(height=2, skip=2, limit=4, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], root_block_header_list[2]) self.assertEqual(resp.block_header_list[1], root_block_header_list[5]) self.assertEqual(resp.block_header_list[2], root_block_header_list[8]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_hash(hash=root_block_header_list[2].get_hash(), skip=2, limit=4, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], root_block_header_list[2]) self.assertEqual(resp.block_header_list[1], root_block_header_list[5]) self.assertEqual(resp.block_header_list[2], root_block_header_list[8]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_height(height=6, skip=0, limit=100, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], root_block_header_list[6]) self.assertEqual(resp.block_header_list[1], root_block_header_list[7]) self.assertEqual(resp.block_header_list[2], root_block_header_list[8]) self.assertEqual(resp.block_header_list[3], root_block_header_list[9]) self.assertEqual(resp.block_header_list[4], root_block_header_list[10]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_hash(hash=root_block_header_list[6].get_hash(), skip=0, limit=100, 
direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], root_block_header_list[6]) self.assertEqual(resp.block_header_list[1], root_block_header_list[7]) self.assertEqual(resp.block_header_list[2], root_block_header_list[8]) self.assertEqual(resp.block_header_list[3], root_block_header_list[9]) self.assertEqual(resp.block_header_list[4], root_block_header_list[10]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_height(height=2, skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 1) self.assertEqual(resp.block_header_list[0], root_block_header_list[2]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_hash(hash=root_block_header_list[2].get_hash(), skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 1) self.assertEqual(resp.block_header_list[0], root_block_header_list[2]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_height(height=11, skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 0) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_hash(hash=bytes(32), skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 0) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_height(height=8, skip=1, limit=5, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], root_block_header_list[8]) self.assertEqual(resp.block_header_list[1], root_block_header_list[6]) self.assertEqual(resp.block_header_list[2], root_block_header_list[4]) self.assertEqual(resp.block_header_list[3], root_block_header_list[2]) self.assertEqual(resp.block_header_list[4], root_block_header_list[0]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_ROOT_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetRootBlockHeaderListWithSkipRequest.create_for_hash(hash=root_block_header_list[8].get_hash(), skip=1, limit=5, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], root_block_header_list[8]) self.assertEqual(resp.block_header_list[1], root_block_header_list[6]) self.assertEqual(resp.block_header_list[2], root_block_header_list[4]) self.assertEqual(resp.block_header_list[3], root_block_header_list[2]) self.assertEqual(resp.block_header_list[4], root_block_header_list[0])
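The assertions in the two skip/limit tests encode a simple arithmetic contract: consecutive returned headers are skip + 1 heights apart, walking toward the tip or toward genesis, truncated by limit and the chain bounds. A small model of that contract (illustrative only, not the production implementation):

def expected_heights(start, skip, limit, tip, toward_tip=True):
    # Consecutive headers are (skip + 1) heights apart in the chosen direction.
    step = (skip + 1) * (1 if toward_tip else -1)
    heights = []
    h = start
    while len(heights) < limit and 0 <= h <= tip:
        heights.append(h)
        h += step
    return heights

assert expected_heights(1, 1, 3, tip=10) == [1, 3, 5]
assert expected_heights(6, 0, 100, tip=10) == [6, 7, 8, 9, 10]
assert expected_heights(2, 2, 4, tip=10, toward_tip=False) == [2]
assert expected_heights(8, 1, 5, tip=10, toward_tip=False) == [8, 6, 4, 2, 0]
assert expected_heights(11, 2, 4, tip=10, toward_tip=False) == []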
def test_get_root_block_header_sync_from_genesis(self): ' Test root block header sync from genesis ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1, connect=False) as clusters: master = clusters[0].master root_block_header_list = [master.root_state.tip] for i in range(10): root_block = call_async(master.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master.add_root_block(root_block)) root_block_header_list.append(root_block.header) call_async(clusters[1].network.connect('127.0.0.1', clusters[0].network.env.cluster_config.P2P_PORT)) assert_true_with_timeout((lambda : (clusters[1].master.root_state.tip == root_block_header_list[(- 1)]))) self.assertEqual(clusters[1].master.synchronizer.stats.blocks_downloaded, (len(root_block_header_list) - 1))
6,604,767,233,400,697,000
Test root block header sync from genesis
quarkchain/cluster/tests/test_cluster.py
test_get_root_block_header_sync_from_genesis
Belgarion/pyquarkchain_cuda
python
def test_get_root_block_header_sync_from_genesis(self): ' ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1, connect=False) as clusters: master = clusters[0].master root_block_header_list = [master.root_state.tip] for i in range(10): root_block = call_async(master.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master.add_root_block(root_block)) root_block_header_list.append(root_block.header) call_async(clusters[1].network.connect('127.0.0.1', clusters[0].network.env.cluster_config.P2P_PORT)) assert_true_with_timeout((lambda : (clusters[1].master.root_state.tip == root_block_header_list[(- 1)]))) self.assertEqual(clusters[1].master.synchronizer.stats.blocks_downloaded, (len(root_block_header_list) - 1))
def test_get_root_block_header_sync_from_height_3(self): ' Test root block header sync starting from height 3 ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1, connect=False) as clusters: master0 = clusters[0].master root_block_list = [] for i in range(10): root_block = call_async(master0.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master0.add_root_block(root_block)) root_block_list.append(root_block) master1 = clusters[1].master for i in range(3): call_async(master1.add_root_block(root_block_list[i])) assert_true_with_timeout((lambda : (master1.root_state.tip == root_block_list[2].header))) call_async(clusters[1].network.connect('127.0.0.1', clusters[0].network.env.cluster_config.P2P_PORT)) assert_true_with_timeout((lambda : (master1.root_state.tip == root_block_list[(- 1)].header))) self.assertEqual(master1.synchronizer.stats.blocks_downloaded, (len(root_block_list) - 3)) self.assertEqual(master1.synchronizer.stats.ancestor_lookup_requests, 1)
7,170,750,119,003,731,000
Test root block header sync starting from height 3
quarkchain/cluster/tests/test_cluster.py
test_get_root_block_header_sync_from_height_3
Belgarion/pyquarkchain_cuda
python
def test_get_root_block_header_sync_from_height_3(self): ' ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1, connect=False) as clusters: master0 = clusters[0].master root_block_list = [] for i in range(10): root_block = call_async(master0.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master0.add_root_block(root_block)) root_block_list.append(root_block) master1 = clusters[1].master for i in range(3): call_async(master1.add_root_block(root_block_list[i])) assert_true_with_timeout((lambda : (master1.root_state.tip == root_block_list[2].header))) call_async(clusters[1].network.connect('127.0.0.1', clusters[0].network.env.cluster_config.P2P_PORT)) assert_true_with_timeout((lambda : (master1.root_state.tip == root_block_list[(- 1)].header))) self.assertEqual(master1.synchronizer.stats.blocks_downloaded, (len(root_block_list) - 3)) self.assertEqual(master1.synchronizer.stats.ancestor_lookup_requests, 1)
def test_get_root_block_header_sync_with_fork(self): ' Test root block header sync when the local chain has forked ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1, connect=False) as clusters: master0 = clusters[0].master root_block_list = [] for i in range(10): root_block = call_async(master0.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master0.add_root_block(root_block)) root_block_list.append(root_block) master1 = clusters[1].master for i in range(2): call_async(master1.add_root_block(root_block_list[i])) for i in range(3): root_block = call_async(master1.get_next_block_to_mine(acc1, branch_value=None)) call_async(master1.add_root_block(root_block)) call_async(clusters[1].network.connect('127.0.0.1', clusters[0].network.env.cluster_config.P2P_PORT)) assert_true_with_timeout((lambda : (master1.root_state.tip == root_block_list[(- 1)].header))) self.assertEqual(master1.synchronizer.stats.blocks_downloaded, (len(root_block_list) - 2)) self.assertEqual(master1.synchronizer.stats.ancestor_lookup_requests, 1)
3,788,074,659,094,418,400
Test root block header sync when the local chain has forked
quarkchain/cluster/tests/test_cluster.py
test_get_root_block_header_sync_with_fork
Belgarion/pyquarkchain_cuda
python
def test_get_root_block_header_sync_with_fork(self): ' ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1, connect=False) as clusters: master0 = clusters[0].master root_block_list = [] for i in range(10): root_block = call_async(master0.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master0.add_root_block(root_block)) root_block_list.append(root_block) master1 = clusters[1].master for i in range(2): call_async(master1.add_root_block(root_block_list[i])) for i in range(3): root_block = call_async(master1.get_next_block_to_mine(acc1, branch_value=None)) call_async(master1.add_root_block(root_block)) call_async(clusters[1].network.connect('127.0.0.1', clusters[0].network.env.cluster_config.P2P_PORT)) assert_true_with_timeout((lambda : (master1.root_state.tip == root_block_list[(- 1)].header))) self.assertEqual(master1.synchronizer.stats.blocks_downloaded, (len(root_block_list) - 2)) self.assertEqual(master1.synchronizer.stats.ancestor_lookup_requests, 1)
def test_get_root_block_header_sync_with_staleness(self): ' Test root block header sync when the peer tip is too stale to find a common ancestor ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1, connect=False) as clusters: master0 = clusters[0].master root_block_list = [] for i in range(10): root_block = call_async(master0.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master0.add_root_block(root_block)) root_block_list.append(root_block) assert_true_with_timeout((lambda : (master0.root_state.tip == root_block_list[(- 1)].header))) master1 = clusters[1].master for i in range(8): root_block = call_async(master1.get_next_block_to_mine(acc1, branch_value=None)) call_async(master1.add_root_block(root_block)) master1.env.quark_chain_config.ROOT.MAX_STALE_ROOT_BLOCK_HEIGHT_DIFF = 5 assert_true_with_timeout((lambda : (master1.root_state.tip == root_block.header))) call_async(clusters[1].network.connect('127.0.0.1', clusters[0].network.env.cluster_config.P2P_PORT)) assert_true_with_timeout((lambda : (master1.synchronizer.stats.ancestor_not_found_count == 1))) self.assertEqual(master1.synchronizer.stats.blocks_downloaded, 0) self.assertEqual(master1.synchronizer.stats.ancestor_lookup_requests, 1)
-2,076,552,471,465,037,300
Test root block header sync when the peer tip is too stale to find a common ancestor
quarkchain/cluster/tests/test_cluster.py
test_get_root_block_header_sync_with_staleness
Belgarion/pyquarkchain_cuda
python
def test_get_root_block_header_sync_with_staleness(self): ' ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1, connect=False) as clusters: master0 = clusters[0].master root_block_list = [] for i in range(10): root_block = call_async(master0.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master0.add_root_block(root_block)) root_block_list.append(root_block) assert_true_with_timeout((lambda : (master0.root_state.tip == root_block_list[(- 1)].header))) master1 = clusters[1].master for i in range(8): root_block = call_async(master1.get_next_block_to_mine(acc1, branch_value=None)) call_async(master1.add_root_block(root_block)) master1.env.quark_chain_config.ROOT.MAX_STALE_ROOT_BLOCK_HEIGHT_DIFF = 5 assert_true_with_timeout((lambda : (master1.root_state.tip == root_block.header))) call_async(clusters[1].network.connect('127.0.0.1', clusters[0].network.env.cluster_config.P2P_PORT)) assert_true_with_timeout((lambda : (master1.synchronizer.stats.ancestor_not_found_count == 1))) self.assertEqual(master1.synchronizer.stats.blocks_downloaded, 0) self.assertEqual(master1.synchronizer.stats.ancestor_lookup_requests, 1)
def test_get_root_block_header_sync_with_multiple_lookup(self): ' Test root block header sync requiring multiple ancestor lookup rounds ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1, connect=False) as clusters: master0 = clusters[0].master root_block_list = [] for i in range(12): root_block = call_async(master0.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master0.add_root_block(root_block)) root_block_list.append(root_block) assert_true_with_timeout((lambda : (master0.root_state.tip == root_block_list[(- 1)].header))) master1 = clusters[1].master for i in range(4): call_async(master1.add_root_block(root_block_list[i])) for i in range(4): root_block = call_async(master1.get_next_block_to_mine(acc1, branch_value=None)) call_async(master1.add_root_block(root_block)) master1.synchronizer.root_block_header_list_limit = 4 call_async(clusters[1].network.connect('127.0.0.1', clusters[0].network.env.cluster_config.P2P_PORT)) assert_true_with_timeout((lambda : (master1.root_state.tip == root_block_list[(- 1)].header))) self.assertEqual(master1.synchronizer.stats.blocks_downloaded, 8) self.assertEqual(master1.synchronizer.stats.headers_downloaded, (5 + 8)) self.assertEqual(master1.synchronizer.stats.ancestor_lookup_requests, 2)
5,361,396,513,719,990,000
Test root block header sync requiring multiple ancestor lookup rounds
quarkchain/cluster/tests/test_cluster.py
test_get_root_block_header_sync_with_multiple_lookup
Belgarion/pyquarkchain_cuda
python
def test_get_root_block_header_sync_with_multiple_lookup(self): ' ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1, connect=False) as clusters: master0 = clusters[0].master root_block_list = [] for i in range(12): root_block = call_async(master0.get_next_block_to_mine(Address.create_empty_account(), branch_value=None)) call_async(master0.add_root_block(root_block)) root_block_list.append(root_block) assert_true_with_timeout((lambda : (master0.root_state.tip == root_block_list[(- 1)].header))) master1 = clusters[1].master for i in range(4): call_async(master1.add_root_block(root_block_list[i])) for i in range(4): root_block = call_async(master1.get_next_block_to_mine(acc1, branch_value=None)) call_async(master1.add_root_block(root_block)) master1.synchronizer.root_block_header_list_limit = 4 call_async(clusters[1].network.connect('127.0.0.1', clusters[0].network.env.cluster_config.P2P_PORT)) assert_true_with_timeout((lambda : (master1.root_state.tip == root_block_list[(- 1)].header))) self.assertEqual(master1.synchronizer.stats.blocks_downloaded, 8) self.assertEqual(master1.synchronizer.stats.headers_downloaded, (5 + 8)) self.assertEqual(master1.synchronizer.stats.ancestor_lookup_requests, 2)
def test_get_minor_block_headers_with_skip(self): ' Test the broadcast is only done to the neighbors ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1) as clusters: master = clusters[0].master shard = next(iter(clusters[0].slave_list[0].shards.values())) minor_block_header_list = [shard.state.header_tip] branch = shard.state.header_tip.branch for i in range(10): b = shard.state.create_block_to_mine() call_async(master.add_raw_minor_block(b.header.branch, b.serialize())) minor_block_header_list.append(b.header) self.assertEqual(minor_block_header_list[(- 1)].height, 10) peer = next(iter(clusters[1].slave_list[0].shards[branch].peers.values())) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_height(height=1, branch=branch, skip=1, limit=3, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], minor_block_header_list[1]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[3]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[5]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_hash(hash=minor_block_header_list[1].get_hash(), branch=branch, skip=1, limit=3, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], minor_block_header_list[1]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[3]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[5]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_height(height=2, branch=branch, skip=2, limit=4, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], minor_block_header_list[2]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[5]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[8]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_hash(hash=minor_block_header_list[2].get_hash(), branch=branch, skip=2, limit=4, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], minor_block_header_list[2]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[5]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[8]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_height(height=6, branch=branch, skip=0, limit=100, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], minor_block_header_list[6]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[7]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[8]) self.assertEqual(resp.block_header_list[3], minor_block_header_list[9]) self.assertEqual(resp.block_header_list[4], minor_block_header_list[10]) (op, resp, rpc_id) = 
call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_hash(hash=minor_block_header_list[6].get_hash(), branch=branch, skip=0, limit=100, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], minor_block_header_list[6]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[7]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[8]) self.assertEqual(resp.block_header_list[3], minor_block_header_list[9]) self.assertEqual(resp.block_header_list[4], minor_block_header_list[10]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_height(height=2, branch=branch, skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 1) self.assertEqual(resp.block_header_list[0], minor_block_header_list[2]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_hash(hash=minor_block_header_list[2].get_hash(), branch=branch, skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 1) self.assertEqual(resp.block_header_list[0], minor_block_header_list[2]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_height(height=11, branch=branch, skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 0) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_hash(hash=bytes(32), branch=branch, skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 0) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_height(height=8, branch=branch, skip=1, limit=5, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], minor_block_header_list[8]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[6]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[4]) self.assertEqual(resp.block_header_list[3], minor_block_header_list[2]) self.assertEqual(resp.block_header_list[4], minor_block_header_list[0]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_hash(hash=minor_block_header_list[8].get_hash(), branch=branch, skip=1, limit=5, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], minor_block_header_list[8]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[6]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[4]) self.assertEqual(resp.block_header_list[3], minor_block_header_list[2]) self.assertEqual(resp.block_header_list[4], minor_block_header_list[0])
-2,125,516,172,805,657,300
Test getting minor block headers with skip and limit in both TIP and GENESIS directions
quarkchain/cluster/tests/test_cluster.py
test_get_minor_block_headers_with_skip
Belgarion/pyquarkchain_cuda
python
def test_get_minor_block_headers_with_skip(self): ' ' id1 = Identity.create_random_identity() acc1 = Address.create_from_identity(id1, full_shard_key=0) with ClusterContext(2, acc1) as clusters: master = clusters[0].master shard = next(iter(clusters[0].slave_list[0].shards.values())) minor_block_header_list = [shard.state.header_tip] branch = shard.state.header_tip.branch for i in range(10): b = shard.state.create_block_to_mine() call_async(master.add_raw_minor_block(b.header.branch, b.serialize())) minor_block_header_list.append(b.header) self.assertEqual(minor_block_header_list[(- 1)].height, 10) peer = next(iter(clusters[1].slave_list[0].shards[branch].peers.values())) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_height(height=1, branch=branch, skip=1, limit=3, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], minor_block_header_list[1]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[3]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[5]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_hash(hash=minor_block_header_list[1].get_hash(), branch=branch, skip=1, limit=3, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], minor_block_header_list[1]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[3]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[5]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_height(height=2, branch=branch, skip=2, limit=4, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], minor_block_header_list[2]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[5]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[8]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_hash(hash=minor_block_header_list[2].get_hash(), branch=branch, skip=2, limit=4, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 3) self.assertEqual(resp.block_header_list[0], minor_block_header_list[2]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[5]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[8]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_height(height=6, branch=branch, skip=0, limit=100, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], minor_block_header_list[6]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[7]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[8]) self.assertEqual(resp.block_header_list[3], minor_block_header_list[9]) self.assertEqual(resp.block_header_list[4], minor_block_header_list[10]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_hash(hash=minor_block_header_list[6].get_hash(), branch=branch, skip=0, limit=100, direction=Direction.TIP))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], minor_block_header_list[6]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[7]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[8]) self.assertEqual(resp.block_header_list[3], minor_block_header_list[9]) self.assertEqual(resp.block_header_list[4], minor_block_header_list[10]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_height(height=2, branch=branch, skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 1) self.assertEqual(resp.block_header_list[0], minor_block_header_list[2]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_hash(hash=minor_block_header_list[2].get_hash(), branch=branch, skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 1) self.assertEqual(resp.block_header_list[0], minor_block_header_list[2]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_height(height=11, branch=branch, skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 0) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_hash(hash=bytes(32), branch=branch, skip=2, limit=4, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 0) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_height(height=8, branch=branch, skip=1, limit=5, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], minor_block_header_list[8]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[6]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[4]) self.assertEqual(resp.block_header_list[3], minor_block_header_list[2]) self.assertEqual(resp.block_header_list[4], minor_block_header_list[0]) (op, resp, rpc_id) = call_async(peer.write_rpc_request(op=CommandOp.GET_MINOR_BLOCK_HEADER_LIST_WITH_SKIP_REQUEST, cmd=GetMinorBlockHeaderListWithSkipRequest.create_for_hash(hash=minor_block_header_list[8].get_hash(), branch=branch, skip=1, limit=5, direction=Direction.GENESIS))) self.assertEqual(len(resp.block_header_list), 5) self.assertEqual(resp.block_header_list[0], minor_block_header_list[8]) self.assertEqual(resp.block_header_list[1], minor_block_header_list[6]) self.assertEqual(resp.block_header_list[2], minor_block_header_list[4]) self.assertEqual(resp.block_header_list[3], minor_block_header_list[2]) self.assertEqual(resp.block_header_list[4], minor_block_header_list[0])
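The assertions in this test all follow one rule: starting from the requested height, the responder returns every (skip + 1)-th header until either the limit is reached or it runs past the tip (height 10 here); Direction.GENESIS walks toward height 0 instead of up. A minimal sketch of that arithmetic in plain Python, independent of the cluster code, reproduces every expected list above:

def expected_heights(start, skip, limit, tip=10, to_genesis=False):
    # Step size is skip + 1; GENESIS walks down toward 0, TIP walks up to the tip.
    step = skip + 1
    heights = range(start, -1, -step) if to_genesis else range(start, tip + 1, step)
    return list(heights)[:limit]

assert expected_heights(1, skip=1, limit=3) == [1, 3, 5]
assert expected_heights(2, skip=2, limit=4) == [2, 5, 8]          # only 3 heights fit below the tip
assert expected_heights(6, skip=0, limit=100) == [6, 7, 8, 9, 10]
assert expected_heights(2, skip=2, limit=4, to_genesis=True) == [2]
assert expected_heights(8, skip=1, limit=5, to_genesis=True) == [8, 6, 4, 2, 0]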
def test_list_inputs(self): 'Test input a list\n ' with tf.Graph().as_default(): (model, inputs, target) = self.build_simple_model() model.fit([inpData for (_, _, inpData) in inputs], target, batch_size=1)
-7,471,307,216,688,532,000
Test input a list
zqtflearn2/tests/test_inputs.py
test_list_inputs
ZhengDeQuan/AAA
python
def test_list_inputs(self): '\n ' with tf.Graph().as_default(): (model, inputs, target) = self.build_simple_model() model.fit([inpData for (_, _, inpData) in inputs], target, batch_size=1)
def test_dict_inputs(self): 'Test input a dict with layer name\n ' with tf.Graph().as_default(): (model, inputs, target) = self.build_simple_model() model.fit({name: inpData for (name, _, inpData) in inputs}, target, batch_size=1)
4,393,760,652,735,001,600
Test input a dict with layer name
zqtflearn2/tests/test_inputs.py
test_dict_inputs
ZhengDeQuan/AAA
python
def test_dict_inputs(self): '\n ' with tf.Graph().as_default(): (model, inputs, target) = self.build_simple_model() model.fit({name: inpData for (name, _, inpData) in inputs}, target, batch_size=1)
def test_dict_withtensor_inputs(self): 'Test input a dict with placeholder\n ' with tf.Graph().as_default(): (model, inputs, target) = self.build_simple_model() model.fit({placeholder: inpData for (_, placeholder, inpData) in inputs}, target, batch_size=1)
1,681,431,496,209,710,000
Test input a dict with placeholder
zqtflearn2/tests/test_inputs.py
test_dict_withtensor_inputs
ZhengDeQuan/AAA
python
def test_dict_withtensor_inputs(self): '\n ' with tf.Graph().as_default(): (model, inputs, target) = self.build_simple_model() model.fit({placeholder: inpData for (_, placeholder, inpData) in inputs}, target, batch_size=1)
def build_simple_model(self): 'Build a simple model for test\n Returns:\n DNN, [ (input layer name, input placeholder, input data) ], Target data\n ' (inputPlaceholder1, inputPlaceholder2) = (tf.placeholder(tf.float32, (1, 1), name='input1'), tf.placeholder(tf.float32, (1, 1), name='input2')) input1 = zqtflearn.input_data(placeholder=inputPlaceholder1) input2 = zqtflearn.input_data(placeholder=inputPlaceholder2) network = zqtflearn.merge([input1, input2], 'sum') network = zqtflearn.reshape(network, (1, 1)) network = zqtflearn.fully_connected(network, 1) network = zqtflearn.regression(network) return (zqtflearn.DNN(network), [('input1:0', inputPlaceholder1, self.INPUT_DATA_1), ('input2:0', inputPlaceholder2, self.INPUT_DATA_2)], self.TARGET)
3,643,260,391,374,534,000
Build a simple model for test Returns: DNN, [ (input layer name, input placeholder, input data) ], Target data
zqtflearn2/tests/test_inputs.py
build_simple_model
ZhengDeQuan/AAA
python
def build_simple_model(self): 'Build a simple model for test\n Returns:\n DNN, [ (input layer name, input placeholder, input data) ], Target data\n ' (inputPlaceholder1, inputPlaceholder2) = (tf.placeholder(tf.float32, (1, 1), name='input1'), tf.placeholder(tf.float32, (1, 1), name='input2')) input1 = zqtflearn.input_data(placeholder=inputPlaceholder1) input2 = zqtflearn.input_data(placeholder=inputPlaceholder2) network = zqtflearn.merge([input1, input2], 'sum') network = zqtflearn.reshape(network, (1, 1)) network = zqtflearn.fully_connected(network, 1) network = zqtflearn.regression(network) return (zqtflearn.DNN(network), [('input1:0', inputPlaceholder1, self.INPUT_DATA_1), ('input2:0', inputPlaceholder2, self.INPUT_DATA_2)], self.TARGET)
def run_inference(df, df_old, df_test, CFG, run=None): '\n Run inference loop\n ' assert isinstance(CFG, dict), 'CFG has to be a dict with parameters' assert isinstance(df, pd.DataFrame), 'df has to be a pandas dataframe' oof = None sub = None for fold in range(CFG['num_folds']): accelerator = Accelerator(device_placement=True, fp16=CFG['use_fp16'], split_batches=False) if (CFG['device'] == 'GPU'): accelerator.state.device = torch.device('cuda:{}'.format(CFG['device_index'])) accelerator.print(('-' * 55)) accelerator.print('FOLD {:d}/{:d}'.format((fold + 1), CFG['num_folds'])) accelerator.print(('-' * 55)) (df_trn, df_val) = get_data(df=df, df_old=df_old, fold=fold, CFG=CFG, accelerator=accelerator, silent=True) (_, val_loader) = get_loaders(df_train=df_trn, df_valid=df_val, CFG=CFG, accelerator=accelerator, labeled=False, silent=True) (_, test_loader) = get_loaders(df_train=df_trn, df_valid=df_test, CFG=CFG, accelerator=accelerator, labeled=False, silent=True) model = get_model(CFG=CFG, pretrained=(CFG['out_path'] + 'weights_fold{}.pth'.format(int(fold)))) (model, val_loader, test_loader) = accelerator.prepare(model, val_loader, test_loader) if CFG['predict_oof']: val_preds = test_epoch(loader=val_loader, model=model, CFG=CFG, accelerator=accelerator, num_tta=CFG['num_tta']) val_preds_df = pd.DataFrame(val_preds, columns=['pred']) val_preds_df = pd.concat([df_val, val_preds_df], axis=1) oof = pd.concat([oof, val_preds_df], axis=0).reset_index(drop=True) if CFG['predict_test']: test_preds = test_epoch(loader=test_loader, model=model, CFG=CFG, accelerator=accelerator, num_tta=CFG['num_tta']) test_preds_df = pd.DataFrame(test_preds, columns=['pred_fold{}'.format(int(fold))]) sub = pd.concat([sub, test_preds_df], axis=1) del model, val_loader, test_loader del accelerator gc.collect() if CFG['predict_oof']: oof.to_csv((CFG['out_path'] + 'oof.csv'), index=False) if CFG['tracking']: run['oof'].upload((CFG['out_path'] + 'oof.csv')) if CFG['predict_test']: sub = pd.concat([df_test['Id'], sub], axis=1) sub.to_csv((CFG['out_path'] + 'submission.csv'), index=False) if CFG['tracking']: run['submission'].upload((CFG['out_path'] + 'submission.csv'))
-4,392,115,608,977,143,300
Run inference loop
code/run_inference.py
run_inference
kozodoi/Pet_Pawpularity
python
def run_inference(df, df_old, df_test, CFG, run=None): '\n \n ' assert isinstance(CFG, dict), 'CFG has to be a dict with parameters' assert isinstance(df, pd.DataFrame), 'df has to be a pandas dataframe' oof = None sub = None for fold in range(CFG['num_folds']): accelerator = Accelerator(device_placement=True, fp16=CFG['use_fp16'], split_batches=False) if (CFG['device'] == 'GPU'): accelerator.state.device = torch.device('cuda:{}'.format(CFG['device_index'])) accelerator.print(('-' * 55)) accelerator.print('FOLD {:d}/{:d}'.format((fold + 1), CFG['num_folds'])) accelerator.print(('-' * 55)) (df_trn, df_val) = get_data(df=df, df_old=df_old, fold=fold, CFG=CFG, accelerator=accelerator, silent=True) (_, val_loader) = get_loaders(df_train=df_trn, df_valid=df_val, CFG=CFG, accelerator=accelerator, labeled=False, silent=True) (_, test_loader) = get_loaders(df_train=df_trn, df_valid=df_test, CFG=CFG, accelerator=accelerator, labeled=False, silent=True) model = get_model(CFG=CFG, pretrained=(CFG['out_path'] + 'weights_fold{}.pth'.format(int(fold)))) (model, val_loader, test_loader) = accelerator.prepare(model, val_loader, test_loader) if CFG['predict_oof']: val_preds = test_epoch(loader=val_loader, model=model, CFG=CFG, accelerator=accelerator, num_tta=CFG['num_tta']) val_preds_df = pd.DataFrame(val_preds, columns=['pred']) val_preds_df = pd.concat([df_val, val_preds_df], axis=1) oof = pd.concat([oof, val_preds_df], axis=0).reset_index(drop=True) if CFG['predict_test']: test_preds = test_epoch(loader=test_loader, model=model, CFG=CFG, accelerator=accelerator, num_tta=CFG['num_tta']) test_preds_df = pd.DataFrame(test_preds, columns=['pred_fold{}'.format(int(fold))]) sub = pd.concat([sub, test_preds_df], axis=1) del model, val_loader, test_loader del accelerator gc.collect() if CFG['predict_oof']: oof.to_csv((CFG['out_path'] + 'oof.csv'), index=False) if CFG['tracking']: run['oof'].upload((CFG['out_path'] + 'oof.csv')) if CFG['predict_test']: sub = pd.concat([df_test['Id'], sub], axis=1) sub.to_csv((CFG['out_path'] + 'submission.csv'), index=False) if CFG['tracking']: run['submission'].upload((CFG['out_path'] + 'submission.csv'))
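run_inference reads a fixed set of CFG keys; a hypothetical minimal configuration, with key names taken from the function body and values purely illustrative, could look like this:

CFG = {
    'num_folds': 5,           # folds to loop over; one weights_fold{K}.pth per fold
    'use_fp16': True,         # forwarded to Accelerator
    'device': 'GPU',          # 'GPU' pins the accelerator to cuda:<device_index>
    'device_index': 0,
    'out_path': './output/',  # where weights live and oof.csv / submission.csv are written
    'num_tta': 1,             # TTA rounds forwarded to test_epoch
    'predict_oof': True,      # compute out-of-fold predictions
    'predict_test': True,     # compute test predictions
    'tracking': False,        # if True, uploads artifacts to the `run` object
}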
@csrf_exempt def add_vit(request): '\n Add a new vit with the API; image and video are currently not supported\n ' user = KeyBackend().authenticate(request) if (request.method == 'POST'): if request.user.is_authenticated: form = VitForm(request.POST) if form.is_valid(): vit = form.save(commit=False) vit.user = request.user vit.save() return JsonResponse({'status': 'success', 'vit': vit.to_json()}, status=201) else: return JsonResponse({'error': 'No vit body provided'}, status=400) else: return JsonResponse({'error': 'You must be logged in'}, status=401) else: return JsonResponse({'error': 'Invalid request'}, status=400)
-4,221,172,300,516,865,500
Add a new vit with the API; image and video are currently not supported
apps/vit/api.py
add_vit
Visualway/Vitary
python
@csrf_exempt def add_vit(request): '\n \n ' user = KeyBackend().authenticate(request) if (request.method == 'POST'): if request.user.is_authenticated: form = VitForm(request.POST) if form.is_valid(): vit = form.save(commit=False) vit.user = request.user vit.save() return JsonResponse({'status': 'success', 'vit': vit.to_json()}, status=201) else: return JsonResponse({'error': 'No vit body provided'}, status=400) else: return JsonResponse({'error': 'You must be logged in'}, status=401) else: return JsonResponse({'error': 'Invalid request'}, status=400)
@csrf_exempt def edit_vit(request): '\n Edit a vit with API\n ' user = KeyBackend().authenticate(request) if (request.method == 'POST'): if request.user.is_authenticated: try: vit = Vit.objects.get(id=request.POST.get('vit_pk')) if (vit.user == request.user): form = VitForm(request.POST, instance=vit) if form.is_valid(): vit = form.save(commit=False) vit.save() return JsonResponse({'status': 'success', 'vit': vit.to_json()}, status=201) else: return JsonResponse({'error': 'No vit body provided'}, status=400) else: return JsonResponse({'error': 'You do not have permission to edit this vit'}, status=403) except Vit.DoesNotExist: return JsonResponse({'error': 'Vit not found'}, status=404) else: return JsonResponse({'error': 'You must be logged in'}, status=401) else: return JsonResponse({'error': 'Invalid request'}, status=400)
-5,257,368,736,001,182,000
Edit a vit with API
apps/vit/api.py
edit_vit
Visualway/Vitary
python
@csrf_exempt def edit_vit(request): '\n \n ' user = KeyBackend().authenticate(request) if (request.method == 'POST'): if request.user.is_authenticated: try: vit = Vit.objects.get(id=request.POST.get('vit_pk')) if (vit.user == request.user): form = VitForm(request.POST, instance=vit) if form.is_valid(): vit = form.save(commit=False) vit.save() return JsonResponse({'status': 'success', 'vit': vit.to_json()}, status=201) else: return JsonResponse({'error': 'No vit body provided'}, status=400) else: return JsonResponse({'error': 'You do not have permission to edit this vit'}, status=403) except Vit.DoesNotExist: return JsonResponse({'error': 'Vit not found'}, status=404) else: return JsonResponse({'error': 'You must be logged in'}, status=401) else: return JsonResponse({'error': 'Invalid request'}, status=400)
@csrf_exempt def delete_vit(request): '\n Delete a vit with API\n ' user = KeyBackend().authenticate(request) if (request.method == 'POST'): if request.user.is_authenticated: try: vit = Vit.objects.get(id=request.POST.get('vit_pk')) if (vit.user == request.user): vit.delete() return JsonResponse({'status': 'success'}, status=200) else: return JsonResponse({'error': 'You do not have permission to delete this vit'}, status=403) except Vit.DoesNotExist: return JsonResponse({'error': 'Vit not found'}, status=404) else: return JsonResponse({'error': 'You must be logged in'}, status=401) else: return JsonResponse({'error': 'Invalid request'}, status=400)
-5,072,101,132,261,834,000
Delete a vit with API
apps/vit/api.py
delete_vit
Visualway/Vitary
python
@csrf_exempt def delete_vit(request): '\n \n ' user = KeyBackend().authenticate(request) if (request.method == 'POST'): if request.user.is_authenticated: try: vit = Vit.objects.get(id=request.POST.get('vit_pk')) if (vit.user == request.user): vit.delete() return JsonResponse({'status': 'success'}, status=200) else: return JsonResponse({'error': 'You do not have permission to delete this vit'}, status=403) except Vit.DoesNotExist: return JsonResponse({'error': 'Vit not found'}, status=404) else: return JsonResponse({'error': 'You must be logged in'}, status=401) else: return JsonResponse({'error': 'Invalid request'}, status=400)
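All three vit endpoints share one contract: POST only, API-key authentication through KeyBackend, and (for edit/delete) a vit_pk form field. A hedged sketch with Django's test client; the URL path is hypothetical, since the urlconf and the KeyBackend internals are outside this excerpt:

from django.test import Client

client = Client()
# '/api/vit/delete/' is a placeholder route; substitute the project's real one.
resp = client.post('/api/vit/delete/', {'vit_pk': 42})
# Per the view: unauthenticated callers get 401, non-owners 403, unknown ids 404, success 200.
assert resp.status_code in (200, 401, 403, 404)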
def get_bytes(s): 'Returns the byte representation of a hex- or byte-string.' if isinstance(s, bytes): b = s elif isinstance(s, str): b = bytes.fromhex(s) else: raise TypeError("s must be either 'bytes' or 'str'!") return b
6,362,010,049,312,019,000
Returns the byte representation of a hex- or byte-string.
pywallet/utils/ethereum.py
get_bytes
ukor/pywallet
python
def get_bytes(s): if isinstance(s, bytes): b = s elif isinstance(s, str): b = bytes.fromhex(s) else: raise TypeError("s must be either 'bytes' or 'str'!") return b
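A quick illustration of the two accepted input types and the TypeError guard:

assert get_bytes('deadbeef') == b'\xde\xad\xbe\xef'           # hex string is decoded
assert get_bytes(b'\xde\xad\xbe\xef') == b'\xde\xad\xbe\xef'  # bytes pass through unchanged
# get_bytes(42) raises TypeError("s must be either 'bytes' or 'str'!")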
@staticmethod def from_b58check(private_key): ' Decodes a Base58Check encoded private-key.\n\n Args:\n private_key (str): A Base58Check encoded private key.\n\n Returns:\n PrivateKey: A PrivateKey object\n ' raise NotImplementedError
-2,194,948,722,815,287,300
Decodes a Base58Check encoded private-key. Args: private_key (str): A Base58Check encoded private key. Returns: PrivateKey: A PrivateKey object
pywallet/utils/ethereum.py
from_b58check
ukor/pywallet
python
@staticmethod def from_b58check(private_key): ' Decodes a Base58Check encoded private-key.\n\n Args:\n private_key (str): A Base58Check encoded private key.\n\n Returns:\n PrivateKey: A PrivateKey object\n ' raise NotImplementedError
@property def public_key(self): ' Returns the public key associated with this private key.\n\n Returns:\n PublicKey:\n The PublicKey object that corresponds to this\n private key.\n ' return self._public_key
-3,903,173,452,133,121,000
Returns the public key associated with this private key. Returns: PublicKey: The PublicKey object that corresponds to this private key.
pywallet/utils/ethereum.py
public_key
ukor/pywallet
python
@property def public_key(self): ' Returns the public key associated with this private key.\n\n Returns:\n PublicKey:\n The PublicKey object that corresponds to this\n private key.\n ' return self._public_key
def raw_sign(self, message, do_hash=True): " Signs message using this private key.\n\n Args:\n message (bytes): The message to be signed. If a string is\n provided it is assumed the encoding is 'ascii' and\n converted to bytes. If this is not the case, it is up\n to the caller to convert the string to bytes\n appropriately and pass in the bytes.\n do_hash (bool): True if the message should be hashed prior\n to signing, False if not. This should always be left as\n True except in special situations which require doing\n the hash outside (e.g. handling Bitcoin bugs).\n\n Returns:\n ECPointAffine:\n a raw point (r = pt.x, s = pt.y) which is\n the signature.\n " raise NotImplementedError
-3,442,752,404,058,702,000
Signs message using this private key. Args: message (bytes): The message to be signed. If a string is provided it is assumed the encoding is 'ascii' and converted to bytes. If this is not the case, it is up to the caller to convert the string to bytes appropriately and pass in the bytes. do_hash (bool): True if the message should be hashed prior to signing, False if not. This should always be left as True except in special situations which require doing the hash outside (e.g. handling Bitcoin bugs). Returns: ECPointAffine: a raw point (r = pt.x, s = pt.y) which is the signature.
pywallet/utils/ethereum.py
raw_sign
ukor/pywallet
python
def raw_sign(self, message, do_hash=True): " Signs message using this private key.\n\n Args:\n message (bytes): The message to be signed. If a string is\n provided it is assumed the encoding is 'ascii' and\n converted to bytes. If this is not the case, it is up\n to the caller to convert the string to bytes\n appropriately and pass in the bytes.\n do_hash (bool): True if the message should be hashed prior\n to signing, False if not. This should always be left as\n True except in special situations which require doing\n the hash outside (e.g. handling Bitcoin bugs).\n\n Returns:\n ECPointAffine:\n a raw point (r = pt.x, s = pt.y) which is\n the signature.\n " raise NotImplementedError
def sign(self, message, do_hash=True): " Signs message using this private key.\n\n Note:\n This differs from `raw_sign()` since it returns a\n Signature object.\n\n Args:\n message (bytes or str): The message to be signed. If a\n string is provided it is assumed the encoding is\n 'ascii' and converted to bytes. If this is not the\n case, it is up to the caller to convert the string to\n bytes appropriately and pass in the bytes.\n do_hash (bool): True if the message should be hashed prior\n to signing, False if not. This should always be left as\n True except in special situations which require doing\n the hash outside (e.g. handling Bitcoin bugs).\n\n Returns:\n Signature: The signature corresponding to message.\n " raise NotImplementedError
8,591,961,857,602,308,000
Signs message using this private key. Note: This differs from `raw_sign()` since it returns a Signature object. Args: message (bytes or str): The message to be signed. If a string is provided it is assumed the encoding is 'ascii' and converted to bytes. If this is not the case, it is up to the caller to convert the string to bytes appropriately and pass in the bytes. do_hash (bool): True if the message should be hashed prior to signing, False if not. This should always be left as True except in special situations which require doing the hash outside (e.g. handling Bitcoin bugs). Returns: Signature: The signature corresponding to message.
pywallet/utils/ethereum.py
sign
ukor/pywallet
python
def sign(self, message, do_hash=True): " Signs message using this private key.\n\n Note:\n This differs from `raw_sign()` since it returns a\n Signature object.\n\n Args:\n message (bytes or str): The message to be signed. If a\n string is provided it is assumed the encoding is\n 'ascii' and converted to bytes. If this is not the\n case, it is up to the caller to convert the string to\n bytes appropriately and pass in the bytes.\n do_hash (bool): True if the message should be hashed prior\n to signing, False if not. This should always be left as\n True except in special situations which require doing\n the hash outside (e.g. handling Bitcoin bugs).\n\n Returns:\n Signature: The signature corresponding to message.\n " raise NotImplementedError
def sign_bitcoin(self, message, compressed=False): ' Signs a message using this private key such that it\n is compatible with bitcoind, bx, and other Bitcoin\n clients/nodes/utilities.\n\n Note:\n 0x18 + b"Bitcoin Signed Message:" + newline + len(message) is\n prepended to the message before signing.\n\n Args:\n message (bytes or str): Message to be signed.\n compressed (bool): True if the corresponding public key will be\n used in compressed format. False if the uncompressed version\n is used.\n\n Returns:\n bytes: A Base64-encoded byte string of the signed message.\n The first byte of the encoded message contains information\n about how to recover the public key. In bitcoind parlance,\n this is the magic number containing the recovery ID and\n whether or not the key was compressed or not. (This function\n always processes full, uncompressed public-keys, so the magic\n number will always be either 27 or 28).\n ' raise NotImplementedError
-577,525,095,042,194,300
Signs a message using this private key such that it is compatible with bitcoind, bx, and other Bitcoin clients/nodes/utilities. Note: 0x18 + b"Bitcoin Signed Message:" + newline + len(message) is prepended to the message before signing. Args: message (bytes or str): Message to be signed. compressed (bool): True if the corresponding public key will be used in compressed format. False if the uncompressed version is used. Returns: bytes: A Base64-encoded byte string of the signed message. The first byte of the encoded message contains information about how to recover the public key. In bitcoind parlance, this is the magic number containing the recovery ID and whether or not the key was compressed or not. (This function always processes full, uncompressed public-keys, so the magic number will always be either 27 or 28).
pywallet/utils/ethereum.py
sign_bitcoin
ukor/pywallet
python
def sign_bitcoin(self, message, compressed=False): ' Signs a message using this private key such that it\n is compatible with bitcoind, bx, and other Bitcoin\n clients/nodes/utilities.\n\n Note:\n 0x18 + b"Bitcoin Signed Message:" + newline + len(message) is\n prepended to the message before signing.\n\n Args:\n message (bytes or str): Message to be signed.\n compressed (bool): True if the corresponding public key will be\n used in compressed format. False if the uncompressed version\n is used.\n\n Returns:\n bytes: A Base64-encoded byte string of the signed message.\n The first byte of the encoded message contains information\n about how to recover the public key. In bitcoind parlance,\n this is the magic number containing the recovery ID and\n whether or not the key was compressed or not. (This function\n always processes full, uncompressed public-keys, so the magic\n number will always be either 27 or 28).\n ' raise NotImplementedError
def to_b58check(self, testnet=False): ' Generates a Base58Check encoding of this private key.\n\n Returns:\n str: A Base58Check encoded string representing the key.\n ' raise NotImplementedError
-4,622,134,561,817,966,000
Generates a Base58Check encoding of this private key. Returns: str: A Base58Check encoded string representing the key.
pywallet/utils/ethereum.py
to_b58check
ukor/pywallet
python
def to_b58check(self, testnet=False): ' Generates a Base58Check encoding of this private key.\n\n Returns:\n str: A Base58Check encoded string representing the key.\n ' raise NotImplementedError
def to_hex(self): ' Generates a hex encoding of the serialized key.\n\n Returns:\n str: A hex encoded string representing the key.\n ' return codecs.encode(bytes(self), 'hex_codec').decode('ascii')
1,117,107,793,647,114,500
Generates a hex encoding of the serialized key. Returns: str: A hex encoded string representing the key.
pywallet/utils/ethereum.py
to_hex
ukor/pywallet
python
def to_hex(self): ' Generates a hex encoding of the serialized key.\n\n Returns:\n str: A hex encoded string representing the key.\n ' return codecs.encode(bytes(self), 'hex_codec').decode('ascii')
@staticmethod def from_bytes(key_bytes): ' Generates a public key object from a byte (or hex) string.\n\n Args:\n key_bytes (bytes or str): A byte stream.\n\n Returns:\n PublicKey: A PublicKey object.\n ' raise NotImplementedError
-7,482,710,778,146,468,000
Generates a public key object from a byte (or hex) string. Args: key_bytes (bytes or str): A byte stream. Returns: PublicKey: A PublicKey object.
pywallet/utils/ethereum.py
from_bytes
ukor/pywallet
python
@staticmethod def from_bytes(key_bytes): ' Generates a public key object from a byte (or hex) string.\n\n Args:\n key_bytes (bytes or str): A byte stream.\n\n Returns:\n PublicKey: A PublicKey object.\n ' raise NotImplementedError
@staticmethod def from_private_key(private_key): ' Generates a public key object from a PrivateKey object.\n\n Args:\n private_key (PrivateKey): The private key object from\n which to derive this object.\n\n Returns:\n PublicKey: A PublicKey object.\n ' return private_key.public_key
-7,522,706,405,364,125,000
Generates a public key object from a PrivateKey object. Args: private_key (PrivateKey): The private key object from which to derive this object. Returns: PublicKey: A PublicKey object.
pywallet/utils/ethereum.py
from_private_key
ukor/pywallet
python
@staticmethod def from_private_key(private_key): ' Generates a public key object from a PrivateKey object.\n\n Args:\n private_key (PrivateKey): The private key object from\n which to derive this object.\n\n Returns:\n PublicKey: A PublicKey object.\n ' return private_key.public_key
def hash160(self, compressed=True): ' Return the RIPEMD-160 hash of the SHA-256 hash of the\n public key.\n\n Args:\n compressed (bool): Whether or not the compressed key should\n be used.\n Returns:\n bytes: RIPEMD-160 byte string.\n ' raise NotImplementedError
8,495,305,454,611,718,000
Return the RIPEMD-160 hash of the SHA-256 hash of the public key. Args: compressed (bool): Whether or not the compressed key should be used. Returns: bytes: RIPEMD-160 byte string.
pywallet/utils/ethereum.py
hash160
ukor/pywallet
python
def hash160(self, compressed=True): ' Return the RIPEMD-160 hash of the SHA-256 hash of the\n public key.\n\n Args:\n compressed (bool): Whether or not the compressed key should\n be used.\n Returns:\n bytes: RIPEMD-160 byte string.\n ' raise NotImplementedError
def address(self, compressed=True, testnet=False): ' Address property that returns the Base58Check\n encoded version of the HASH160.\n\n Args:\n compressed (bool): Whether or not the compressed key should\n be used.\n testnet (bool): Whether or not the key is intended for testnet\n usage. False indicates mainnet usage.\n\n Returns:\n bytes: Base58Check encoded string\n ' raise NotImplementedError
1,505,693,198,810,818,800
Address property that returns the Base58Check encoded version of the HASH160. Args: compressed (bool): Whether or not the compressed key should be used. testnet (bool): Whether or not the key is intended for testnet usage. False indicates mainnet usage. Returns: bytes: Base58Check encoded string
pywallet/utils/ethereum.py
address
ukor/pywallet
python
def address(self, compressed=True, testnet=False): ' Address property that returns the Base58Check\n encoded version of the HASH160.\n\n Args:\n compressed (bool): Whether or not the compressed key should\n be used.\n testnet (bool): Whether or not the key is intended for testnet\n usage. False indicates mainnet usage.\n\n Returns:\n bytes: Base58Check encoded string\n ' raise NotImplementedError
def verify(self, message, signature, do_hash=True): ' Verifies that message was appropriately signed.\n\n Args:\n message (bytes): The message to be verified.\n signature (Signature): A signature object.\n do_hash (bool): True if the message should be hashed prior\n to signing, False if not. This should always be left as\n True except in special situations which require doing\n the hash outside (e.g. handling Bitcoin bugs).\n\n Returns:\n verified (bool): True if the signature is verified, False\n otherwise.\n ' raise NotImplementedError
299,335,390,298,276,500
Verifies that message was appropriately signed. Args: message (bytes): The message to be verified. signature (Signature): A signature object. do_hash (bool): True if the message should be hashed prior to signing, False if not. This should always be left as True except in special situations which require doing the hash outside (e.g. handling Bitcoin bugs). Returns: verified (bool): True if the signature is verified, False otherwise.
pywallet/utils/ethereum.py
verify
ukor/pywallet
python
def verify(self, message, signature, do_hash=True): ' Verifies that message was appropriately signed.\n\n Args:\n message (bytes): The message to be verified.\n signature (Signature): A signature object.\n do_hash (bool): True if the message should be hashed prior\n to signing, False if not. This should always be left as\n True except in special situations which require doing\n the hash outside (e.g. handling Bitcoin bugs).\n\n Returns:\n verified (bool): True if the signature is verified, False\n otherwise.\n ' raise NotImplementedError
def to_hex(self): ' Hex representation of the serialized byte stream.\n\n Returns:\n h (str): A hex-encoded string.\n ' return codecs.encode(bytes(self), 'hex_codec').decode('ascii')
-3,788,021,947,701,061,000
Hex representation of the serialized byte stream. Returns: h (str): A hex-encoded string.
pywallet/utils/ethereum.py
to_hex
ukor/pywallet
python
def to_hex(self): ' Hex representation of the serialized byte stream.\n\n Returns:\n h (str): A hex-encoded string.\n ' return codecs.encode(bytes(self), 'hex_codec').decode('ascii')
@property def compressed_bytes(self): ' Byte string corresponding to a compressed representation\n of this public key.\n\n Returns:\n b (bytes): A 33-byte long byte string.\n ' raise NotImplementedError
-5,173,596,266,008,949,000
Byte string corresponding to a compressed representation of this public key. Returns: b (bytes): A 33-byte long byte string.
pywallet/utils/ethereum.py
compressed_bytes
ukor/pywallet
python
@property def compressed_bytes(self): ' Byte string corresponding to a compressed representation\n of this public key.\n\n Returns:\n b (bytes): A 33-byte long byte string.\n ' raise NotImplementedError
@staticmethod def from_bytes(b): ' Generates PrivateKey from the underlying bytes.\n\n Args:\n b (bytes): A byte stream containing a 256-bit (32-byte) integer.\n\n Returns:\n tuple(PrivateKey, bytes): A PrivateKey object and the remainder\n of the bytes.\n ' if (len(b) < 32): raise ValueError('b must contain at least 32 bytes') return PrivateKey(int.from_bytes(b[:32], 'big'))
4,301,424,855,126,597,600
Generates PrivateKey from the underlying bytes. Args: b (bytes): A byte stream containing a 256-bit (32-byte) integer. Returns: PrivateKey: A PrivateKey object.
pywallet/utils/ethereum.py
from_bytes
ukor/pywallet
python
@staticmethod def from_bytes(b): ' Generates PrivateKey from the underlying bytes.\n\n Args:\n b (bytes): A byte stream containing a 256-bit (32-byte) integer.\n\n Returns:\n PrivateKey: A PrivateKey object.\n ' if (len(b) < 32): raise ValueError('b must contain at least 32 bytes') return PrivateKey(int.from_bytes(b[:32], 'big'))
@staticmethod def from_hex(h): ' Generates PrivateKey from a hex-encoded string.\n\n Args:\n h (str): A hex-encoded string containing a 256-bit\n (32-byte) integer.\n\n Returns:\n PrivateKey: A PrivateKey object.\n ' return PrivateKey.from_bytes(bytes.fromhex(h))
-2,570,931,154,343,340,000
Generates PrivateKey from a hex-encoded string. Args: h (str): A hex-encoded string containing a 256-bit (32-byte) integer. Returns: PrivateKey: A PrivateKey object.
pywallet/utils/ethereum.py
from_hex
ukor/pywallet
python
@staticmethod def from_hex(h): ' Generates PrivateKey from a hex-encoded string.\n\n Args:\n h (str): A hex-encoded string containing a 256-bit\n (32-byte) integer.\n\n Returns:\n PrivateKey: A PrivateKey object.\n ' return PrivateKey.from_bytes(bytes.fromhex(h))
@staticmethod def from_int(i): ' Initializes a private key from an integer.\n\n Args:\n i (int): Integer that is the private key.\n\n Returns:\n PrivateKey: The object representing the private key.\n ' return PrivateKey(i)
-5,908,994,610,876,351,000
Initializes a private key from an integer. Args: i (int): Integer that is the private key. Returns: PrivateKey: The object representing the private key.
pywallet/utils/ethereum.py
from_int
ukor/pywallet
python
@staticmethod def from_int(i): ' Initializes a private key from an integer.\n\n Args:\n i (int): Integer that is the private key.\n\n Returns:\n PrivateKey: The object representing the private key.\n ' return PrivateKey(i)
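from_int, from_hex, and from_bytes are three routes to the same key material. A small sketch of their equivalence, assuming the .key integer attribute that the public_key property of this class reads:

k_int   = PrivateKey.from_int(0xC0FFEE)
k_hex   = PrivateKey.from_hex('00' * 29 + 'c0ffee')           # 32 bytes, big-endian
k_bytes = PrivateKey.from_bytes(bytes(29) + b'\xc0\xff\xee')  # same 32 bytes
assert k_int.key == k_hex.key == k_bytes.key == 0xC0FFEE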
@staticmethod def from_b58check(private_key): ' Decodes a Base58Check encoded private-key.\n\n Args:\n private_key (str): A Base58Check encoded private key.\n\n Returns:\n PrivateKey: A PrivateKey object\n ' b58dec = base58.b58decode_check(private_key) version = b58dec[0] assert (version in [PrivateKey.TESTNET_VERSION, PrivateKey.MAINNET_VERSION]) return PrivateKey(int.from_bytes(b58dec[1:], 'big'))
4,843,793,838,649,410,000
Decodes a Base58Check encoded private-key. Args: private_key (str): A Base58Check encoded private key. Returns: PrivateKey: A PrivateKey object
pywallet/utils/ethereum.py
from_b58check
ukor/pywallet
python
@staticmethod def from_b58check(private_key): ' Decodes a Base58Check encoded private-key.\n\n Args:\n private_key (str): A Base58Check encoded private key.\n\n Returns:\n PrivateKey: A PrivateKey object\n ' b58dec = base58.b58decode_check(private_key) version = b58dec[0] assert (version in [PrivateKey.TESTNET_VERSION, PrivateKey.MAINNET_VERSION]) return PrivateKey(int.from_bytes(b58dec[1:], 'big'))
@staticmethod def from_random(): ' Initializes a private key from a random integer.\n\n Returns:\n PrivateKey: The object representing the private key.\n ' return PrivateKey(random.SystemRandom().randrange(1, bitcoin_curve.n))
-7,143,004,497,988,753,000
Initializes a private key from a random integer. Returns: PrivateKey: The object representing the private key.
pywallet/utils/ethereum.py
from_random
ukor/pywallet
python
@staticmethod def from_random(): ' Initializes a private key from a random integer.\n\n Returns:\n PrivateKey: The object representing the private key.\n ' return PrivateKey(random.SystemRandom().randrange(1, bitcoin_curve.n))
@property def public_key(self): ' Returns the public key associated with this private key.\n\n Returns:\n PublicKey:\n The PublicKey object that corresponds to this\n private key.\n ' if (self._public_key is None): self._public_key = PublicKey.from_point(bitcoin_curve.public_key(self.key)) return self._public_key
252,891,754,864,734,080
Returns the public key associated with this private key. Returns: PublicKey: The PublicKey object that corresponds to this private key.
pywallet/utils/ethereum.py
public_key
ukor/pywallet
python
@property def public_key(self): ' Returns the public key associated with this private key.\n\n Returns:\n PublicKey:\n The PublicKey object that corresponds to this\n private key.\n ' if (self._public_key is None): self._public_key = PublicKey.from_point(bitcoin_curve.public_key(self.key)) return self._public_key
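Note the memoization here: the elliptic-curve multiplication runs only on first access, and later accesses return the cached object.

priv = PrivateKey.from_random()
assert priv.public_key is priv.public_key   # same cached PublicKey instance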
def raw_sign(self, message, do_hash=True): " Signs message using this private key.\n\n Args:\n message (bytes): The message to be signed. If a string is\n provided it is assumed the encoding is 'ascii' and\n converted to bytes. If this is not the case, it is up\n to the caller to convert the string to bytes\n appropriately and pass in the bytes.\n do_hash (bool): True if the message should be hashed prior\n to signing, False if not. This should always be left as\n True except in special situations which require doing\n the hash outside (e.g. handling Bitcoin bugs).\n\n Returns:\n ECPointAffine:\n a raw point (r = pt.x, s = pt.y) which is\n the signature.\n " if isinstance(message, str): msg = bytes(message, 'ascii') elif isinstance(message, bytes): msg = message else: raise TypeError('message must be either str or bytes!') (sig_pt, rec_id) = bitcoin_curve.sign(msg, self.key, do_hash) if (sig_pt.y >= (bitcoin_curve.n // 2)): sig_pt = Point(sig_pt.x, (bitcoin_curve.n - sig_pt.y)) rec_id ^= 1 return (sig_pt, rec_id)
-5,298,181,686,060,600,000
Signs message using this private key. Args: message (bytes): The message to be signed. If a string is provided it is assumed the encoding is 'ascii' and converted to bytes. If this is not the case, it is up to the caller to convert the string to bytes appropriately and pass in the bytes. do_hash (bool): True if the message should be hashed prior to signing, False if not. This should always be left as True except in special situations which require doing the hash outside (e.g. handling Bitcoin bugs). Returns: ECPointAffine: a raw point (r = pt.x, s = pt.y) which is the signature.
pywallet/utils/ethereum.py
raw_sign
ukor/pywallet
python
def raw_sign(self, message, do_hash=True): " Signs message using this private key.\n\n Args:\n message (bytes): The message to be signed. If a string is\n provided it is assumed the encoding is 'ascii' and\n converted to bytes. If this is not the case, it is up\n to the caller to convert the string to bytes\n appropriately and pass in the bytes.\n do_hash (bool): True if the message should be hashed prior\n to signing, False if not. This should always be left as\n True except in special situations which require doing\n the hash outside (e.g. handling Bitcoin bugs).\n\n Returns:\n ECPointAffine:\n a raw point (r = pt.x, s = pt.y) which is\n the signature.\n " if isinstance(message, str): msg = bytes(message, 'ascii') elif isinstance(message, bytes): msg = message else: raise TypeError('message must be either str or bytes!') (sig_pt, rec_id) = bitcoin_curve.sign(msg, self.key, do_hash) if (sig_pt.y >= (bitcoin_curve.n // 2)): sig_pt = Point(sig_pt.x, (bitcoin_curve.n - sig_pt.y)) rec_id ^= 1 return (sig_pt, rec_id)
def sign(self, message, do_hash=True): " Signs message using this private key.\n\n Note:\n This differs from `raw_sign()` since it returns a Signature object.\n\n Args:\n message (bytes or str): The message to be signed. If a\n string is provided it is assumed the encoding is\n 'ascii' and converted to bytes. If this is not the\n case, it is up to the caller to convert the string to\n bytes appropriately and pass in the bytes.\n do_hash (bool): True if the message should be hashed prior\n to signing, False if not. This should always be left as\n True except in special situations which require doing\n the hash outside (e.g. handling Bitcoin bugs).\n\n Returns:\n Signature: The signature corresponding to message.\n " (sig_pt, rec_id) = self.raw_sign(message, do_hash) return Signature(sig_pt.x, sig_pt.y, rec_id)
-2,846,491,641,569,747,500
Signs message using this private key. Note: This differs from `raw_sign()` since it returns a Signature object. Args: message (bytes or str): The message to be signed. If a string is provided it is assumed the encoding is 'ascii' and converted to bytes. If this is not the case, it is up to the caller to convert the string to bytes appropriately and pass in the bytes. do_hash (bool): True if the message should be hashed prior to signing, False if not. This should always be left as True except in special situations which require doing the hash outside (e.g. handling Bitcoin bugs). Returns: Signature: The signature corresponding to message.
pywallet/utils/ethereum.py
sign
ukor/pywallet
python
def sign(self, message, do_hash=True): " Signs message using this private key.\n\n Note:\n This differs from `raw_sign()` since it returns a Signature object.\n\n Args:\n message (bytes or str): The message to be signed. If a\n string is provided it is assumed the encoding is\n 'ascii' and converted to bytes. If this is not the\n case, it is up to the caller to convert the string to\n bytes appropriately and pass in the bytes.\n do_hash (bool): True if the message should be hashed prior\n to signing, False if not. This should always be left as\n True except in special situations which require doing\n the hash outside (e.g. handling Bitcoin bugs).\n\n Returns:\n Signature: The signature corresponding to message.\n " (sig_pt, rec_id) = self.raw_sign(message, do_hash) return Signature(sig_pt.x, sig_pt.y, rec_id)
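A sign/verify round trip, assuming the concrete PublicKey.verify mirrors the abstract signature shown earlier in this file (the concrete verify is not part of this excerpt):

priv = PrivateKey.from_random()
sig = priv.sign(b'hello')                    # SHA-256 is applied internally (do_hash=True)
assert sig.recovery_id in (0, 1, 2, 3)       # set by raw_sign, adjusted for the low-s form
assert priv.public_key.verify(b'hello', sig)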
def sign_bitcoin(self, message, compressed=False): ' Signs a message using this private key such that it\n is compatible with bitcoind, bx, and other Bitcoin\n clients/nodes/utilities.\n\n Note:\n 0x18 + b"Bitcoin Signed Message:" + newline + len(message) is\n prepended to the message before signing.\n\n Args:\n message (bytes or str): Message to be signed.\n compressed (bool): True if the corresponding public key will be\n used in compressed format. False if the uncompressed version\n is used.\n\n Returns:\n bytes: A Base64-encoded byte string of the signed message.\n The first byte of the encoded message contains information\n about how to recover the public key. In bitcoind parlance,\n this is the magic number containing the recovery ID and\n whether or not the key was compressed or not.\n ' if isinstance(message, str): msg_in = bytes(message, 'ascii') elif isinstance(message, bytes): msg_in = message else: raise TypeError('message must be either str or bytes!') msg = ((b'\x18Bitcoin Signed Message:\n' + bytes([len(msg_in)])) + msg_in) msg_hash = hashlib.sha256(msg).digest() sig = self.sign(msg_hash) comp_adder = (4 if compressed else 0) magic = ((27 + sig.recovery_id) + comp_adder) return base64.b64encode((bytes([magic]) + bytes(sig)))
-8,730,095,808,249,029,000
Signs a message using this private key such that it is compatible with bitcoind, bx, and other Bitcoin clients/nodes/utilities. Note: 0x18 + b"Bitcoin Signed Message:" + newline + len(message) is prepended to the message before signing. Args: message (bytes or str): Message to be signed. compressed (bool): True if the corresponding public key will be used in compressed format. False if the uncompressed version is used. Returns: bytes: A Base64-encoded byte string of the signed message. The first byte of the encoded message contains information about how to recover the public key. In bitcoind parlance, this is the magic number containing the recovery ID and whether or not the key was compressed or not.
pywallet/utils/ethereum.py
sign_bitcoin
ukor/pywallet
python
def sign_bitcoin(self, message, compressed=False): ' Signs a message using this private key such that it\n is compatible with bitcoind, bx, and other Bitcoin\n clients/nodes/utilities.\n\n Note:\n 0x18 + b"Bitcoin Signed Message:" + newline + len(message) is\n prepended to the message before signing.\n\n Args:\n message (bytes or str): Message to be signed.\n compressed (bool): True if the corresponding public key will be\n used in compressed format. False if the uncompressed version\n is used.\n\n Returns:\n bytes: A Base64-encoded byte string of the signed message.\n The first byte of the encoded message contains information\n about how to recover the public key. In bitcoind parlance,\n this is the magic number containing the recovery ID and\n whether or not the key was compressed or not.\n ' if isinstance(message, str): msg_in = bytes(message, 'ascii') elif isinstance(message, bytes): msg_in = message else: raise TypeError('message must be either str or bytes!') msg = ((b'\x18Bitcoin Signed Message:\n' + bytes([len(msg_in)])) + msg_in) msg_hash = hashlib.sha256(msg).digest() sig = self.sign(msg_hash) comp_adder = (4 if compressed else 0) magic = ((27 + sig.recovery_id) + comp_adder) return base64.b64encode((bytes([magic]) + bytes(sig)))
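The first byte of the decoded output encodes both the recovery id and the compression flag; unpacking it follows directly from magic = 27 + recovery_id + (4 if compressed else 0):

import base64

priv = PrivateKey.from_random()
encoded = priv.sign_bitcoin('hello', compressed=True)
magic = base64.b64decode(encoded)[0]
compressed = magic >= 31             # 31..34 => compressed key, 27..30 => uncompressed
recovery_id = (magic - 27) & 3       # strips the compression offset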
def to_b58check(self, testnet=False): ' Generates a Base58Check encoding of this private key.\n\n Returns:\n str: A Base58Check encoded string representing the key.\n ' version = (self.TESTNET_VERSION if testnet else self.MAINNET_VERSION) return base58.b58encode_check((bytes([version]) + bytes(self)))
326,012,224,586,419,000
Generates a Base58Check encoding of this private key. Returns: str: A Base58Check encoded string representing the key.
pywallet/utils/ethereum.py
to_b58check
ukor/pywallet
python
def to_b58check(self, testnet=False): ' Generates a Base58Check encoding of this private key.\n\n Returns:\n str: A Base58Check encoded string representing the key.\n ' version = (self.TESTNET_VERSION if testnet else self.MAINNET_VERSION) return base58.b58encode_check((bytes([version]) + bytes(self)))
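A WIF-style round trip through the two Base58Check methods, assuming bytes(priv) serializes the 32-byte big-endian key that from_b58check reconstructs:

priv = PrivateKey.from_random()
wif = priv.to_b58check()                         # mainnet version byte by default
assert PrivateKey.from_b58check(wif).key == priv.key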
@staticmethod def from_point(p): ' Generates a public key object from any object\n containing x, y coordinates.\n\n Args:\n p (Point): An object containing a two-dimensional, affine\n representation of a point on the secp256k1 curve.\n\n Returns:\n PublicKey: A PublicKey object.\n ' return PublicKey(p.x, p.y)
-2,368,731,521,817,372,700
Generates a public key object from any object containing x, y coordinates. Args: p (Point): An object containing a two-dimensional, affine representation of a point on the secp256k1 curve. Returns: PublicKey: A PublicKey object.
pywallet/utils/ethereum.py
from_point
ukor/pywallet
python
@staticmethod def from_point(p): ' Generates a public key object from any object\n containing x, y coordinates.\n\n Args:\n p (Point): An object containing a two-dimensional, affine\n representation of a point on the secp256k1 curve.\n\n Returns:\n PublicKey: A PublicKey object.\n ' return PublicKey(p.x, p.y)
@staticmethod def from_int(i): ' Generates a public key object from an integer.\n\n Note:\n This assumes that the upper 32 bytes of the integer\n are the x component of the public key point and the\n lower 32 bytes are the y component.\n\n Args:\n i (Bignum): A 512-bit integer representing the public\n key point on the secp256k1 curve.\n\n Returns:\n PublicKey: A PublicKey object.\n ' point = ECPointAffine.from_int(bitcoin_curve, i) return PublicKey.from_point(point)
-4,592,589,044,295,648,000
Generates a public key object from an integer. Note: This assumes that the upper 32 bytes of the integer are the x component of the public key point and the lower 32 bytes are the y component. Args: i (Bignum): A 512-bit integer representing the public key point on the secp256k1 curve. Returns: PublicKey: A PublicKey object.
pywallet/utils/ethereum.py
from_int
ukor/pywallet
python
@staticmethod def from_int(i): ' Generates a public key object from an integer.\n\n Note:\n This assumes that the upper 32 bytes of the integer\n are the x component of the public key point and the\n lower 32 bytes are the y component.\n\n Args:\n i (Bignum): A 512-bit integer representing the public\n key point on the secp256k1 curve.\n\n Returns:\n PublicKey: A PublicKey object.\n ' point = ECPointAffine.from_int(bitcoin_curve, i) return PublicKey.from_point(point)
@staticmethod def from_base64(b64str, testnet=False): ' Generates a public key object from a Base64 encoded string.\n\n Args:\n b64str (str): A Base64-encoded string.\n testnet (bool) (Optional): If True, changes the version that\n is prepended to the key.\n\n Returns:\n PublicKey: A PublicKey object.\n ' return PublicKey.from_bytes(base64.b64decode(b64str))
440,505,516,854,344,770
Generates a public key object from a Base64 encoded string. Args: b64str (str): A Base64-encoded string. testnet (bool) (Optional): If True, changes the version that is prepended to the key. Returns: PublicKey: A PublicKey object.
pywallet/utils/ethereum.py
from_base64
ukor/pywallet
python
@staticmethod def from_base64(b64str, testnet=False): ' Generates a public key object from a Base64 encoded string.\n\n Args:\n b64str (str): A Base64-encoded string.\n testnet (bool) (Optional): If True, changes the version that\n is prepended to the key.\n\n Returns:\n PublicKey: A PublicKey object.\n ' return PublicKey.from_bytes(base64.b64decode(b64str))
@staticmethod def from_bytes(key_bytes): ' Generates a public key object from a byte (or hex) string.\n\n The byte stream must be of the SEC variety\n (http://www.secg.org/): beginning with a single byte telling\n what key representation follows. A full, uncompressed key\n is represented by: 0x04 followed by 64 bytes containing\n the x and y components of the point. For compressed keys\n with an even y component, 0x02 is followed by 32 bytes\n containing the x component. For compressed keys with an\n odd y component, 0x03 is followed by 32 bytes containing\n the x component.\n\n Args:\n key_bytes (bytes or str): A byte stream that conforms to the above.\n\n Returns:\n PublicKey: A PublicKey object.\n ' b = get_bytes(key_bytes) key_bytes_len = len(b) key_type = b[0] if (key_type == 4): if (key_bytes_len != 65): raise ValueError('key_bytes must be exactly 65 bytes long when uncompressed.') x = int.from_bytes(b[1:33], 'big') y = int.from_bytes(b[33:65], 'big') elif ((key_type == 2) or (key_type == 3)): if (key_bytes_len != 33): raise ValueError('key_bytes must be exactly 33 bytes long when compressed.') x = int.from_bytes(b[1:33], 'big') ys = bitcoin_curve.y_from_x(x) last_bit = (key_type - 2) for y in ys: if ((y & 1) == last_bit): break else: return None return PublicKey(x, y)
-2,474,053,631,457,766,400
Generates a public key object from a byte (or hex) string. The byte stream must be of the SEC variety (http://www.secg.org/): beginning with a single byte telling what key representation follows. A full, uncompressed key is represented by: 0x04 followed by 64 bytes containing the x and y components of the point. For compressed keys with an even y component, 0x02 is followed by 32 bytes containing the x component. For compressed keys with an odd y component, 0x03 is followed by 32 bytes containing the x component. Args: key_bytes (bytes or str): A byte stream that conforms to the above. Returns: PublicKey: A PublicKey object.
pywallet/utils/ethereum.py
from_bytes
ukor/pywallet
python
@staticmethod def from_bytes(key_bytes): ' Generates a public key object from a byte (or hex) string.\n\n The byte stream must be of the SEC variety\n (http://www.secg.org/): beginning with a single byte telling\n what key representation follows. A full, uncompressed key\n is represented by: 0x04 followed by 64 bytes containing\n the x and y components of the point. For compressed keys\n with an even y component, 0x02 is followed by 32 bytes\n containing the x component. For compressed keys with an\n odd y component, 0x03 is followed by 32 bytes containing\n the x component.\n\n Args:\n key_bytes (bytes or str): A byte stream that conforms to the above.\n\n Returns:\n PublicKey: A PublicKey object.\n ' b = get_bytes(key_bytes) key_bytes_len = len(b) key_type = b[0] if (key_type == 4): if (key_bytes_len != 65): raise ValueError('key_bytes must be exactly 65 bytes long when uncompressed.') x = int.from_bytes(b[1:33], 'big') y = int.from_bytes(b[33:65], 'big') elif ((key_type == 2) or (key_type == 3)): if (key_bytes_len != 33): raise ValueError('key_bytes must be exactly 33 bytes long when compressed.') x = int.from_bytes(b[1:33], 'big') ys = bitcoin_curve.y_from_x(x) last_bit = (key_type - 2) for y in ys: if ((y & 1) == last_bit): break else: return None return PublicKey(x, y)
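A compressed SEC key is one prefix byte plus the 32-byte x coordinate. The secp256k1 generator point makes a convenient known test vector (its y coordinate is even, so the prefix is 0x02), and a hex string is accepted here because the method routes through get_bytes:

G_compressed = '02' + '79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798'
pub = PublicKey.from_bytes(G_compressed)
assert pub is not None               # parse succeeded; pub holds the recovered (x, y) point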
@staticmethod def from_hex(h): ' Generates a public key object from a hex-encoded string.\n\n See from_bytes() for requirements of the hex string.\n\n Args:\n h (str): A hex-encoded string.\n\n Returns:\n PublicKey: A PublicKey object.\n ' return PublicKey.from_bytes(h)
5,488,581,212,984,160,000
Generates a public key object from a hex-encoded string. See from_bytes() for requirements of the hex string. Args: h (str): A hex-encoded string. Returns: PublicKey: A PublicKey object.
pywallet/utils/ethereum.py
from_hex
ukor/pywallet
python
@staticmethod def from_hex(h): ' Generates a public key object from a hex-encoded string.\n\n See from_bytes() for requirements of the hex string.\n\n Args:\n h (str): A hex-encoded string.\n\n Returns:\n PublicKey: A PublicKey object.\n ' return PublicKey.from_bytes(h)
@staticmethod def from_signature(message, signature): ' Attempts to create PublicKey object by deriving it\n from the message and signature.\n\n Args:\n message (bytes): The message to be verified.\n signature (Signature): The signature for message.\n The recovery_id must not be None!\n\n Returns:\n PublicKey:\n A PublicKey object derived from the\n signature, it it exists. None otherwise.\n ' if (signature.recovery_id is None): raise ValueError('The signature must have a recovery_id.') msg = get_bytes(message) pub_keys = bitcoin_curve.recover_public_key(msg, signature, signature.recovery_id) for (k, recid) in pub_keys: if ((signature.recovery_id is not None) and (recid == signature.recovery_id)): return PublicKey(k.x, k.y) return None
-9,018,104,774,159,616,000
Attempts to create PublicKey object by deriving it from the message and signature. Args: message (bytes): The message to be verified. signature (Signature): The signature for message. The recovery_id must not be None! Returns: PublicKey: A PublicKey object derived from the signature, if it exists. None otherwise.
pywallet/utils/ethereum.py
from_signature
ukor/pywallet
python
@staticmethod def from_signature(message, signature): ' Attempts to create PublicKey object by deriving it\n from the message and signature.\n\n Args:\n message (bytes): The message to be verified.\n signature (Signature): The signature for message.\n The recovery_id must not be None!\n\n Returns:\n PublicKey:\n A PublicKey object derived from the\n signature, if it exists. None otherwise.\n ' if (signature.recovery_id is None): raise ValueError('The signature must have a recovery_id.') msg = get_bytes(message) pub_keys = bitcoin_curve.recover_public_key(msg, signature, signature.recovery_id) for (k, recid) in pub_keys: if ((signature.recovery_id is not None) and (recid == signature.recovery_id)): return PublicKey(k.x, k.y) return None
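The intended shape of key recovery, then: sign once, recover from the message plus the signature's recovery_id, and compare against the known public key. Whether the message must be pre-hashed to match recover_public_key's expectations is outside this excerpt, so treat this strictly as a sketch:

priv = PrivateKey.from_random()
msg = b'recover me'
sig = priv.sign(msg)                          # Signature carries recovery_id from raw_sign
recovered = PublicKey.from_signature(msg, sig)
# Expected (but not verifiable from this excerpt alone): recovered matches priv.public_key.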