body
stringlengths 26
98.2k
| body_hash
int64 -9,222,864,604,528,158,000
9,221,803,474B
| docstring
stringlengths 1
16.8k
| path
stringlengths 5
230
| name
stringlengths 1
96
| repository_name
stringlengths 7
89
| lang
stringclasses 1
value | body_without_docstring
stringlengths 20
98.2k
|
---|---|---|---|---|---|---|---|
def to_str(self):
'\n Returns the string representation of the model\n '
return pformat(self.to_dict()) | -3,531,024,894,346,511,000 | Returns the string representation of the model | squareconnect/models/v1_retrieve_business_request.py | to_str | reduceus/connect-python-sdk | python | def to_str(self):
'\n \n '
return pformat(self.to_dict()) |
def __repr__(self):
'\n For `print` and `pprint`\n '
return self.to_str() | 5,853,962,500,611,353,000 | For `print` and `pprint` | squareconnect/models/v1_retrieve_business_request.py | __repr__ | reduceus/connect-python-sdk | python | def __repr__(self):
'\n \n '
return self.to_str() |
def __eq__(self, other):
'\n Returns true if both objects are equal\n '
return (self.__dict__ == other.__dict__) | 3,599,733,221,149,238,300 | Returns true if both objects are equal | squareconnect/models/v1_retrieve_business_request.py | __eq__ | reduceus/connect-python-sdk | python | def __eq__(self, other):
'\n \n '
return (self.__dict__ == other.__dict__) |
def __ne__(self, other):
'\n Returns true if both objects are not equal\n '
return (not (self == other)) | 3,600,423,175,817,510,400 | Returns true if both objects are not equal | squareconnect/models/v1_retrieve_business_request.py | __ne__ | reduceus/connect-python-sdk | python | def __ne__(self, other):
'\n \n '
return (not (self == other)) |
@classmethod
def initialize(cls, application_id: str, application_version: str, upload_enabled: bool, configuration: Optional[Configuration]=None, data_dir: Optional[Path]=None):
'\n Initialize the Glean SDK.\n\n This should only be initialized once by the application, and not by\n libraries using the Glean SDK. A message is logged to error and no\n changes are made to the state if initialize is called a more than\n once.\n\n Args:\n application_id (str): The application id to use when sending pings.\n application_version (str): The version of the application sending\n Glean data.\n upload_enabled (bool): Controls whether telemetry is enabled. If\n disabled, all persisted metrics, events and queued pings\n (except first_run_date) are cleared.\n configuration (glean.config.Configuration): (optional) An object with\n global settings.\n data_dir (pathlib.Path): (optional) The path to the Glean data\n directory. If not provided, uses a temporary directory.\n '
if cls.is_initialized():
return
if (configuration is None):
configuration = Configuration()
if (data_dir is None):
data_dir = Path(tempfile.TemporaryDirectory().name)
cls._destroy_data_dir = True
else:
cls._destroy_data_dir = False
cls._data_dir = data_dir
cls._configuration = configuration
cls._application_id = application_id
cls._application_version = application_version
cls._upload_enabled = upload_enabled
cfg = _ffi.make_config(cls._data_dir, application_id, cls._upload_enabled, configuration.max_events)
cls._initialized = (_ffi.lib.glean_initialize(cfg) != 0)
if (not cls._initialized):
return
for ping in cls._ping_type_queue:
cls.register_ping_type(ping)
cls._initialize_core_metrics()
@Dispatcher.launch_at_front
def submit_pending_events():
if _ffi.lib.glean_on_ready_to_submit_pings():
PingUploadWorker.process()
Dispatcher.flush_queued_initial_tasks()
if (cls._upload_enabled is False):
@Dispatcher.launch
def check_pending_deletion_request():
DeletionPingUploadWorker.process() | -4,728,470,344,958,751,000 | Initialize the Glean SDK.
This should only be initialized once by the application, and not by
libraries using the Glean SDK. A message is logged to error and no
changes are made to the state if initialize is called a more than
once.
Args:
application_id (str): The application id to use when sending pings.
application_version (str): The version of the application sending
Glean data.
upload_enabled (bool): Controls whether telemetry is enabled. If
disabled, all persisted metrics, events and queued pings
(except first_run_date) are cleared.
configuration (glean.config.Configuration): (optional) An object with
global settings.
data_dir (pathlib.Path): (optional) The path to the Glean data
directory. If not provided, uses a temporary directory. | glean-core/python/glean/glean.py | initialize | tdsmith/glean | python | @classmethod
def initialize(cls, application_id: str, application_version: str, upload_enabled: bool, configuration: Optional[Configuration]=None, data_dir: Optional[Path]=None):
'\n Initialize the Glean SDK.\n\n This should only be initialized once by the application, and not by\n libraries using the Glean SDK. A message is logged to error and no\n changes are made to the state if initialize is called a more than\n once.\n\n Args:\n application_id (str): The application id to use when sending pings.\n application_version (str): The version of the application sending\n Glean data.\n upload_enabled (bool): Controls whether telemetry is enabled. If\n disabled, all persisted metrics, events and queued pings\n (except first_run_date) are cleared.\n configuration (glean.config.Configuration): (optional) An object with\n global settings.\n data_dir (pathlib.Path): (optional) The path to the Glean data\n directory. If not provided, uses a temporary directory.\n '
if cls.is_initialized():
return
if (configuration is None):
configuration = Configuration()
if (data_dir is None):
data_dir = Path(tempfile.TemporaryDirectory().name)
cls._destroy_data_dir = True
else:
cls._destroy_data_dir = False
cls._data_dir = data_dir
cls._configuration = configuration
cls._application_id = application_id
cls._application_version = application_version
cls._upload_enabled = upload_enabled
cfg = _ffi.make_config(cls._data_dir, application_id, cls._upload_enabled, configuration.max_events)
cls._initialized = (_ffi.lib.glean_initialize(cfg) != 0)
if (not cls._initialized):
return
for ping in cls._ping_type_queue:
cls.register_ping_type(ping)
cls._initialize_core_metrics()
@Dispatcher.launch_at_front
def submit_pending_events():
if _ffi.lib.glean_on_ready_to_submit_pings():
PingUploadWorker.process()
Dispatcher.flush_queued_initial_tasks()
if (cls._upload_enabled is False):
@Dispatcher.launch
def check_pending_deletion_request():
DeletionPingUploadWorker.process() |
@classmethod
def reset(cls):
'\n Resets the Glean singleton.\n '
Dispatcher.reset()
if cls._initialized:
_ffi.lib.glean_destroy_glean()
cls._initialized = False
if (cls._destroy_data_dir and cls._data_dir.exists()):
shutil.rmtree(str(cls._data_dir)) | 6,506,047,272,699,636,000 | Resets the Glean singleton. | glean-core/python/glean/glean.py | reset | tdsmith/glean | python | @classmethod
def reset(cls):
'\n \n '
Dispatcher.reset()
if cls._initialized:
_ffi.lib.glean_destroy_glean()
cls._initialized = False
if (cls._destroy_data_dir and cls._data_dir.exists()):
shutil.rmtree(str(cls._data_dir)) |
@classmethod
def is_initialized(cls) -> bool:
'\n Returns True if the Glean SDK has been initialized.\n '
return cls._initialized | 3,996,010,336,293,605,400 | Returns True if the Glean SDK has been initialized. | glean-core/python/glean/glean.py | is_initialized | tdsmith/glean | python | @classmethod
def is_initialized(cls) -> bool:
'\n \n '
return cls._initialized |
@classmethod
def register_ping_type(cls, ping: 'PingType'):
'\n Register the ping type in the registry.\n '
if cls.is_initialized():
_ffi.lib.glean_register_ping_type(ping._handle)
cls._ping_type_queue.add(ping) | 4,308,451,517,292,364,300 | Register the ping type in the registry. | glean-core/python/glean/glean.py | register_ping_type | tdsmith/glean | python | @classmethod
def register_ping_type(cls, ping: 'PingType'):
'\n \n '
if cls.is_initialized():
_ffi.lib.glean_register_ping_type(ping._handle)
cls._ping_type_queue.add(ping) |
@classmethod
def test_has_ping_type(cls, ping_name: str):
'\n Returns True if a ping by this name is in the ping registry.\n '
return bool(_ffi.lib.glean_test_has_ping_type(_ffi.ffi_encode_string(ping_name))) | -4,462,209,159,790,732,000 | Returns True if a ping by this name is in the ping registry. | glean-core/python/glean/glean.py | test_has_ping_type | tdsmith/glean | python | @classmethod
def test_has_ping_type(cls, ping_name: str):
'\n \n '
return bool(_ffi.lib.glean_test_has_ping_type(_ffi.ffi_encode_string(ping_name))) |
@classmethod
def set_upload_enabled(cls, enabled: bool):
"\n Enable or disable Glean collection and upload.\n\n Metric collection is enabled by default.\n\n When uploading is disabled, metrics aren't recorded at all and no data\n is uploaded.\n\n When disabling, all pending metrics, events and queued pings are cleared.\n\n When enabling, the core Glean metrics are recreated.\n\n Args:\n enabled (bool): When True, enable metric collection.\n "
if cls.is_initialized():
original_enabled = cls.get_upload_enabled()
@Dispatcher.launch
def set_upload_enabled():
_ffi.lib.glean_set_upload_enabled(enabled)
if ((original_enabled is False) and (cls.get_upload_enabled() is True)):
cls._initialize_core_metrics()
if ((original_enabled is True) and (cls.get_upload_enabled() is False)):
DeletionPingUploadWorker.process()
else:
cls._upload_enabled = enabled | -4,540,148,066,862,046,700 | Enable or disable Glean collection and upload.
Metric collection is enabled by default.
When uploading is disabled, metrics aren't recorded at all and no data
is uploaded.
When disabling, all pending metrics, events and queued pings are cleared.
When enabling, the core Glean metrics are recreated.
Args:
enabled (bool): When True, enable metric collection. | glean-core/python/glean/glean.py | set_upload_enabled | tdsmith/glean | python | @classmethod
def set_upload_enabled(cls, enabled: bool):
"\n Enable or disable Glean collection and upload.\n\n Metric collection is enabled by default.\n\n When uploading is disabled, metrics aren't recorded at all and no data\n is uploaded.\n\n When disabling, all pending metrics, events and queued pings are cleared.\n\n When enabling, the core Glean metrics are recreated.\n\n Args:\n enabled (bool): When True, enable metric collection.\n "
if cls.is_initialized():
original_enabled = cls.get_upload_enabled()
@Dispatcher.launch
def set_upload_enabled():
_ffi.lib.glean_set_upload_enabled(enabled)
if ((original_enabled is False) and (cls.get_upload_enabled() is True)):
cls._initialize_core_metrics()
if ((original_enabled is True) and (cls.get_upload_enabled() is False)):
DeletionPingUploadWorker.process()
else:
cls._upload_enabled = enabled |
@classmethod
def get_upload_enabled(cls) -> bool:
'\n Get whether or not Glean is allowed to record and upload data.\n '
if cls.is_initialized():
return bool(_ffi.lib.glean_is_upload_enabled())
else:
return cls._upload_enabled | 683,168,247,219,681,500 | Get whether or not Glean is allowed to record and upload data. | glean-core/python/glean/glean.py | get_upload_enabled | tdsmith/glean | python | @classmethod
def get_upload_enabled(cls) -> bool:
'\n \n '
if cls.is_initialized():
return bool(_ffi.lib.glean_is_upload_enabled())
else:
return cls._upload_enabled |
@classmethod
def set_experiment_active(cls, experiment_id: str, branch: str, extra: Optional[Dict[(str, str)]]=None):
'\n Indicate that an experiment is running. Glean will then add an\n experiment annotation to the environment which is sent with pings. This\n information is not persisted between runs.\n\n Args:\n experiment_id (str): The id of the active experiment (maximum 100\n bytes)\n branch (str): The experiment branch (maximum 100 bytes)\n extra (dict of str -> str): Optional metadata to output with the\n ping\n '
if (extra is None):
keys = []
values = []
else:
(keys, values) = zip(*extra.items())
@Dispatcher.launch
def set_experiment_active():
_ffi.lib.glean_set_experiment_active(_ffi.ffi_encode_string(experiment_id), _ffi.ffi_encode_string(branch), _ffi.ffi_encode_vec_string(keys), _ffi.ffi_encode_vec_string(values), len(keys)) | 2,253,114,502,048,816,400 | Indicate that an experiment is running. Glean will then add an
experiment annotation to the environment which is sent with pings. This
information is not persisted between runs.
Args:
experiment_id (str): The id of the active experiment (maximum 100
bytes)
branch (str): The experiment branch (maximum 100 bytes)
extra (dict of str -> str): Optional metadata to output with the
ping | glean-core/python/glean/glean.py | set_experiment_active | tdsmith/glean | python | @classmethod
def set_experiment_active(cls, experiment_id: str, branch: str, extra: Optional[Dict[(str, str)]]=None):
'\n Indicate that an experiment is running. Glean will then add an\n experiment annotation to the environment which is sent with pings. This\n information is not persisted between runs.\n\n Args:\n experiment_id (str): The id of the active experiment (maximum 100\n bytes)\n branch (str): The experiment branch (maximum 100 bytes)\n extra (dict of str -> str): Optional metadata to output with the\n ping\n '
if (extra is None):
keys = []
values = []
else:
(keys, values) = zip(*extra.items())
@Dispatcher.launch
def set_experiment_active():
_ffi.lib.glean_set_experiment_active(_ffi.ffi_encode_string(experiment_id), _ffi.ffi_encode_string(branch), _ffi.ffi_encode_vec_string(keys), _ffi.ffi_encode_vec_string(values), len(keys)) |
@classmethod
def set_experiment_inactive(cls, experiment_id: str):
'\n Indicate that the experiment is no longer running.\n\n Args:\n experiment_id (str): The id of the experiment to deactivate.\n '
@Dispatcher.launch
def set_experiment_inactive():
_ffi.lib.glean_set_experiment_inactive(_ffi.ffi_encode_string(experiment_id)) | -614,298,736,145,810,700 | Indicate that the experiment is no longer running.
Args:
experiment_id (str): The id of the experiment to deactivate. | glean-core/python/glean/glean.py | set_experiment_inactive | tdsmith/glean | python | @classmethod
def set_experiment_inactive(cls, experiment_id: str):
'\n Indicate that the experiment is no longer running.\n\n Args:\n experiment_id (str): The id of the experiment to deactivate.\n '
@Dispatcher.launch
def set_experiment_inactive():
_ffi.lib.glean_set_experiment_inactive(_ffi.ffi_encode_string(experiment_id)) |
@classmethod
def test_is_experiment_active(cls, experiment_id: str) -> bool:
'\n Tests whether an experiment is active, for testing purposes only.\n\n Args:\n experiment_id (str): The id of the experiment to look for.\n\n Returns:\n is_active (bool): If the experiement is active and reported in\n pings.\n '
return bool(_ffi.lib.glean_experiment_test_is_active(_ffi.ffi_encode_string(experiment_id))) | 2,430,373,024,920,355,300 | Tests whether an experiment is active, for testing purposes only.
Args:
experiment_id (str): The id of the experiment to look for.
Returns:
is_active (bool): If the experiement is active and reported in
pings. | glean-core/python/glean/glean.py | test_is_experiment_active | tdsmith/glean | python | @classmethod
def test_is_experiment_active(cls, experiment_id: str) -> bool:
'\n Tests whether an experiment is active, for testing purposes only.\n\n Args:\n experiment_id (str): The id of the experiment to look for.\n\n Returns:\n is_active (bool): If the experiement is active and reported in\n pings.\n '
return bool(_ffi.lib.glean_experiment_test_is_active(_ffi.ffi_encode_string(experiment_id))) |
@classmethod
def test_get_experiment_data(cls, experiment_id: str) -> 'RecordedExperimentData':
'\n Returns the stored data for the requested active experiment, for testing purposes only.\n\n Args:\n experiment_id (str): The id of the experiment to look for.\n\n Returns:\n experiment_data (RecordedExperimentData): The data associated with\n the experiment.\n '
from .metrics import RecordedExperimentData
json_string = _ffi.ffi_decode_string(_ffi.lib.glean_experiment_test_get_data(_ffi.ffi_encode_string(experiment_id)))
json_tree = json.loads(json_string)
return RecordedExperimentData(**json_tree) | -7,832,785,363,075,766,000 | Returns the stored data for the requested active experiment, for testing purposes only.
Args:
experiment_id (str): The id of the experiment to look for.
Returns:
experiment_data (RecordedExperimentData): The data associated with
the experiment. | glean-core/python/glean/glean.py | test_get_experiment_data | tdsmith/glean | python | @classmethod
def test_get_experiment_data(cls, experiment_id: str) -> 'RecordedExperimentData':
'\n Returns the stored data for the requested active experiment, for testing purposes only.\n\n Args:\n experiment_id (str): The id of the experiment to look for.\n\n Returns:\n experiment_data (RecordedExperimentData): The data associated with\n the experiment.\n '
from .metrics import RecordedExperimentData
json_string = _ffi.ffi_decode_string(_ffi.lib.glean_experiment_test_get_data(_ffi.ffi_encode_string(experiment_id)))
json_tree = json.loads(json_string)
return RecordedExperimentData(**json_tree) |
@classmethod
def _initialize_core_metrics(cls):
'\n Set a few metrics that will be sent as part of every ping.\n '
from ._builtins import metrics
metrics.glean.baseline.locale.set(util.get_locale_tag())
metrics.glean.internal.metrics.os.set(platform.system())
metrics.glean.internal.metrics.os_version.set(platform.release())
metrics.glean.internal.metrics.architecture.set(platform.machine())
metrics.glean.internal.metrics.locale.set(util.get_locale_tag())
sysinfo = hardware.get_system_information()
metrics.glean.internal.metrics.device_manufacturer.set(sysinfo.manufacturer)
metrics.glean.internal.metrics.device_model.set(sysinfo.model)
if (cls._configuration.channel is not None):
metrics.glean.internal.metrics.app_channel.set(cls._configuration.channel)
metrics.glean.internal.metrics.app_build.set(cls._application_id)
if (cls._application_version is not None):
metrics.glean.internal.metrics.app_display_version.set(cls._application_version) | 6,444,462,496,595,110,000 | Set a few metrics that will be sent as part of every ping. | glean-core/python/glean/glean.py | _initialize_core_metrics | tdsmith/glean | python | @classmethod
def _initialize_core_metrics(cls):
'\n \n '
from ._builtins import metrics
metrics.glean.baseline.locale.set(util.get_locale_tag())
metrics.glean.internal.metrics.os.set(platform.system())
metrics.glean.internal.metrics.os_version.set(platform.release())
metrics.glean.internal.metrics.architecture.set(platform.machine())
metrics.glean.internal.metrics.locale.set(util.get_locale_tag())
sysinfo = hardware.get_system_information()
metrics.glean.internal.metrics.device_manufacturer.set(sysinfo.manufacturer)
metrics.glean.internal.metrics.device_model.set(sysinfo.model)
if (cls._configuration.channel is not None):
metrics.glean.internal.metrics.app_channel.set(cls._configuration.channel)
metrics.glean.internal.metrics.app_build.set(cls._application_id)
if (cls._application_version is not None):
metrics.glean.internal.metrics.app_display_version.set(cls._application_version) |
@classmethod
def get_data_dir(cls) -> Path:
'\n Get the data directory for Glean.\n '
return cls._data_dir | 3,003,310,338,921,044,500 | Get the data directory for Glean. | glean-core/python/glean/glean.py | get_data_dir | tdsmith/glean | python | @classmethod
def get_data_dir(cls) -> Path:
'\n \n '
return cls._data_dir |
@classmethod
def test_collect(cls, ping: 'PingType') -> str:
'\n Collect a ping and return as a string.\n '
return _ffi.ffi_decode_string(_ffi.lib.glean_ping_collect(ping._handle)) | -2,071,894,214,660,975,000 | Collect a ping and return as a string. | glean-core/python/glean/glean.py | test_collect | tdsmith/glean | python | @classmethod
def test_collect(cls, ping: 'PingType') -> str:
'\n \n '
return _ffi.ffi_decode_string(_ffi.lib.glean_ping_collect(ping._handle)) |
@classmethod
def _submit_pings(cls, pings: List['PingType']):
'\n Collect and submit a list of pings for eventual uploading.\n\n If the ping currently contains no content, it will not be assembled and\n queued for sending.\n\n Args:\n pings (list of PingType): List of pings to submit.\n '
ping_names = [ping.name for ping in pings]
cls._submit_pings_by_name(ping_names) | -3,624,161,310,636,229,000 | Collect and submit a list of pings for eventual uploading.
If the ping currently contains no content, it will not be assembled and
queued for sending.
Args:
pings (list of PingType): List of pings to submit. | glean-core/python/glean/glean.py | _submit_pings | tdsmith/glean | python | @classmethod
def _submit_pings(cls, pings: List['PingType']):
'\n Collect and submit a list of pings for eventual uploading.\n\n If the ping currently contains no content, it will not be assembled and\n queued for sending.\n\n Args:\n pings (list of PingType): List of pings to submit.\n '
ping_names = [ping.name for ping in pings]
cls._submit_pings_by_name(ping_names) |
@classmethod
@Dispatcher.task
def _submit_pings_by_name(cls, ping_names: List[str]):
"\n Collect and submit a list of pings for eventual uploading by name.\n\n Each ping will be looked up in the known instances of\n `glean.metrics.PingType`. If the ping isn't known, an error is logged\n and the ping isn't queued for uploading.\n\n If the ping currently contains no content, it will not be assembled and\n queued for sending.\n\n Args:\n ping_names (list of str): List of ping names to submit.\n "
if (not cls.is_initialized()):
log.error('Glean must be initialized before submitting pings.')
return
if (not cls.get_upload_enabled()):
log.error('Glean must be enabled before submitting pings.')
return
sent_ping = _ffi.lib.glean_submit_pings_by_name(_ffi.ffi_encode_vec_string(ping_names), len(ping_names))
if sent_ping:
PingUploadWorker.process() | -2,446,921,619,524,210,000 | Collect and submit a list of pings for eventual uploading by name.
Each ping will be looked up in the known instances of
`glean.metrics.PingType`. If the ping isn't known, an error is logged
and the ping isn't queued for uploading.
If the ping currently contains no content, it will not be assembled and
queued for sending.
Args:
ping_names (list of str): List of ping names to submit. | glean-core/python/glean/glean.py | _submit_pings_by_name | tdsmith/glean | python | @classmethod
@Dispatcher.task
def _submit_pings_by_name(cls, ping_names: List[str]):
"\n Collect and submit a list of pings for eventual uploading by name.\n\n Each ping will be looked up in the known instances of\n `glean.metrics.PingType`. If the ping isn't known, an error is logged\n and the ping isn't queued for uploading.\n\n If the ping currently contains no content, it will not be assembled and\n queued for sending.\n\n Args:\n ping_names (list of str): List of ping names to submit.\n "
if (not cls.is_initialized()):
log.error('Glean must be initialized before submitting pings.')
return
if (not cls.get_upload_enabled()):
log.error('Glean must be enabled before submitting pings.')
return
sent_ping = _ffi.lib.glean_submit_pings_by_name(_ffi.ffi_encode_vec_string(ping_names), len(ping_names))
if sent_ping:
PingUploadWorker.process() |
def fake_mtime(self, fxn):
'Fake mtime to always be higher than expected.'
return (lambda name: (fxn(name) + 1)) | -3,927,634,467,120,221,700 | Fake mtime to always be higher than expected. | Lib/test/test_importlib/source/test_file_loader.py | fake_mtime | Ma233/RustPython | python | def fake_mtime(self, fxn):
return (lambda name: (fxn(name) + 1)) |
def manipulate_bytecode(self, name, mapping, manipulator, *, del_source=False, invalidation_mode=py_compile.PycInvalidationMode.TIMESTAMP):
'Manipulate the bytecode of a module by passing it into a callable\n that returns what to use as the new bytecode.'
try:
del sys.modules['_temp']
except KeyError:
pass
py_compile.compile(mapping[name], invalidation_mode=invalidation_mode)
if (not del_source):
bytecode_path = self.util.cache_from_source(mapping[name])
else:
os.unlink(mapping[name])
bytecode_path = make_legacy_pyc(mapping[name])
if manipulator:
with open(bytecode_path, 'rb') as file:
bc = file.read()
new_bc = manipulator(bc)
with open(bytecode_path, 'wb') as file:
if (new_bc is not None):
file.write(new_bc)
return bytecode_path | 5,683,144,941,224,898,000 | Manipulate the bytecode of a module by passing it into a callable
that returns what to use as the new bytecode. | Lib/test/test_importlib/source/test_file_loader.py | manipulate_bytecode | Ma233/RustPython | python | def manipulate_bytecode(self, name, mapping, manipulator, *, del_source=False, invalidation_mode=py_compile.PycInvalidationMode.TIMESTAMP):
'Manipulate the bytecode of a module by passing it into a callable\n that returns what to use as the new bytecode.'
try:
del sys.modules['_temp']
except KeyError:
pass
py_compile.compile(mapping[name], invalidation_mode=invalidation_mode)
if (not del_source):
bytecode_path = self.util.cache_from_source(mapping[name])
else:
os.unlink(mapping[name])
bytecode_path = make_legacy_pyc(mapping[name])
if manipulator:
with open(bytecode_path, 'rb') as file:
bc = file.read()
new_bc = manipulator(bc)
with open(bytecode_path, 'wb') as file:
if (new_bc is not None):
file.write(new_bc)
return bytecode_path |
def get_virtual_network_tap(resource_group_name: Optional[str]=None, tap_name: Optional[str]=None, opts: Optional[pulumi.InvokeOptions]=None) -> AwaitableGetVirtualNetworkTapResult:
'\n Virtual Network Tap resource.\n\n\n :param str resource_group_name: The name of the resource group.\n :param str tap_name: The name of virtual network tap.\n '
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['tapName'] = tap_name
if (opts is None):
opts = pulumi.InvokeOptions()
if (opts.version is None):
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:network/v20200701:getVirtualNetworkTap', __args__, opts=opts, typ=GetVirtualNetworkTapResult).value
return AwaitableGetVirtualNetworkTapResult(destination_load_balancer_front_end_ip_configuration=__ret__.destination_load_balancer_front_end_ip_configuration, destination_network_interface_ip_configuration=__ret__.destination_network_interface_ip_configuration, destination_port=__ret__.destination_port, etag=__ret__.etag, id=__ret__.id, location=__ret__.location, name=__ret__.name, network_interface_tap_configurations=__ret__.network_interface_tap_configurations, provisioning_state=__ret__.provisioning_state, resource_guid=__ret__.resource_guid, tags=__ret__.tags, type=__ret__.type) | 8,856,377,944,382,823,000 | Virtual Network Tap resource.
:param str resource_group_name: The name of the resource group.
:param str tap_name: The name of virtual network tap. | sdk/python/pulumi_azure_native/network/v20200701/get_virtual_network_tap.py | get_virtual_network_tap | polivbr/pulumi-azure-native | python | def get_virtual_network_tap(resource_group_name: Optional[str]=None, tap_name: Optional[str]=None, opts: Optional[pulumi.InvokeOptions]=None) -> AwaitableGetVirtualNetworkTapResult:
'\n Virtual Network Tap resource.\n\n\n :param str resource_group_name: The name of the resource group.\n :param str tap_name: The name of virtual network tap.\n '
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['tapName'] = tap_name
if (opts is None):
opts = pulumi.InvokeOptions()
if (opts.version is None):
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:network/v20200701:getVirtualNetworkTap', __args__, opts=opts, typ=GetVirtualNetworkTapResult).value
return AwaitableGetVirtualNetworkTapResult(destination_load_balancer_front_end_ip_configuration=__ret__.destination_load_balancer_front_end_ip_configuration, destination_network_interface_ip_configuration=__ret__.destination_network_interface_ip_configuration, destination_port=__ret__.destination_port, etag=__ret__.etag, id=__ret__.id, location=__ret__.location, name=__ret__.name, network_interface_tap_configurations=__ret__.network_interface_tap_configurations, provisioning_state=__ret__.provisioning_state, resource_guid=__ret__.resource_guid, tags=__ret__.tags, type=__ret__.type) |
@property
@pulumi.getter(name='destinationLoadBalancerFrontEndIPConfiguration')
def destination_load_balancer_front_end_ip_configuration(self) -> Optional['outputs.FrontendIPConfigurationResponse']:
'\n The reference to the private IP address on the internal Load Balancer that will receive the tap.\n '
return pulumi.get(self, 'destination_load_balancer_front_end_ip_configuration') | -1,861,593,513,802,929,000 | The reference to the private IP address on the internal Load Balancer that will receive the tap. | sdk/python/pulumi_azure_native/network/v20200701/get_virtual_network_tap.py | destination_load_balancer_front_end_ip_configuration | polivbr/pulumi-azure-native | python | @property
@pulumi.getter(name='destinationLoadBalancerFrontEndIPConfiguration')
def destination_load_balancer_front_end_ip_configuration(self) -> Optional['outputs.FrontendIPConfigurationResponse']:
'\n \n '
return pulumi.get(self, 'destination_load_balancer_front_end_ip_configuration') |
@property
@pulumi.getter(name='destinationNetworkInterfaceIPConfiguration')
def destination_network_interface_ip_configuration(self) -> Optional['outputs.NetworkInterfaceIPConfigurationResponse']:
'\n The reference to the private IP Address of the collector nic that will receive the tap.\n '
return pulumi.get(self, 'destination_network_interface_ip_configuration') | 743,069,686,348,307,300 | The reference to the private IP Address of the collector nic that will receive the tap. | sdk/python/pulumi_azure_native/network/v20200701/get_virtual_network_tap.py | destination_network_interface_ip_configuration | polivbr/pulumi-azure-native | python | @property
@pulumi.getter(name='destinationNetworkInterfaceIPConfiguration')
def destination_network_interface_ip_configuration(self) -> Optional['outputs.NetworkInterfaceIPConfigurationResponse']:
'\n \n '
return pulumi.get(self, 'destination_network_interface_ip_configuration') |
@property
@pulumi.getter(name='destinationPort')
def destination_port(self) -> Optional[int]:
'\n The VXLAN destination port that will receive the tapped traffic.\n '
return pulumi.get(self, 'destination_port') | 5,196,420,816,154,317,000 | The VXLAN destination port that will receive the tapped traffic. | sdk/python/pulumi_azure_native/network/v20200701/get_virtual_network_tap.py | destination_port | polivbr/pulumi-azure-native | python | @property
@pulumi.getter(name='destinationPort')
def destination_port(self) -> Optional[int]:
'\n \n '
return pulumi.get(self, 'destination_port') |
@property
@pulumi.getter
def etag(self) -> str:
'\n A unique read-only string that changes whenever the resource is updated.\n '
return pulumi.get(self, 'etag') | -4,757,010,955,465,940,000 | A unique read-only string that changes whenever the resource is updated. | sdk/python/pulumi_azure_native/network/v20200701/get_virtual_network_tap.py | etag | polivbr/pulumi-azure-native | python | @property
@pulumi.getter
def etag(self) -> str:
'\n \n '
return pulumi.get(self, 'etag') |
@property
@pulumi.getter
def id(self) -> Optional[str]:
'\n Resource ID.\n '
return pulumi.get(self, 'id') | 6,887,155,523,158,811,000 | Resource ID. | sdk/python/pulumi_azure_native/network/v20200701/get_virtual_network_tap.py | id | polivbr/pulumi-azure-native | python | @property
@pulumi.getter
def id(self) -> Optional[str]:
'\n \n '
return pulumi.get(self, 'id') |
@property
@pulumi.getter
def location(self) -> Optional[str]:
'\n Resource location.\n '
return pulumi.get(self, 'location') | 8,841,543,228,718,414,000 | Resource location. | sdk/python/pulumi_azure_native/network/v20200701/get_virtual_network_tap.py | location | polivbr/pulumi-azure-native | python | @property
@pulumi.getter
def location(self) -> Optional[str]:
'\n \n '
return pulumi.get(self, 'location') |
@property
@pulumi.getter
def name(self) -> str:
'\n Resource name.\n '
return pulumi.get(self, 'name') | -2,625,941,459,458,898,000 | Resource name. | sdk/python/pulumi_azure_native/network/v20200701/get_virtual_network_tap.py | name | polivbr/pulumi-azure-native | python | @property
@pulumi.getter
def name(self) -> str:
'\n \n '
return pulumi.get(self, 'name') |
@property
@pulumi.getter(name='networkInterfaceTapConfigurations')
def network_interface_tap_configurations(self) -> Sequence['outputs.NetworkInterfaceTapConfigurationResponse']:
'\n Specifies the list of resource IDs for the network interface IP configuration that needs to be tapped.\n '
return pulumi.get(self, 'network_interface_tap_configurations') | 1,679,524,962,285,324,500 | Specifies the list of resource IDs for the network interface IP configuration that needs to be tapped. | sdk/python/pulumi_azure_native/network/v20200701/get_virtual_network_tap.py | network_interface_tap_configurations | polivbr/pulumi-azure-native | python | @property
@pulumi.getter(name='networkInterfaceTapConfigurations')
def network_interface_tap_configurations(self) -> Sequence['outputs.NetworkInterfaceTapConfigurationResponse']:
'\n \n '
return pulumi.get(self, 'network_interface_tap_configurations') |
@property
@pulumi.getter(name='provisioningState')
def provisioning_state(self) -> str:
'\n The provisioning state of the virtual network tap resource.\n '
return pulumi.get(self, 'provisioning_state') | 6,734,607,647,379,689,000 | The provisioning state of the virtual network tap resource. | sdk/python/pulumi_azure_native/network/v20200701/get_virtual_network_tap.py | provisioning_state | polivbr/pulumi-azure-native | python | @property
@pulumi.getter(name='provisioningState')
def provisioning_state(self) -> str:
'\n \n '
return pulumi.get(self, 'provisioning_state') |
@property
@pulumi.getter(name='resourceGuid')
def resource_guid(self) -> str:
'\n The resource GUID property of the virtual network tap resource.\n '
return pulumi.get(self, 'resource_guid') | -8,738,393,843,981,600,000 | The resource GUID property of the virtual network tap resource. | sdk/python/pulumi_azure_native/network/v20200701/get_virtual_network_tap.py | resource_guid | polivbr/pulumi-azure-native | python | @property
@pulumi.getter(name='resourceGuid')
def resource_guid(self) -> str:
'\n \n '
return pulumi.get(self, 'resource_guid') |
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[(str, str)]]:
'\n Resource tags.\n '
return pulumi.get(self, 'tags') | 562,229,697,900,116,900 | Resource tags. | sdk/python/pulumi_azure_native/network/v20200701/get_virtual_network_tap.py | tags | polivbr/pulumi-azure-native | python | @property
@pulumi.getter
def tags(self) -> Optional[Mapping[(str, str)]]:
'\n \n '
return pulumi.get(self, 'tags') |
@property
@pulumi.getter
def type(self) -> str:
'\n Resource type.\n '
return pulumi.get(self, 'type') | -5,079,398,349,541,291,000 | Resource type. | sdk/python/pulumi_azure_native/network/v20200701/get_virtual_network_tap.py | type | polivbr/pulumi-azure-native | python | @property
@pulumi.getter
def type(self) -> str:
'\n \n '
return pulumi.get(self, 'type') |
def exclude(list, value=['-v', '-vv', '-vvv']):
'\n remove value from the list\n '
new = []
for item in list:
if (not (item in value)):
new.append(item)
return new | -7,035,163,786,894,781,000 | remove value from the list | runner/input.py | exclude | F0xedb/IS-HTTP-HEADER | python | def exclude(list, value=['-v', '-vv', '-vvv']):
'\n \n '
new = []
for item in list:
if (not (item in value)):
new.append(item)
return new |
def help():
'\n print the help menu and exit\n '
print('Usage: {} [-h | --help] [-vv] domain'.format(runner.config.NAME))
print('\t {} -h | --help \t\tprint this help menu'.format(runner.config.NAME))
print('\t {} -v <domain> \t\tEnable debug messages'.format(runner.config.NAME))
print('\t {} -vv <domain> \t\tEnable debug messages with more information'.format(runner.config.NAME))
print('\t {} <domain> \t\tperform a scan on the given domain/URI or URL\n'.format(runner.config.NAME))
print('Copyright Meyers Tom')
print('Licensed under the MIT License')
quit() | 3,205,780,205,608,500,700 | print the help menu and exit | runner/input.py | help | F0xedb/IS-HTTP-HEADER | python | def help():
'\n \n '
print('Usage: {} [-h | --help] [-vv] domain'.format(runner.config.NAME))
print('\t {} -h | --help \t\tprint this help menu'.format(runner.config.NAME))
print('\t {} -v <domain> \t\tEnable debug messages'.format(runner.config.NAME))
print('\t {} -vv <domain> \t\tEnable debug messages with more information'.format(runner.config.NAME))
print('\t {} <domain> \t\tperform a scan on the given domain/URI or URL\n'.format(runner.config.NAME))
print('Copyright Meyers Tom')
print('Licensed under the MIT License')
quit() |
def getInput():
'\n Check if the input is null. If that is the case simply listen for stdin\n Returns the input that it got eg a url, uri or domain\n Second return type is if debug messages should be enabled\n '
if (len(sys.argv) == 1):
return (fileinput.input()[0], False, False)
if (('-h' in sys.argv) or ('--help' in sys.argv)):
help()
domain = ''.join(exclude(sys.argv[1:], ['-v', '-vv']))
if (domain == ''):
print('Wrong input formation\n')
help()
return (domain, ('-v' in sys.argv), ('-vv' in sys.argv)) | 7,766,959,161,890,298,000 | Check if the input is null. If that is the case simply listen for stdin
Returns the input that it got eg a url, uri or domain
Second return type is if debug messages should be enabled | runner/input.py | getInput | F0xedb/IS-HTTP-HEADER | python | def getInput():
'\n Check if the input is null. If that is the case simply listen for stdin\n Returns the input that it got eg a url, uri or domain\n Second return type is if debug messages should be enabled\n '
if (len(sys.argv) == 1):
return (fileinput.input()[0], False, False)
if (('-h' in sys.argv) or ('--help' in sys.argv)):
help()
domain = .join(exclude(sys.argv[1:], ['-v', '-vv']))
if (domain == ):
print('Wrong input formation\n')
help()
return (domain, ('-v' in sys.argv), ('-vv' in sys.argv)) |
def connect_mandatory_exits(world, entrances, caves, must_be_exits, player, dp_must_exit=None):
'This works inplace'
random.shuffle(entrances)
random.shuffle(caves)
used_caves = []
while must_be_exits:
exit = must_be_exits.pop()
cave = None
for candidate in caves:
if (not isinstance(candidate, str)):
cave = candidate
break
if (cave is None):
raise RuntimeError('No more caves left. Should not happen!')
connect_two_way(world, exit, cave[(- 1)], player)
if (len(cave) == 2):
entrance = entrances.pop()
if ((world.mode[player] != 'inverted') and (entrance == 'Dark Death Mountain Ledge (West)')):
new_entrance = entrances.pop()
entrances.append(entrance)
entrance = new_entrance
if ((world.mode[player] == 'inverted') and (entrance == dp_must_exit)):
new_entrance = entrances.pop()
entrances.append(entrance)
entrance = new_entrance
connect_two_way(world, entrance, cave[0], player)
elif (cave[(- 1)] == 'Spectacle Rock Cave Exit'):
for exit in cave[:(- 1)]:
connect_two_way(world, entrances.pop(), exit, player)
else:
caves.append(cave[0:(- 1)])
random.shuffle(caves)
used_caves.append(cave[0:(- 1)])
caves.remove(cave)
for cave in used_caves:
if (cave in caves):
for exit in cave:
connect_two_way(world, entrances.pop(), exit, player)
caves.remove(cave) | -8,403,411,320,272,371,000 | This works inplace | EntranceShuffle.py | connect_mandatory_exits | Nathan-Carlson/ALttPDoorRandomizer | python | def connect_mandatory_exits(world, entrances, caves, must_be_exits, player, dp_must_exit=None):
random.shuffle(entrances)
random.shuffle(caves)
used_caves = []
while must_be_exits:
exit = must_be_exits.pop()
cave = None
for candidate in caves:
if (not isinstance(candidate, str)):
cave = candidate
break
if (cave is None):
raise RuntimeError('No more caves left. Should not happen!')
connect_two_way(world, exit, cave[(- 1)], player)
if (len(cave) == 2):
entrance = entrances.pop()
if ((world.mode[player] != 'inverted') and (entrance == 'Dark Death Mountain Ledge (West)')):
new_entrance = entrances.pop()
entrances.append(entrance)
entrance = new_entrance
if ((world.mode[player] == 'inverted') and (entrance == dp_must_exit)):
new_entrance = entrances.pop()
entrances.append(entrance)
entrance = new_entrance
connect_two_way(world, entrance, cave[0], player)
elif (cave[(- 1)] == 'Spectacle Rock Cave Exit'):
for exit in cave[:(- 1)]:
connect_two_way(world, entrances.pop(), exit, player)
else:
caves.append(cave[0:(- 1)])
random.shuffle(caves)
used_caves.append(cave[0:(- 1)])
caves.remove(cave)
for cave in used_caves:
if (cave in caves):
for exit in cave:
connect_two_way(world, entrances.pop(), exit, player)
caves.remove(cave) |
def connect_caves(world, lw_entrances, dw_entrances, caves, player):
'This works inplace'
random.shuffle(lw_entrances)
random.shuffle(dw_entrances)
random.shuffle(caves)
while caves:
cave_candidate = (None, 0)
for (i, cave) in enumerate(caves):
if isinstance(cave, str):
cave = (cave,)
if (len(cave) > cave_candidate[1]):
cave_candidate = (i, len(cave))
cave = caves.pop(cave_candidate[0])
target = (lw_entrances if (random.randint(0, 1) == 0) else dw_entrances)
if isinstance(cave, str):
cave = (cave,)
if (len(target) < len(cave)):
target = (lw_entrances if (target is dw_entrances) else dw_entrances)
for exit in cave:
connect_two_way(world, target.pop(), exit, player) | 1,495,098,255,793,150,200 | This works inplace | EntranceShuffle.py | connect_caves | Nathan-Carlson/ALttPDoorRandomizer | python | def connect_caves(world, lw_entrances, dw_entrances, caves, player):
random.shuffle(lw_entrances)
random.shuffle(dw_entrances)
random.shuffle(caves)
while caves:
cave_candidate = (None, 0)
for (i, cave) in enumerate(caves):
if isinstance(cave, str):
cave = (cave,)
if (len(cave) > cave_candidate[1]):
cave_candidate = (i, len(cave))
cave = caves.pop(cave_candidate[0])
target = (lw_entrances if (random.randint(0, 1) == 0) else dw_entrances)
if isinstance(cave, str):
cave = (cave,)
if (len(target) < len(cave)):
target = (lw_entrances if (target is dw_entrances) else dw_entrances)
for exit in cave:
connect_two_way(world, target.pop(), exit, player) |
def connect_doors(world, doors, targets, player):
'This works inplace'
random.shuffle(doors)
random.shuffle(targets)
while doors:
door = doors.pop()
target = targets.pop()
connect_entrance(world, door, target, player) | 4,035,181,009,632,101,000 | This works inplace | EntranceShuffle.py | connect_doors | Nathan-Carlson/ALttPDoorRandomizer | python | def connect_doors(world, doors, targets, player):
random.shuffle(doors)
random.shuffle(targets)
while doors:
door = doors.pop()
target = targets.pop()
connect_entrance(world, door, target, player) |
def to_device(data, device=None, dtype=None, non_blocking=False, copy=False):
'Change the device of object recursively'
if isinstance(data, dict):
return {k: to_device(v, device, dtype, non_blocking, copy) for (k, v) in data.items()}
elif (dataclasses.is_dataclass(data) and (not isinstance(data, type))):
return type(data)(*[to_device(v, device, dtype, non_blocking, copy) for v in dataclasses.astuple(data)])
elif (isinstance(data, tuple) and (type(data) is not tuple)):
return type(data)(*[to_device(o, device, dtype, non_blocking, copy) for o in data])
elif isinstance(data, (list, tuple)):
return type(data)((to_device(v, device, dtype, non_blocking, copy) for v in data))
elif isinstance(data, np.ndarray):
return to_device(torch.from_numpy(data), device, dtype, non_blocking, copy)
elif isinstance(data, torch.Tensor):
return data.to(device, dtype, non_blocking, copy)
else:
return data | -9,016,043,390,750,908,000 | Change the device of object recursively | espnet2/torch_utils/device_funcs.py | to_device | 18445864529/espnet | python | def to_device(data, device=None, dtype=None, non_blocking=False, copy=False):
if isinstance(data, dict):
return {k: to_device(v, device, dtype, non_blocking, copy) for (k, v) in data.items()}
elif (dataclasses.is_dataclass(data) and (not isinstance(data, type))):
return type(data)(*[to_device(v, device, dtype, non_blocking, copy) for v in dataclasses.astuple(data)])
elif (isinstance(data, tuple) and (type(data) is not tuple)):
return type(data)(*[to_device(o, device, dtype, non_blocking, copy) for o in data])
elif isinstance(data, (list, tuple)):
return type(data)((to_device(v, device, dtype, non_blocking, copy) for v in data))
elif isinstance(data, np.ndarray):
return to_device(torch.from_numpy(data), device, dtype, non_blocking, copy)
elif isinstance(data, torch.Tensor):
return data.to(device, dtype, non_blocking, copy)
else:
return data |
def force_gatherable(data, device):
'Change object to gatherable in torch.nn.DataParallel recursively\n\n The difference from to_device() is changing to torch.Tensor if float or int\n value is found.\n\n The restriction to the returned value in DataParallel:\n The object must be\n - torch.cuda.Tensor\n - 1 or more dimension. 0-dimension-tensor sends warning.\n or a list, tuple, dict.\n\n '
if isinstance(data, dict):
return {k: force_gatherable(v, device) for (k, v) in data.items()}
elif (isinstance(data, tuple) and (type(data) is not tuple)):
return type(data)(*[force_gatherable(o, device) for o in data])
elif isinstance(data, (list, tuple, set)):
return type(data)((force_gatherable(v, device) for v in data))
elif isinstance(data, np.ndarray):
return force_gatherable(torch.from_numpy(data), device)
elif isinstance(data, torch.Tensor):
if (data.dim() == 0):
data = data[None]
return data.to(device)
elif isinstance(data, float):
return torch.tensor([data], dtype=torch.float, device=device)
elif isinstance(data, int):
return torch.tensor([data], dtype=torch.long, device=device)
elif (data is None):
return None
else:
warnings.warn(f'{type(data)} may not be gatherable by DataParallel')
return data | -8,141,642,253,402,283,000 | Change object to gatherable in torch.nn.DataParallel recursively
The difference from to_device() is changing to torch.Tensor if float or int
value is found.
The restriction to the returned value in DataParallel:
The object must be
- torch.cuda.Tensor
- 1 or more dimension. 0-dimension-tensor sends warning.
or a list, tuple, dict. | espnet2/torch_utils/device_funcs.py | force_gatherable | 18445864529/espnet | python | def force_gatherable(data, device):
'Change object to gatherable in torch.nn.DataParallel recursively\n\n The difference from to_device() is changing to torch.Tensor if float or int\n value is found.\n\n The restriction to the returned value in DataParallel:\n The object must be\n - torch.cuda.Tensor\n - 1 or more dimension. 0-dimension-tensor sends warning.\n or a list, tuple, dict.\n\n '
if isinstance(data, dict):
return {k: force_gatherable(v, device) for (k, v) in data.items()}
elif (isinstance(data, tuple) and (type(data) is not tuple)):
return type(data)(*[force_gatherable(o, device) for o in data])
elif isinstance(data, (list, tuple, set)):
return type(data)((force_gatherable(v, device) for v in data))
elif isinstance(data, np.ndarray):
return force_gatherable(torch.from_numpy(data), device)
elif isinstance(data, torch.Tensor):
if (data.dim() == 0):
data = data[None]
return data.to(device)
elif isinstance(data, float):
return torch.tensor([data], dtype=torch.float, device=device)
elif isinstance(data, int):
return torch.tensor([data], dtype=torch.long, device=device)
elif (data is None):
return None
else:
warnings.warn(f'{type(data)} may not be gatherable by DataParallel')
return data |
def __init__(self, *, host: str=DEFAULT_HOST, credentials: ga_credentials.Credentials=None, credentials_file: Optional[str]=None, scopes: Optional[Sequence[str]]=None, quota_project_id: Optional[str]=None, client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool]=False, **kwargs) -> None:
"Instantiate the transport.\n\n Args:\n host (Optional[str]):\n The hostname to connect to.\n credentials (Optional[google.auth.credentials.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify the application to the service; if none\n are specified, the client will attempt to ascertain the\n credentials from the environment.\n credentials_file (Optional[str]): A file with credentials that can\n be loaded with :func:`google.auth.load_credentials_from_file`.\n This argument is mutually exclusive with credentials.\n scopes (Optional[Sequence[str]]): A list of scopes.\n quota_project_id (Optional[str]): An optional project to use for billing\n and quota.\n client_info (google.api_core.gapic_v1.client_info.ClientInfo):\n The client info used to send a user-agent string along with\n API requests. If ``None``, then default info will be used.\n Generally, you only need to set this if you're developing\n your own client library.\n always_use_jwt_access (Optional[bool]): Whether self signed JWT should\n be used for service account credentials.\n "
if (':' not in host):
host += ':443'
self._host = host
scopes_kwargs = {'scopes': scopes, 'default_scopes': self.AUTH_SCOPES}
self._scopes = scopes
if (credentials and credentials_file):
raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
if (credentials_file is not None):
(credentials, _) = google.auth.load_credentials_from_file(credentials_file, **scopes_kwargs, quota_project_id=quota_project_id)
elif (credentials is None):
(credentials, _) = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
if (always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, 'with_always_use_jwt_access')):
credentials = credentials.with_always_use_jwt_access(True)
self._credentials = credentials | 2,693,825,205,371,460,600 | Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials. | google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/base.py | __init__ | LaudateCorpus1/python-datalabeling | python | def __init__(self, *, host: str=DEFAULT_HOST, credentials: ga_credentials.Credentials=None, credentials_file: Optional[str]=None, scopes: Optional[Sequence[str]]=None, quota_project_id: Optional[str]=None, client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool]=False, **kwargs) -> None:
"Instantiate the transport.\n\n Args:\n host (Optional[str]):\n The hostname to connect to.\n credentials (Optional[google.auth.credentials.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify the application to the service; if none\n are specified, the client will attempt to ascertain the\n credentials from the environment.\n credentials_file (Optional[str]): A file with credentials that can\n be loaded with :func:`google.auth.load_credentials_from_file`.\n This argument is mutually exclusive with credentials.\n scopes (Optional[Sequence[str]]): A list of scopes.\n quota_project_id (Optional[str]): An optional project to use for billing\n and quota.\n client_info (google.api_core.gapic_v1.client_info.ClientInfo):\n The client info used to send a user-agent string along with\n API requests. If ``None``, then default info will be used.\n Generally, you only need to set this if you're developing\n your own client library.\n always_use_jwt_access (Optional[bool]): Whether self signed JWT should\n be used for service account credentials.\n "
if (':' not in host):
host += ':443'
self._host = host
scopes_kwargs = {'scopes': scopes, 'default_scopes': self.AUTH_SCOPES}
self._scopes = scopes
if (credentials and credentials_file):
raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
if (credentials_file is not None):
(credentials, _) = google.auth.load_credentials_from_file(credentials_file, **scopes_kwargs, quota_project_id=quota_project_id)
elif (credentials is None):
(credentials, _) = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
if (always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, 'with_always_use_jwt_access')):
credentials = credentials.with_always_use_jwt_access(True)
self._credentials = credentials |
def close(self):
'Closes resources associated with the transport.\n\n .. warning::\n Only call this method if the transport is NOT shared\n with other clients - this may cause errors in other clients!\n '
raise NotImplementedError() | 8,736,099,590,164,054,000 | Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients! | google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/base.py | close | LaudateCorpus1/python-datalabeling | python | def close(self):
'Closes resources associated with the transport.\n\n .. warning::\n Only call this method if the transport is NOT shared\n with other clients - this may cause errors in other clients!\n '
raise NotImplementedError() |
@property
def operations_client(self):
'Return the client designed to process long-running operations.'
raise NotImplementedError() | -4,809,876,076,924,260,000 | Return the client designed to process long-running operations. | google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/base.py | operations_client | LaudateCorpus1/python-datalabeling | python | @property
def operations_client(self):
raise NotImplementedError() |
def flatten_json(dictionary: dict) -> dict:
'Flatten a nested json file'
while True:
dictionary = dict(chain.from_iterable(starmap(_unpack, dictionary.items())))
if _atomic_values(dictionary):
break
return dictionary | 2,568,173,172,647,186,400 | Flatten a nested json file | activejson/flatten_json_tools.py | flatten_json | BentoBox-Project/activejson | python | def flatten_json(dictionary: dict) -> dict:
while True:
dictionary = dict(chain.from_iterable(starmap(_unpack, dictionary.items())))
if _atomic_values(dictionary):
break
return dictionary |
def _unpack(parent_key, parent_value):
'Unpack one level of nesting in json file'
if isinstance(parent_value, dict):
for (key, value) in parent_value.items():
(yield _process_dict_values(parent_key, key, value))
elif isinstance(parent_value, list):
for (i, value) in enumerate(parent_value):
(yield _proccess_list(parent_key, i, value))
else:
(yield (parent_key, parent_value)) | 5,847,600,174,646,479,000 | Unpack one level of nesting in json file | activejson/flatten_json_tools.py | _unpack | BentoBox-Project/activejson | python | def _unpack(parent_key, parent_value):
if isinstance(parent_value, dict):
for (key, value) in parent_value.items():
(yield _process_dict_values(parent_key, key, value))
elif isinstance(parent_value, list):
for (i, value) in enumerate(parent_value):
(yield _proccess_list(parent_key, i, value))
else:
(yield (parent_key, parent_value)) |
def _compute_covariance_(self):
'not used'
self.inv_cov = np.linalg.inv(self.covariance)
self._norm_factor = (np.sqrt(np.linalg.det(((2 * np.pi) * self.covariance))) * self.n) | -4,217,486,315,340,992,000 | not used | statsmodels/sandbox/nonparametric/kdecovclass.py | _compute_covariance_ | ADI10HERO/statsmodels | python | def _compute_covariance_(self):
self.inv_cov = np.linalg.inv(self.covariance)
self._norm_factor = (np.sqrt(np.linalg.det(((2 * np.pi) * self.covariance))) * self.n) |
def allow_relation(self, obj1, obj2, **hints):
'\n Allow relations if a model in the app is involved.\n '
if ((obj1._meta.app_label in self.apps) or (obj2._meta.app_label in self.apps)):
return True
return None | -7,350,398,318,565,561,000 | Allow relations if a model in the app is involved. | kyleandemily/rsvp/db_router.py | allow_relation | ehayne/KyleAndEmily | python | def allow_relation(self, obj1, obj2, **hints):
'\n \n '
if ((obj1._meta.app_label in self.apps) or (obj2._meta.app_label in self.apps)):
return True
return None |
def allow_syncdb(self, db, model):
'Make sure the apps we care about appear in the db'
if (model._meta.app_label in ['south']):
return True
if (db == self.using):
return (model._meta.app_label in self.apps)
elif (model._meta.app_label in self.apps):
return False
return None | -7,139,704,136,752,588,000 | Make sure the apps we care about appear in the db | kyleandemily/rsvp/db_router.py | allow_syncdb | ehayne/KyleAndEmily | python | def allow_syncdb(self, db, model):
if (model._meta.app_label in ['south']):
return True
if (db == self.using):
return (model._meta.app_label in self.apps)
elif (model._meta.app_label in self.apps):
return False
return None |
def find(func, iteratee):
'Returns the first element that match the query'
for value in iteratee:
if func(value):
return value
return None | 5,811,513,123,915,050,000 | Returns the first element that match the query | webapi/tools.py | find | JWebgames/WebAPI | python | def find(func, iteratee):
for value in iteratee:
if func(value):
return value
return None |
def cast(val, typ, *types):
'Cast a value to the given type. /!\\ Hack /!\\ '
if ((typ.__class__ in [Union.__class__, Optional.__class__]) and (len(typ.__args__) == 2) and (typ.__args__[1] is None)):
typ = typ.__args__[0]
elif (typ.__class__ == Union.__class__):
return cast(val, *typ.__args__)
if (typ.__class__ == List.__class__):
values = []
for element in val:
values.append(cast(element, typ.__args__[0]))
return values
types = ([typ] + list(types))
for typ in types:
try:
return typ(val)
except:
continue
raise TypeError('{} not castable in any of {{{}}}.'.format(val, types)) | -7,651,569,357,427,979,000 | Cast a value to the given type. /!\ Hack /!\ | webapi/tools.py | cast | JWebgames/WebAPI | python | def cast(val, typ, *types):
'Cast a value to the given type. /!\\ Hack /!\\ '
if ((typ.__class__ in [Union.__class__, Optional.__class__]) and (len(typ.__args__) == 2) and (typ.__args__[1] is None)):
typ = typ.__args__[0]
elif (typ.__class__ == Union.__class__):
return cast(val, *typ.__args__)
if (typ.__class__ == List.__class__):
values = []
for element in val:
values.append(cast(element, typ.__args__[0]))
return values
types = ([typ] + list(types))
for typ in types:
try:
return typ(val)
except:
continue
raise TypeError('{} not castable in any of {{{}}}.'.format(val, types)) |
def real_type(typ):
'Escape the type from Union and Optional. /!\\ Hack /!\\ '
if (typ.__class__ in [Union.__class__, Optional.__class__]):
return typ.__args__[0]
return typ | -7,790,184,980,972,846,000 | Escape the type from Union and Optional. /!\ Hack /!\ | webapi/tools.py | real_type | JWebgames/WebAPI | python | def real_type(typ):
'Escape the type from Union and Optional. /!\\ Hack /!\\ '
if (typ.__class__ in [Union.__class__, Optional.__class__]):
return typ.__args__[0]
return typ |
def root():
'Return the path of the package root'
return dirname(abspath(__file__)) | -3,393,299,980,272,062,500 | Return the path of the package root | webapi/tools.py | root | JWebgames/WebAPI | python | def root():
return dirname(abspath(__file__)) |
def generate_token(key, iat=None, exp_delta=timedelta(minutes=5), typ='player', tid=None, uid='00000000-0000-0000-0000-000000000000'):
'Generate a JSON Web Token'
if (iat is None):
iat = datetime.utcnow()
if (tid is None):
tid = str(uuid4())
return jwtlib.encode({'iss': 'webapi', 'sub': 'webgames', 'iat': iat, 'exp': (iat + exp_delta), 'jti': tid, 'typ': typ, 'uid': uid}, key, algorithm='HS256').decode() | -4,334,631,335,055,515,000 | Generate a JSON Web Token | webapi/tools.py | generate_token | JWebgames/WebAPI | python | def generate_token(key, iat=None, exp_delta=timedelta(minutes=5), typ='player', tid=None, uid='00000000-0000-0000-0000-000000000000'):
if (iat is None):
iat = datetime.utcnow()
if (tid is None):
tid = str(uuid4())
return jwtlib.encode({'iss': 'webapi', 'sub': 'webgames', 'iat': iat, 'exp': (iat + exp_delta), 'jti': tid, 'typ': typ, 'uid': uid}, key, algorithm='HS256').decode() |
def ask_bool(prompt):
'Ask a question to the user, retry until the reply is valid'
while True:
try:
return strtobool(input(('%s (yes/no) ' % prompt)).strip().casefold())
except ValueError:
continue | 2,350,650,505,838,491,600 | Ask a question to the user, retry until the reply is valid | webapi/tools.py | ask_bool | JWebgames/WebAPI | python | def ask_bool(prompt):
while True:
try:
return strtobool(input(('%s (yes/no) ' % prompt)).strip().casefold())
except ValueError:
continue |
def fake_async(func):
'Fake coroutine by awaiting asyncio.sleep(0)'
async def wrapped(*args, **kwargs):
'The faked coroutine'
(await sleep(0))
return func(*args, **kwargs)
return wrapped | -3,451,616,267,512,242,700 | Fake coroutine by awaiting asyncio.sleep(0) | webapi/tools.py | fake_async | JWebgames/WebAPI | python | def fake_async(func):
async def wrapped(*args, **kwargs):
'The faked coroutine'
(await sleep(0))
return func(*args, **kwargs)
return wrapped |
def lruc(coro, loop=get_event_loop()):
'Short version of loop.run_until_complete(coro)'
return loop.run_until_complete(coro) | 8,520,320,821,994,307,000 | Short version of loop.run_until_complete(coro) | webapi/tools.py | lruc | JWebgames/WebAPI | python | def lruc(coro, loop=get_event_loop()):
return loop.run_until_complete(coro) |
def async_partial(func, *args, **keywords):
'async functools.partial'
async def newfunc(*fargs, **fkeywords):
'the mocked function'
newkeywords = keywords.copy()
newkeywords.update(fkeywords)
return (await func(*args, *fargs, **newkeywords))
newfunc.func = func
newfunc.args = args
newfunc.keywords = keywords
return newfunc | -2,745,387,906,364,145,700 | async functools.partial | webapi/tools.py | async_partial | JWebgames/WebAPI | python | def async_partial(func, *args, **keywords):
async def newfunc(*fargs, **fkeywords):
'the mocked function'
newkeywords = keywords.copy()
newkeywords.update(fkeywords)
return (await func(*args, *fargs, **newkeywords))
newfunc.func = func
newfunc.args = args
newfunc.keywords = keywords
return newfunc |
def flush(self):
'Flush this BufferingHandler to all the delayed handlers.'
self.acquire()
try:
for handler in self.delayed_handlers:
for record in self.buffer:
if (record.levelno >= handler.level):
handler.handle(record)
self.buffer = []
finally:
self.release() | -904,547,716,423,307,500 | Flush this BufferingHandler to all the delayed handlers. | webapi/tools.py | flush | JWebgames/WebAPI | python | def flush(self):
self.acquire()
try:
for handler in self.delayed_handlers:
for record in self.buffer:
if (record.levelno >= handler.level):
handler.handle(record)
self.buffer = []
finally:
self.release() |
def __enter__(self):
'Replace the handlers by this BufferingHandler'
self.delayed_handlers.extend(self.delayed_logger.handlers)
self.delayed_logger.handlers.clear()
self.delayed_logger.addHandler(self)
return self | -3,614,360,796,317,711,000 | Replace the handlers by this BufferingHandler | webapi/tools.py | __enter__ | JWebgames/WebAPI | python | def __enter__(self):
self.delayed_handlers.extend(self.delayed_logger.handlers)
self.delayed_logger.handlers.clear()
self.delayed_logger.addHandler(self)
return self |
def __exit__(self, typ, val, traceback):
'Restore the handlers and flush this BufferingHandler'
self.delayed_logger.removeHandler(self)
self.delayed_logger.handlers.extend(self.delayed_handlers)
self.close() | 6,036,077,338,375,966,000 | Restore the handlers and flush this BufferingHandler | webapi/tools.py | __exit__ | JWebgames/WebAPI | python | def __exit__(self, typ, val, traceback):
self.delayed_logger.removeHandler(self)
self.delayed_logger.handlers.extend(self.delayed_handlers)
self.close() |
async def wrapped(*args, **kwargs):
'The faked coroutine'
(await sleep(0))
return func(*args, **kwargs) | -5,996,175,070,215,983,000 | The faked coroutine | webapi/tools.py | wrapped | JWebgames/WebAPI | python | async def wrapped(*args, **kwargs):
(await sleep(0))
return func(*args, **kwargs) |
async def newfunc(*fargs, **fkeywords):
'the mocked function'
newkeywords = keywords.copy()
newkeywords.update(fkeywords)
return (await func(*args, *fargs, **newkeywords)) | 2,898,546,621,943,174,000 | the mocked function | webapi/tools.py | newfunc | JWebgames/WebAPI | python | async def newfunc(*fargs, **fkeywords):
newkeywords = keywords.copy()
newkeywords.update(fkeywords)
return (await func(*args, *fargs, **newkeywords)) |
def __init__(self, options: argparse.Namespace) -> None:
'Initialize with the options parsed from config and cli.\n\n This also calls a hook, :meth:`after_init`, so subclasses do not need\n to call super to call this method.\n\n :param options:\n User specified configuration parsed from both configuration files\n and the command-line interface.\n :type options:\n :class:`argparse.Namespace`\n '
self.options = options
self.filename = options.output_file
self.output_fd: Optional[IO[str]] = None
self.newline = '\n'
self.after_init() | 5,970,333,299,775,267,000 | Initialize with the options parsed from config and cli.
This also calls a hook, :meth:`after_init`, so subclasses do not need
to call super to call this method.
:param options:
User specified configuration parsed from both configuration files
and the command-line interface.
:type options:
:class:`argparse.Namespace` | .venv/lib/python3.9/site-packages/flake8/formatting/base.py | __init__ | AWSCookbook/Databases | python | def __init__(self, options: argparse.Namespace) -> None:
'Initialize with the options parsed from config and cli.\n\n This also calls a hook, :meth:`after_init`, so subclasses do not need\n to call super to call this method.\n\n :param options:\n User specified configuration parsed from both configuration files\n and the command-line interface.\n :type options:\n :class:`argparse.Namespace`\n '
self.options = options
self.filename = options.output_file
self.output_fd: Optional[IO[str]] = None
self.newline = '\n'
self.after_init() |
def after_init(self) -> None:
'Initialize the formatter further.' | -8,280,229,469,268,128,000 | Initialize the formatter further. | .venv/lib/python3.9/site-packages/flake8/formatting/base.py | after_init | AWSCookbook/Databases | python | def after_init(self) -> None:
|
def beginning(self, filename: str) -> None:
"Notify the formatter that we're starting to process a file.\n\n :param str filename:\n The name of the file that Flake8 is beginning to report results\n from.\n " | -8,265,437,601,763,686,000 | Notify the formatter that we're starting to process a file.
:param str filename:
The name of the file that Flake8 is beginning to report results
from. | .venv/lib/python3.9/site-packages/flake8/formatting/base.py | beginning | AWSCookbook/Databases | python | def beginning(self, filename: str) -> None:
"Notify the formatter that we're starting to process a file.\n\n :param str filename:\n The name of the file that Flake8 is beginning to report results\n from.\n " |
def finished(self, filename: str) -> None:
"Notify the formatter that we've finished processing a file.\n\n :param str filename:\n The name of the file that Flake8 has finished reporting results\n from.\n " | 4,409,744,504,326,485,500 | Notify the formatter that we've finished processing a file.
:param str filename:
The name of the file that Flake8 has finished reporting results
from. | .venv/lib/python3.9/site-packages/flake8/formatting/base.py | finished | AWSCookbook/Databases | python | def finished(self, filename: str) -> None:
"Notify the formatter that we've finished processing a file.\n\n :param str filename:\n The name of the file that Flake8 has finished reporting results\n from.\n " |
def start(self) -> None:
'Prepare the formatter to receive input.\n\n This defaults to initializing :attr:`output_fd` if :attr:`filename`\n '
if self.filename:
dirname = os.path.dirname(os.path.abspath(self.filename))
os.makedirs(dirname, exist_ok=True)
self.output_fd = open(self.filename, 'a') | -2,026,882,811,889,377,000 | Prepare the formatter to receive input.
This defaults to initializing :attr:`output_fd` if :attr:`filename` | .venv/lib/python3.9/site-packages/flake8/formatting/base.py | start | AWSCookbook/Databases | python | def start(self) -> None:
'Prepare the formatter to receive input.\n\n This defaults to initializing :attr:`output_fd` if :attr:`filename`\n '
if self.filename:
dirname = os.path.dirname(os.path.abspath(self.filename))
os.makedirs(dirname, exist_ok=True)
self.output_fd = open(self.filename, 'a') |
def handle(self, error: 'Violation') -> None:
'Handle an error reported by Flake8.\n\n This defaults to calling :meth:`format`, :meth:`show_source`, and\n then :meth:`write`. To extend how errors are handled, override this\n method.\n\n :param error:\n This will be an instance of\n :class:`~flake8.style_guide.Violation`.\n :type error:\n flake8.style_guide.Violation\n '
line = self.format(error)
source = self.show_source(error)
self.write(line, source) | 8,289,702,982,474,264,000 | Handle an error reported by Flake8.
This defaults to calling :meth:`format`, :meth:`show_source`, and
then :meth:`write`. To extend how errors are handled, override this
method.
:param error:
This will be an instance of
:class:`~flake8.style_guide.Violation`.
:type error:
flake8.style_guide.Violation | .venv/lib/python3.9/site-packages/flake8/formatting/base.py | handle | AWSCookbook/Databases | python | def handle(self, error: 'Violation') -> None:
'Handle an error reported by Flake8.\n\n This defaults to calling :meth:`format`, :meth:`show_source`, and\n then :meth:`write`. To extend how errors are handled, override this\n method.\n\n :param error:\n This will be an instance of\n :class:`~flake8.style_guide.Violation`.\n :type error:\n flake8.style_guide.Violation\n '
line = self.format(error)
source = self.show_source(error)
self.write(line, source) |
def format(self, error: 'Violation') -> Optional[str]:
'Format an error reported by Flake8.\n\n This method **must** be implemented by subclasses.\n\n :param error:\n This will be an instance of\n :class:`~flake8.style_guide.Violation`.\n :type error:\n flake8.style_guide.Violation\n :returns:\n The formatted error string.\n :rtype:\n str\n '
raise NotImplementedError('Subclass of BaseFormatter did not implement format.') | -80,566,825,370,105,060 | Format an error reported by Flake8.
This method **must** be implemented by subclasses.
:param error:
This will be an instance of
:class:`~flake8.style_guide.Violation`.
:type error:
flake8.style_guide.Violation
:returns:
The formatted error string.
:rtype:
str | .venv/lib/python3.9/site-packages/flake8/formatting/base.py | format | AWSCookbook/Databases | python | def format(self, error: 'Violation') -> Optional[str]:
'Format an error reported by Flake8.\n\n This method **must** be implemented by subclasses.\n\n :param error:\n This will be an instance of\n :class:`~flake8.style_guide.Violation`.\n :type error:\n flake8.style_guide.Violation\n :returns:\n The formatted error string.\n :rtype:\n str\n '
raise NotImplementedError('Subclass of BaseFormatter did not implement format.') |
def show_statistics(self, statistics: 'Statistics') -> None:
'Format and print the statistics.'
for error_code in statistics.error_codes():
stats_for_error_code = statistics.statistics_for(error_code)
statistic = next(stats_for_error_code)
count = statistic.count
count += sum((stat.count for stat in stats_for_error_code))
self._write(f'{count:<5} {error_code} {statistic.message}') | 5,616,093,956,626,915,000 | Format and print the statistics. | .venv/lib/python3.9/site-packages/flake8/formatting/base.py | show_statistics | AWSCookbook/Databases | python | def show_statistics(self, statistics: 'Statistics') -> None:
for error_code in statistics.error_codes():
stats_for_error_code = statistics.statistics_for(error_code)
statistic = next(stats_for_error_code)
count = statistic.count
count += sum((stat.count for stat in stats_for_error_code))
self._write(f'{count:<5} {error_code} {statistic.message}') |
def show_benchmarks(self, benchmarks: List[Tuple[(str, float)]]) -> None:
'Format and print the benchmarks.'
float_format = '{value:<10.3} {statistic}'.format
int_format = '{value:<10} {statistic}'.format
for (statistic, value) in benchmarks:
if isinstance(value, int):
benchmark = int_format(statistic=statistic, value=value)
else:
benchmark = float_format(statistic=statistic, value=value)
self._write(benchmark) | -462,945,648,584,049,100 | Format and print the benchmarks. | .venv/lib/python3.9/site-packages/flake8/formatting/base.py | show_benchmarks | AWSCookbook/Databases | python | def show_benchmarks(self, benchmarks: List[Tuple[(str, float)]]) -> None:
float_format = '{value:<10.3} {statistic}'.format
int_format = '{value:<10} {statistic}'.format
for (statistic, value) in benchmarks:
if isinstance(value, int):
benchmark = int_format(statistic=statistic, value=value)
else:
benchmark = float_format(statistic=statistic, value=value)
self._write(benchmark) |
def show_source(self, error: 'Violation') -> Optional[str]:
'Show the physical line generating the error.\n\n This also adds an indicator for the particular part of the line that\n is reported as generating the problem.\n\n :param error:\n This will be an instance of\n :class:`~flake8.style_guide.Violation`.\n :type error:\n flake8.style_guide.Violation\n :returns:\n The formatted error string if the user wants to show the source.\n If the user does not want to show the source, this will return\n ``None``.\n :rtype:\n str\n '
if ((not self.options.show_source) or (error.physical_line is None)):
return ''
indent = ''.join(((c if c.isspace() else ' ') for c in error.physical_line[:(error.column_number - 1)]))
return f'{error.physical_line}{indent}^' | -5,564,130,577,554,606,000 | Show the physical line generating the error.
This also adds an indicator for the particular part of the line that
is reported as generating the problem.
:param error:
This will be an instance of
:class:`~flake8.style_guide.Violation`.
:type error:
flake8.style_guide.Violation
:returns:
The formatted error string if the user wants to show the source.
If the user does not want to show the source, this will return
``None``.
:rtype:
str | .venv/lib/python3.9/site-packages/flake8/formatting/base.py | show_source | AWSCookbook/Databases | python | def show_source(self, error: 'Violation') -> Optional[str]:
'Show the physical line generating the error.\n\n This also adds an indicator for the particular part of the line that\n is reported as generating the problem.\n\n :param error:\n This will be an instance of\n :class:`~flake8.style_guide.Violation`.\n :type error:\n flake8.style_guide.Violation\n :returns:\n The formatted error string if the user wants to show the source.\n If the user does not want to show the source, this will return\n ``None``.\n :rtype:\n str\n '
if ((not self.options.show_source) or (error.physical_line is None)):
return
indent = .join(((c if c.isspace() else ' ') for c in error.physical_line[:(error.column_number - 1)]))
return f'{error.physical_line}{indent}^' |
def _write(self, output: str) -> None:
'Handle logic of whether to use an output file or print().'
if (self.output_fd is not None):
self.output_fd.write((output + self.newline))
if ((self.output_fd is None) or self.options.tee):
sys.stdout.buffer.write((output.encode() + self.newline.encode())) | 8,018,973,449,682,329,000 | Handle logic of whether to use an output file or print(). | .venv/lib/python3.9/site-packages/flake8/formatting/base.py | _write | AWSCookbook/Databases | python | def _write(self, output: str) -> None:
if (self.output_fd is not None):
self.output_fd.write((output + self.newline))
if ((self.output_fd is None) or self.options.tee):
sys.stdout.buffer.write((output.encode() + self.newline.encode())) |
def write(self, line: Optional[str], source: Optional[str]) -> None:
'Write the line either to the output file or stdout.\n\n This handles deciding whether to write to a file or print to standard\n out for subclasses. Override this if you want behaviour that differs\n from the default.\n\n :param str line:\n The formatted string to print or write.\n :param str source:\n The source code that has been formatted and associated with the\n line of output.\n '
if line:
self._write(line)
if source:
self._write(source) | -1,437,962,729,121,187,300 | Write the line either to the output file or stdout.
This handles deciding whether to write to a file or print to standard
out for subclasses. Override this if you want behaviour that differs
from the default.
:param str line:
The formatted string to print or write.
:param str source:
The source code that has been formatted and associated with the
line of output. | .venv/lib/python3.9/site-packages/flake8/formatting/base.py | write | AWSCookbook/Databases | python | def write(self, line: Optional[str], source: Optional[str]) -> None:
'Write the line either to the output file or stdout.\n\n This handles deciding whether to write to a file or print to standard\n out for subclasses. Override this if you want behaviour that differs\n from the default.\n\n :param str line:\n The formatted string to print or write.\n :param str source:\n The source code that has been formatted and associated with the\n line of output.\n '
if line:
self._write(line)
if source:
self._write(source) |
def stop(self) -> None:
'Clean up after reporting is finished.'
if (self.output_fd is not None):
self.output_fd.close()
self.output_fd = None | -7,843,594,433,352,375,000 | Clean up after reporting is finished. | .venv/lib/python3.9/site-packages/flake8/formatting/base.py | stop | AWSCookbook/Databases | python | def stop(self) -> None:
if (self.output_fd is not None):
self.output_fd.close()
self.output_fd = None |
def import_module(module, min_module_version=None, min_python_version=None, warn_not_installed=None, warn_old_version=None, module_version_attr='__version__', module_version_attr_call_args=None, import_kwargs={}, catch=()):
"\n Import and return a module if it is installed.\n\n If the module is not installed, it returns None.\n\n A minimum version for the module can be given as the keyword argument\n min_module_version. This should be comparable against the module version.\n By default, module.__version__ is used to get the module version. To\n override this, set the module_version_attr keyword argument. If the\n attribute of the module to get the version should be called (e.g.,\n module.version()), then set module_version_attr_call_args to the args such\n that module.module_version_attr(*module_version_attr_call_args) returns the\n module's version.\n\n If the module version is less than min_module_version using the Python <\n comparison, None will be returned, even if the module is installed. You can\n use this to keep from importing an incompatible older version of a module.\n\n You can also specify a minimum Python version by using the\n min_python_version keyword argument. This should be comparable against\n sys.version_info.\n\n If the keyword argument warn_not_installed is set to True, the function will\n emit a UserWarning when the module is not installed.\n\n If the keyword argument warn_old_version is set to True, the function will\n emit a UserWarning when the library is installed, but cannot be imported\n because of the min_module_version or min_python_version options.\n\n Note that because of the way warnings are handled, a warning will be\n emitted for each module only once. You can change the default warning\n behavior by overriding the values of WARN_NOT_INSTALLED and WARN_OLD_VERSION\n in sympy.external.importtools. By default, WARN_NOT_INSTALLED is False and\n WARN_OLD_VERSION is True.\n\n This function uses __import__() to import the module. To pass additional\n options to __import__(), use the import_kwargs keyword argument. For\n example, to import a submodule A.B, you must pass a nonempty fromlist option\n to __import__. 
See the docstring of __import__().\n\n This catches ImportError to determine if the module is not installed. To\n catch additional errors, pass them as a tuple to the catch keyword\n argument.\n\n Examples\n ========\n\n >>> from sympy.external import import_module\n\n >>> numpy = import_module('numpy')\n\n >>> numpy = import_module('numpy', min_python_version=(2, 7),\n ... warn_old_version=False)\n\n >>> numpy = import_module('numpy', min_module_version='1.5',\n ... warn_old_version=False) # numpy.__version__ is a string\n\n >>> # gmpy does not have __version__, but it does have gmpy.version()\n\n >>> gmpy = import_module('gmpy', min_module_version='1.14',\n ... module_version_attr='version', module_version_attr_call_args=(),\n ... warn_old_version=False)\n\n >>> # To import a submodule, you must pass a nonempty fromlist to\n >>> # __import__(). The values do not matter.\n >>> p3 = import_module('mpl_toolkits.mplot3d',\n ... import_kwargs={'fromlist':['something']})\n\n >>> # matplotlib.pyplot can raise RuntimeError when the display cannot be opened\n >>> matplotlib = import_module('matplotlib',\n ... import_kwargs={'fromlist':['pyplot']}, catch=(RuntimeError,))\n\n "
warn_old_version = (WARN_OLD_VERSION if (WARN_OLD_VERSION is not None) else (warn_old_version or True))
warn_not_installed = (WARN_NOT_INSTALLED if (WARN_NOT_INSTALLED is not None) else (warn_not_installed or False))
import warnings
if min_python_version:
if (sys.version_info < min_python_version):
if warn_old_version:
warnings.warn(('Python version is too old to use %s (%s or newer required)' % (module, '.'.join(map(str, min_python_version)))), UserWarning, stacklevel=2)
return
if ((module == 'numpy') and ('__pypy__' in sys.builtin_module_names)):
return
try:
mod = __import__(module, **import_kwargs)
from_list = import_kwargs.get('fromlist', tuple())
for submod in from_list:
if ((submod == 'collections') and (mod.__name__ == 'matplotlib')):
__import__(((module + '.') + submod))
except ImportError:
if warn_not_installed:
warnings.warn(('%s module is not installed' % module), UserWarning, stacklevel=2)
return
except catch as e:
if warn_not_installed:
warnings.warn(('%s module could not be used (%s)' % (module, repr(e))), stacklevel=2)
return
if min_module_version:
modversion = getattr(mod, module_version_attr)
if (module_version_attr_call_args is not None):
modversion = modversion(*module_version_attr_call_args)
if (version_tuple(modversion) < version_tuple(min_module_version)):
if warn_old_version:
if isinstance(min_module_version, str):
verstr = min_module_version
elif isinstance(min_module_version, (tuple, list)):
verstr = '.'.join(map(str, min_module_version))
else:
verstr = str(min_module_version)
warnings.warn(('%s version is too old to use (%s or newer required)' % (module, verstr)), UserWarning, stacklevel=2)
return
return mod | -5,560,196,216,558,401,000 | Import and return a module if it is installed.
If the module is not installed, it returns None.
A minimum version for the module can be given as the keyword argument
min_module_version. This should be comparable against the module version.
By default, module.__version__ is used to get the module version. To
override this, set the module_version_attr keyword argument. If the
attribute of the module to get the version should be called (e.g.,
module.version()), then set module_version_attr_call_args to the args such
that module.module_version_attr(*module_version_attr_call_args) returns the
module's version.
If the module version is less than min_module_version using the Python <
comparison, None will be returned, even if the module is installed. You can
use this to keep from importing an incompatible older version of a module.
You can also specify a minimum Python version by using the
min_python_version keyword argument. This should be comparable against
sys.version_info.
If the keyword argument warn_not_installed is set to True, the function will
emit a UserWarning when the module is not installed.
If the keyword argument warn_old_version is set to True, the function will
emit a UserWarning when the library is installed, but cannot be imported
because of the min_module_version or min_python_version options.
Note that because of the way warnings are handled, a warning will be
emitted for each module only once. You can change the default warning
behavior by overriding the values of WARN_NOT_INSTALLED and WARN_OLD_VERSION
in sympy.external.importtools. By default, WARN_NOT_INSTALLED is False and
WARN_OLD_VERSION is True.
This function uses __import__() to import the module. To pass additional
options to __import__(), use the import_kwargs keyword argument. For
example, to import a submodule A.B, you must pass a nonempty fromlist option
to __import__. See the docstring of __import__().
This catches ImportError to determine if the module is not installed. To
catch additional errors, pass them as a tuple to the catch keyword
argument.
Examples
========
>>> from sympy.external import import_module
>>> numpy = import_module('numpy')
>>> numpy = import_module('numpy', min_python_version=(2, 7),
... warn_old_version=False)
>>> numpy = import_module('numpy', min_module_version='1.5',
... warn_old_version=False) # numpy.__version__ is a string
>>> # gmpy does not have __version__, but it does have gmpy.version()
>>> gmpy = import_module('gmpy', min_module_version='1.14',
... module_version_attr='version', module_version_attr_call_args=(),
... warn_old_version=False)
>>> # To import a submodule, you must pass a nonempty fromlist to
>>> # __import__(). The values do not matter.
>>> p3 = import_module('mpl_toolkits.mplot3d',
... import_kwargs={'fromlist':['something']})
>>> # matplotlib.pyplot can raise RuntimeError when the display cannot be opened
>>> matplotlib = import_module('matplotlib',
... import_kwargs={'fromlist':['pyplot']}, catch=(RuntimeError,)) | .environment/lib/python3.8/site-packages/sympy/external/importtools.py | import_module | LuisMi1245/QPath-and-Snakes | python | def import_module(module, min_module_version=None, min_python_version=None, warn_not_installed=None, warn_old_version=None, module_version_attr='__version__', module_version_attr_call_args=None, import_kwargs={}, catch=()):
"\n Import and return a module if it is installed.\n\n If the module is not installed, it returns None.\n\n A minimum version for the module can be given as the keyword argument\n min_module_version. This should be comparable against the module version.\n By default, module.__version__ is used to get the module version. To\n override this, set the module_version_attr keyword argument. If the\n attribute of the module to get the version should be called (e.g.,\n module.version()), then set module_version_attr_call_args to the args such\n that module.module_version_attr(*module_version_attr_call_args) returns the\n module's version.\n\n If the module version is less than min_module_version using the Python <\n comparison, None will be returned, even if the module is installed. You can\n use this to keep from importing an incompatible older version of a module.\n\n You can also specify a minimum Python version by using the\n min_python_version keyword argument. This should be comparable against\n sys.version_info.\n\n If the keyword argument warn_not_installed is set to True, the function will\n emit a UserWarning when the module is not installed.\n\n If the keyword argument warn_old_version is set to True, the function will\n emit a UserWarning when the library is installed, but cannot be imported\n because of the min_module_version or min_python_version options.\n\n Note that because of the way warnings are handled, a warning will be\n emitted for each module only once. You can change the default warning\n behavior by overriding the values of WARN_NOT_INSTALLED and WARN_OLD_VERSION\n in sympy.external.importtools. By default, WARN_NOT_INSTALLED is False and\n WARN_OLD_VERSION is True.\n\n This function uses __import__() to import the module. To pass additional\n options to __import__(), use the import_kwargs keyword argument. For\n example, to import a submodule A.B, you must pass a nonempty fromlist option\n to __import__. 
See the docstring of __import__().\n\n This catches ImportError to determine if the module is not installed. To\n catch additional errors, pass them as a tuple to the catch keyword\n argument.\n\n Examples\n ========\n\n >>> from sympy.external import import_module\n\n >>> numpy = import_module('numpy')\n\n >>> numpy = import_module('numpy', min_python_version=(2, 7),\n ... warn_old_version=False)\n\n >>> numpy = import_module('numpy', min_module_version='1.5',\n ... warn_old_version=False) # numpy.__version__ is a string\n\n >>> # gmpy does not have __version__, but it does have gmpy.version()\n\n >>> gmpy = import_module('gmpy', min_module_version='1.14',\n ... module_version_attr='version', module_version_attr_call_args=(),\n ... warn_old_version=False)\n\n >>> # To import a submodule, you must pass a nonempty fromlist to\n >>> # __import__(). The values do not matter.\n >>> p3 = import_module('mpl_toolkits.mplot3d',\n ... import_kwargs={'fromlist':['something']})\n\n >>> # matplotlib.pyplot can raise RuntimeError when the display cannot be opened\n >>> matplotlib = import_module('matplotlib',\n ... import_kwargs={'fromlist':['pyplot']}, catch=(RuntimeError,))\n\n "
warn_old_version = (WARN_OLD_VERSION if (WARN_OLD_VERSION is not None) else (warn_old_version or True))
warn_not_installed = (WARN_NOT_INSTALLED if (WARN_NOT_INSTALLED is not None) else (warn_not_installed or False))
import warnings
if min_python_version:
if (sys.version_info < min_python_version):
if warn_old_version:
warnings.warn(('Python version is too old to use %s (%s or newer required)' % (module, '.'.join(map(str, min_python_version)))), UserWarning, stacklevel=2)
return
if ((module == 'numpy') and ('__pypy__' in sys.builtin_module_names)):
return
try:
mod = __import__(module, **import_kwargs)
from_list = import_kwargs.get('fromlist', tuple())
for submod in from_list:
if ((submod == 'collections') and (mod.__name__ == 'matplotlib')):
__import__(((module + '.') + submod))
except ImportError:
if warn_not_installed:
warnings.warn(('%s module is not installed' % module), UserWarning, stacklevel=2)
return
except catch as e:
if warn_not_installed:
warnings.warn(('%s module could not be used (%s)' % (module, repr(e))), stacklevel=2)
return
if min_module_version:
modversion = getattr(mod, module_version_attr)
if (module_version_attr_call_args is not None):
modversion = modversion(*module_version_attr_call_args)
if (version_tuple(modversion) < version_tuple(min_module_version)):
if warn_old_version:
if isinstance(min_module_version, str):
verstr = min_module_version
elif isinstance(min_module_version, (tuple, list)):
verstr = '.'.join(map(str, min_module_version))
else:
verstr = str(min_module_version)
warnings.warn(('%s version is too old to use (%s or newer required)' % (module, verstr)), UserWarning, stacklevel=2)
return
return mod |
@remote_compatible
def test_ap_fragmentation_rts_set_high(dev, apdev):
'WPA2-PSK AP with fragmentation and RTS thresholds larger than frame length'
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['rts_threshold'] = '1000'
params['fragm_threshold'] = '2000'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, psk=passphrase, scan_freq='2412')
hwsim_utils.test_connectivity(dev[0], hapd)
dev[0].request('DISCONNECT')
hapd.disable()
hapd.set('fragm_threshold', '-1')
hapd.set('rts_threshold', '-1')
hapd.enable() | -1,689,783,130,872,543,200 | WPA2-PSK AP with fragmentation and RTS thresholds larger than frame length | tests/hwsim/test_ap_params.py | test_ap_fragmentation_rts_set_high | AreedAhmed/fragattacks | python | @remote_compatible
def test_ap_fragmentation_rts_set_high(dev, apdev):
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['rts_threshold'] = '1000'
params['fragm_threshold'] = '2000'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, psk=passphrase, scan_freq='2412')
hwsim_utils.test_connectivity(dev[0], hapd)
dev[0].request('DISCONNECT')
hapd.disable()
hapd.set('fragm_threshold', '-1')
hapd.set('rts_threshold', '-1')
hapd.enable() |
@remote_compatible
def test_ap_fragmentation_open(dev, apdev):
'Open AP with fragmentation threshold'
ssid = 'fragmentation'
params = {}
params['ssid'] = ssid
params['fragm_threshold'] = '1000'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412')
hwsim_utils.test_connectivity(dev[0], hapd)
dev[0].request('DISCONNECT')
hapd.disable()
hapd.set('fragm_threshold', '-1')
hapd.enable() | -7,394,483,077,532,986,000 | Open AP with fragmentation threshold | tests/hwsim/test_ap_params.py | test_ap_fragmentation_open | AreedAhmed/fragattacks | python | @remote_compatible
def test_ap_fragmentation_open(dev, apdev):
ssid = 'fragmentation'
params = {}
params['ssid'] = ssid
params['fragm_threshold'] = '1000'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412')
hwsim_utils.test_connectivity(dev[0], hapd)
dev[0].request('DISCONNECT')
hapd.disable()
hapd.set('fragm_threshold', '-1')
hapd.enable() |
@remote_compatible
def test_ap_fragmentation_wpa2(dev, apdev):
'WPA2-PSK AP with fragmentation threshold'
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['fragm_threshold'] = '1000'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, psk=passphrase, scan_freq='2412')
hwsim_utils.test_connectivity(dev[0], hapd)
dev[0].request('DISCONNECT')
hapd.disable()
hapd.set('fragm_threshold', '-1')
hapd.enable() | -7,620,590,295,070,773,000 | WPA2-PSK AP with fragmentation threshold | tests/hwsim/test_ap_params.py | test_ap_fragmentation_wpa2 | AreedAhmed/fragattacks | python | @remote_compatible
def test_ap_fragmentation_wpa2(dev, apdev):
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['fragm_threshold'] = '1000'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, psk=passphrase, scan_freq='2412')
hwsim_utils.test_connectivity(dev[0], hapd)
dev[0].request('DISCONNECT')
hapd.disable()
hapd.set('fragm_threshold', '-1')
hapd.enable() |
def test_ap_vendor_elements(dev, apdev):
'WPA2-PSK AP with vendor elements added'
bssid = apdev[0]['bssid']
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['vendor_elements'] = 'dd0411223301'
params['assocresp_elements'] = 'dd0411223302'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, psk=passphrase, scan_freq='2412')
bss = dev[0].get_bss(bssid)
if ('dd0411223301' not in bss['ie']):
raise Exception('Vendor element not shown in scan results')
hapd.set('vendor_elements', 'dd051122330203dd0400137400dd04001374ff')
if ('OK' not in hapd.request('UPDATE_BEACON')):
raise Exception('UPDATE_BEACON failed')
dev[1].scan_for_bss(apdev[0]['bssid'], freq='2412')
bss = dev[1].get_bss(bssid)
if ('dd0411223301' in bss['ie']):
raise Exception('Old vendor element still in scan results')
if ('dd051122330203' not in bss['ie']):
raise Exception('New vendor element not shown in scan results') | 7,576,574,619,357,614,000 | WPA2-PSK AP with vendor elements added | tests/hwsim/test_ap_params.py | test_ap_vendor_elements | AreedAhmed/fragattacks | python | def test_ap_vendor_elements(dev, apdev):
bssid = apdev[0]['bssid']
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['vendor_elements'] = 'dd0411223301'
params['assocresp_elements'] = 'dd0411223302'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, psk=passphrase, scan_freq='2412')
bss = dev[0].get_bss(bssid)
if ('dd0411223301' not in bss['ie']):
raise Exception('Vendor element not shown in scan results')
hapd.set('vendor_elements', 'dd051122330203dd0400137400dd04001374ff')
if ('OK' not in hapd.request('UPDATE_BEACON')):
raise Exception('UPDATE_BEACON failed')
dev[1].scan_for_bss(apdev[0]['bssid'], freq='2412')
bss = dev[1].get_bss(bssid)
if ('dd0411223301' in bss['ie']):
raise Exception('Old vendor element still in scan results')
if ('dd051122330203' not in bss['ie']):
raise Exception('New vendor element not shown in scan results') |
def test_ap_element_parse(dev, apdev):
'Information element parsing - extra coverage'
bssid = apdev[0]['bssid']
ssid = 'test-wpa2-psk'
params = {'ssid': ssid, 'vendor_elements': '380501020304059e009e009e009e009e009e00'}
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq='2412')
bss = dev[0].get_bss(bssid)
if ('38050102030405' not in bss['ie']):
raise Exception('Timeout element not shown in scan results') | 8,378,705,670,552,019,000 | Information element parsing - extra coverage | tests/hwsim/test_ap_params.py | test_ap_element_parse | AreedAhmed/fragattacks | python | def test_ap_element_parse(dev, apdev):
bssid = apdev[0]['bssid']
ssid = 'test-wpa2-psk'
params = {'ssid': ssid, 'vendor_elements': '380501020304059e009e009e009e009e009e00'}
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq='2412')
bss = dev[0].get_bss(bssid)
if ('38050102030405' not in bss['ie']):
raise Exception('Timeout element not shown in scan results') |
@remote_compatible
def test_ap_element_parse_oom(dev, apdev):
'Information element parsing OOM'
bssid = apdev[0]['bssid']
ssid = 'test-wpa2-psk'
params = {'ssid': ssid, 'vendor_elements': 'dd0d506f9a0a00000600411c440028'}
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq='2412')
with alloc_fail(dev[0], 1, 'wpabuf_alloc;ieee802_11_vendor_ie_concat'):
bss = dev[0].get_bss(bssid)
logger.info(str(bss)) | 5,307,229,635,925,322,000 | Information element parsing OOM | tests/hwsim/test_ap_params.py | test_ap_element_parse_oom | AreedAhmed/fragattacks | python | @remote_compatible
def test_ap_element_parse_oom(dev, apdev):
bssid = apdev[0]['bssid']
ssid = 'test-wpa2-psk'
params = {'ssid': ssid, 'vendor_elements': 'dd0d506f9a0a00000600411c440028'}
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq='2412')
with alloc_fail(dev[0], 1, 'wpabuf_alloc;ieee802_11_vendor_ie_concat'):
bss = dev[0].get_bss(bssid)
logger.info(str(bss)) |
def test_ap_country(dev, apdev):
'WPA2-PSK AP setting country code and using 5 GHz band'
try:
hapd = None
bssid = apdev[0]['bssid']
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['country_code'] = 'FI'
params['ieee80211d'] = '1'
params['hw_mode'] = 'a'
params['channel'] = '36'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, psk=passphrase, scan_freq='5180')
hwsim_utils.test_connectivity(dev[0], hapd)
finally:
if hapd:
hapd.request('DISABLE')
dev[0].disconnect_and_stop_scan()
hostapd.cmd_execute(apdev[0], ['iw', 'reg', 'set', '00'])
dev[0].wait_event(['CTRL-EVENT-REGDOM-CHANGE'], timeout=0.5)
dev[0].flush_scan_cache() | -7,713,962,687,290,602,000 | WPA2-PSK AP setting country code and using 5 GHz band | tests/hwsim/test_ap_params.py | test_ap_country | AreedAhmed/fragattacks | python | def test_ap_country(dev, apdev):
try:
hapd = None
bssid = apdev[0]['bssid']
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['country_code'] = 'FI'
params['ieee80211d'] = '1'
params['hw_mode'] = 'a'
params['channel'] = '36'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, psk=passphrase, scan_freq='5180')
hwsim_utils.test_connectivity(dev[0], hapd)
finally:
if hapd:
hapd.request('DISABLE')
dev[0].disconnect_and_stop_scan()
hostapd.cmd_execute(apdev[0], ['iw', 'reg', 'set', '00'])
dev[0].wait_event(['CTRL-EVENT-REGDOM-CHANGE'], timeout=0.5)
dev[0].flush_scan_cache() |
def test_ap_acl_accept(dev, apdev):
'MAC ACL accept list'
ssid = 'acl'
params = {}
filename = hostapd.acl_file(dev, apdev, 'hostapd.macaddr')
hostapd.send_file(apdev[0], filename, filename)
params['ssid'] = ssid
params['accept_mac_file'] = filename
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq='2412')
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412')
dev[1].scan_for_bss(apdev[0]['bssid'], freq='2412')
dev[1].connect(ssid, key_mgmt='NONE', scan_freq='2412')
dev[0].request('REMOVE_NETWORK all')
dev[1].request('REMOVE_NETWORK all')
hapd.request('SET macaddr_acl 1')
dev[1].dump_monitor()
dev[1].connect(ssid, key_mgmt='NONE', scan_freq='2412', wait_connect=False)
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412')
ev = dev[1].wait_event(['CTRL-EVENT-CONNECTED'], timeout=1)
if (ev is not None):
raise Exception('Unexpected association')
if filename.startswith('/tmp/'):
os.unlink(filename) | 8,061,923,139,287,026,000 | MAC ACL accept list | tests/hwsim/test_ap_params.py | test_ap_acl_accept | AreedAhmed/fragattacks | python | def test_ap_acl_accept(dev, apdev):
ssid = 'acl'
params = {}
filename = hostapd.acl_file(dev, apdev, 'hostapd.macaddr')
hostapd.send_file(apdev[0], filename, filename)
params['ssid'] = ssid
params['accept_mac_file'] = filename
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq='2412')
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412')
dev[1].scan_for_bss(apdev[0]['bssid'], freq='2412')
dev[1].connect(ssid, key_mgmt='NONE', scan_freq='2412')
dev[0].request('REMOVE_NETWORK all')
dev[1].request('REMOVE_NETWORK all')
hapd.request('SET macaddr_acl 1')
dev[1].dump_monitor()
dev[1].connect(ssid, key_mgmt='NONE', scan_freq='2412', wait_connect=False)
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412')
ev = dev[1].wait_event(['CTRL-EVENT-CONNECTED'], timeout=1)
if (ev is not None):
raise Exception('Unexpected association')
if filename.startswith('/tmp/'):
os.unlink(filename) |
def test_ap_acl_deny(dev, apdev):
'MAC ACL deny list'
ssid = 'acl'
params = {}
filename = hostapd.acl_file(dev, apdev, 'hostapd.macaddr')
hostapd.send_file(apdev[0], filename, filename)
params['ssid'] = ssid
params['deny_mac_file'] = filename
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq='2412', passive=True)
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412', wait_connect=False)
dev[1].scan_for_bss(apdev[0]['bssid'], freq='2412')
dev[1].connect(ssid, key_mgmt='NONE', scan_freq='2412')
ev = dev[0].wait_event(['CTRL-EVENT-CONNECTED'], timeout=1)
if (ev is not None):
raise Exception('Unexpected association')
if filename.startswith('/tmp/'):
os.unlink(filename) | -3,875,011,034,569,438,000 | MAC ACL deny list | tests/hwsim/test_ap_params.py | test_ap_acl_deny | AreedAhmed/fragattacks | python | def test_ap_acl_deny(dev, apdev):
ssid = 'acl'
params = {}
filename = hostapd.acl_file(dev, apdev, 'hostapd.macaddr')
hostapd.send_file(apdev[0], filename, filename)
params['ssid'] = ssid
params['deny_mac_file'] = filename
hapd = hostapd.add_ap(apdev[0], params)
dev[0].scan_for_bss(apdev[0]['bssid'], freq='2412', passive=True)
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412', wait_connect=False)
dev[1].scan_for_bss(apdev[0]['bssid'], freq='2412')
dev[1].connect(ssid, key_mgmt='NONE', scan_freq='2412')
ev = dev[0].wait_event(['CTRL-EVENT-CONNECTED'], timeout=1)
if (ev is not None):
raise Exception('Unexpected association')
if filename.startswith('/tmp/'):
os.unlink(filename) |
def test_ap_acl_mgmt(dev, apdev):
'MAC ACL accept/deny management'
ssid = 'acl'
params = {}
filename = hostapd.acl_file(dev, apdev, 'hostapd.macaddr')
hostapd.send_file(apdev[0], filename, filename)
params['ssid'] = ssid
params['deny_mac_file'] = filename
hapd = hostapd.add_ap(apdev[0], params)
accept = hapd.request('ACCEPT_ACL SHOW').splitlines()
logger.info(('accept: ' + str(accept)))
deny = hapd.request('DENY_ACL SHOW').splitlines()
logger.info(('deny: ' + str(deny)))
if (len(accept) != 0):
raise Exception('Unexpected number of accept entries')
if (len(deny) != 3):
raise Exception('Unexpected number of deny entries')
if ('01:01:01:01:01:01 VLAN_ID=0' not in deny):
raise Exception('Missing deny entry')
hapd.request('ACCEPT_ACL ADD_MAC 22:33:44:55:66:77')
hapd.request('DENY_ACL ADD_MAC 22:33:44:55:66:88 VLAN_ID=2')
accept = hapd.request('ACCEPT_ACL SHOW').splitlines()
logger.info(('accept: ' + str(accept)))
deny = hapd.request('DENY_ACL SHOW').splitlines()
logger.info(('deny: ' + str(deny)))
if (len(accept) != 1):
raise Exception('Unexpected number of accept entries (2)')
if (len(deny) != 4):
raise Exception('Unexpected number of deny entries (2)')
if ('01:01:01:01:01:01 VLAN_ID=0' not in deny):
raise Exception('Missing deny entry (2)')
if ('22:33:44:55:66:88 VLAN_ID=2' not in deny):
raise Exception('Missing deny entry (2)')
if ('22:33:44:55:66:77 VLAN_ID=0' not in accept):
raise Exception('Missing accept entry (2)')
hapd.request('ACCEPT_ACL DEL_MAC 22:33:44:55:66:77')
hapd.request('DENY_ACL DEL_MAC 22:33:44:55:66:88')
accept = hapd.request('ACCEPT_ACL SHOW').splitlines()
logger.info(('accept: ' + str(accept)))
deny = hapd.request('DENY_ACL SHOW').splitlines()
logger.info(('deny: ' + str(deny)))
if (len(accept) != 0):
raise Exception('Unexpected number of accept entries (3)')
if (len(deny) != 3):
raise Exception('Unexpected number of deny entries (3)')
if ('01:01:01:01:01:01 VLAN_ID=0' not in deny):
raise Exception('Missing deny entry (3)')
hapd.request('ACCEPT_ACL CLEAR')
hapd.request('DENY_ACL CLEAR')
accept = hapd.request('ACCEPT_ACL SHOW').splitlines()
logger.info(('accept: ' + str(accept)))
deny = hapd.request('DENY_ACL SHOW').splitlines()
logger.info(('deny: ' + str(deny)))
if (len(accept) != 0):
raise Exception('Unexpected number of accept entries (4)')
if (len(deny) != 0):
raise Exception('Unexpected number of deny entries (4)')
dev[0].scan_for_bss(apdev[0]['bssid'], freq='2412')
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412')
dev[0].dump_monitor()
hapd.request(('DENY_ACL ADD_MAC ' + dev[0].own_addr()))
dev[0].wait_disconnected()
dev[0].request('DISCONNECT')
if filename.startswith('/tmp/'):
os.unlink(filename) | 6,064,152,405,938,550,000 | MAC ACL accept/deny management | tests/hwsim/test_ap_params.py | test_ap_acl_mgmt | AreedAhmed/fragattacks | python | def test_ap_acl_mgmt(dev, apdev):
ssid = 'acl'
params = {}
filename = hostapd.acl_file(dev, apdev, 'hostapd.macaddr')
hostapd.send_file(apdev[0], filename, filename)
params['ssid'] = ssid
params['deny_mac_file'] = filename
hapd = hostapd.add_ap(apdev[0], params)
accept = hapd.request('ACCEPT_ACL SHOW').splitlines()
logger.info(('accept: ' + str(accept)))
deny = hapd.request('DENY_ACL SHOW').splitlines()
logger.info(('deny: ' + str(deny)))
if (len(accept) != 0):
raise Exception('Unexpected number of accept entries')
if (len(deny) != 3):
raise Exception('Unexpected number of deny entries')
if ('01:01:01:01:01:01 VLAN_ID=0' not in deny):
raise Exception('Missing deny entry')
hapd.request('ACCEPT_ACL ADD_MAC 22:33:44:55:66:77')
hapd.request('DENY_ACL ADD_MAC 22:33:44:55:66:88 VLAN_ID=2')
accept = hapd.request('ACCEPT_ACL SHOW').splitlines()
logger.info(('accept: ' + str(accept)))
deny = hapd.request('DENY_ACL SHOW').splitlines()
logger.info(('deny: ' + str(deny)))
if (len(accept) != 1):
raise Exception('Unexpected number of accept entries (2)')
if (len(deny) != 4):
raise Exception('Unexpected number of deny entries (2)')
if ('01:01:01:01:01:01 VLAN_ID=0' not in deny):
raise Exception('Missing deny entry (2)')
if ('22:33:44:55:66:88 VLAN_ID=2' not in deny):
raise Exception('Missing deny entry (2)')
if ('22:33:44:55:66:77 VLAN_ID=0' not in accept):
raise Exception('Missing accept entry (2)')
hapd.request('ACCEPT_ACL DEL_MAC 22:33:44:55:66:77')
hapd.request('DENY_ACL DEL_MAC 22:33:44:55:66:88')
accept = hapd.request('ACCEPT_ACL SHOW').splitlines()
logger.info(('accept: ' + str(accept)))
deny = hapd.request('DENY_ACL SHOW').splitlines()
logger.info(('deny: ' + str(deny)))
if (len(accept) != 0):
raise Exception('Unexpected number of accept entries (3)')
if (len(deny) != 3):
raise Exception('Unexpected number of deny entries (3)')
if ('01:01:01:01:01:01 VLAN_ID=0' not in deny):
raise Exception('Missing deny entry (3)')
hapd.request('ACCEPT_ACL CLEAR')
hapd.request('DENY_ACL CLEAR')
accept = hapd.request('ACCEPT_ACL SHOW').splitlines()
logger.info(('accept: ' + str(accept)))
deny = hapd.request('DENY_ACL SHOW').splitlines()
logger.info(('deny: ' + str(deny)))
if (len(accept) != 0):
raise Exception('Unexpected number of accept entries (4)')
if (len(deny) != 0):
raise Exception('Unexpected number of deny entries (4)')
dev[0].scan_for_bss(apdev[0]['bssid'], freq='2412')
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412')
dev[0].dump_monitor()
hapd.request(('DENY_ACL ADD_MAC ' + dev[0].own_addr()))
dev[0].wait_disconnected()
dev[0].request('DISCONNECT')
if filename.startswith('/tmp/'):
os.unlink(filename) |
@remote_compatible
def test_ap_wds_sta(dev, apdev):
'WPA2-PSK AP with STA using 4addr mode'
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['wds_sta'] = '1'
params['wds_bridge'] = 'wds-br0'
hapd = hostapd.add_ap(apdev[0], params)
try:
dev[0].cmd_execute(['brctl', 'addbr', 'wds-br0'])
dev[0].cmd_execute(['brctl', 'setfd', 'wds-br0', '0'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'up'])
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'on'])
dev[0].connect(ssid, psk=passphrase, scan_freq='2412')
ev = hapd.wait_event(['WDS-STA-INTERFACE-ADDED'], timeout=10)
if (ev is None):
raise Exception('No WDS-STA-INTERFACE-ADDED event seen')
if (('sta_addr=' + dev[0].own_addr()) not in ev):
raise Exception(('No sta_addr match in ' + ev))
if ((('ifname=' + hapd.ifname) + '.sta') not in ev):
raise Exception(('No ifname match in ' + ev))
sta = hapd.get_sta(dev[0].own_addr())
if ('wds_sta_ifname' not in sta):
raise Exception('Missing wds_sta_ifname in STA data')
if (('ifname=' + sta['wds_sta_ifname']) not in ev):
raise Exception(('wds_sta_ifname %s not in event: %s' % (sta['wds_sta_ifname'], ev)))
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
dev[0].request('REATTACH')
dev[0].wait_connected()
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
dev[0].request('SET reassoc_same_bss_optim 1')
dev[0].request('REATTACH')
dev[0].wait_connected()
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=5, timeout=1)
finally:
dev[0].request('SET reassoc_same_bss_optim 0')
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'off'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'down'])
dev[0].cmd_execute(['brctl', 'delbr', 'wds-br0']) | -2,695,078,478,137,275,000 | WPA2-PSK AP with STA using 4addr mode | tests/hwsim/test_ap_params.py | test_ap_wds_sta | AreedAhmed/fragattacks | python | @remote_compatible
def test_ap_wds_sta(dev, apdev):
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['wds_sta'] = '1'
params['wds_bridge'] = 'wds-br0'
hapd = hostapd.add_ap(apdev[0], params)
try:
dev[0].cmd_execute(['brctl', 'addbr', 'wds-br0'])
dev[0].cmd_execute(['brctl', 'setfd', 'wds-br0', '0'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'up'])
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'on'])
dev[0].connect(ssid, psk=passphrase, scan_freq='2412')
ev = hapd.wait_event(['WDS-STA-INTERFACE-ADDED'], timeout=10)
if (ev is None):
raise Exception('No WDS-STA-INTERFACE-ADDED event seen')
if (('sta_addr=' + dev[0].own_addr()) not in ev):
raise Exception(('No sta_addr match in ' + ev))
if ((('ifname=' + hapd.ifname) + '.sta') not in ev):
raise Exception(('No ifname match in ' + ev))
sta = hapd.get_sta(dev[0].own_addr())
if ('wds_sta_ifname' not in sta):
raise Exception('Missing wds_sta_ifname in STA data')
if (('ifname=' + sta['wds_sta_ifname']) not in ev):
raise Exception(('wds_sta_ifname %s not in event: %s' % (sta['wds_sta_ifname'], ev)))
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
dev[0].request('REATTACH')
dev[0].wait_connected()
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
dev[0].request('SET reassoc_same_bss_optim 1')
dev[0].request('REATTACH')
dev[0].wait_connected()
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=5, timeout=1)
finally:
dev[0].request('SET reassoc_same_bss_optim 0')
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'off'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'down'])
dev[0].cmd_execute(['brctl', 'delbr', 'wds-br0']) |
def test_ap_wds_sta_eap(dev, apdev):
'WPA2-EAP AP with STA using 4addr mode'
ssid = 'test-wpa2-eap'
params = hostapd.wpa2_eap_params(ssid=ssid)
params['wds_sta'] = '1'
params['wds_bridge'] = 'wds-br0'
hapd = hostapd.add_ap(apdev[0], params)
try:
dev[0].cmd_execute(['brctl', 'addbr', 'wds-br0'])
dev[0].cmd_execute(['brctl', 'setfd', 'wds-br0', '0'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'up'])
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'on'])
dev[0].connect(ssid, key_mgmt='WPA-EAP', eap='GPSK', identity='gpsk user', password='EXAMPLE_KEY', scan_freq='2412')
ev = hapd.wait_event(['WDS-STA-INTERFACE-ADDED'], timeout=10)
if (ev is None):
raise Exception('No WDS-STA-INTERFACE-ADDED event seen')
if (('sta_addr=' + dev[0].own_addr()) not in ev):
raise Exception(('No sta_addr match in ' + ev))
if ((('ifname=' + hapd.ifname) + '.sta') not in ev):
raise Exception(('No ifname match in ' + ev))
sta = hapd.get_sta(dev[0].own_addr())
if ('wds_sta_ifname' not in sta):
raise Exception('Missing wds_sta_ifname in STA data')
if (('ifname=' + sta['wds_sta_ifname']) not in ev):
raise Exception(('wds_sta_ifname %s not in event: %s' % (sta['wds_sta_ifname'], ev)))
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
finally:
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'off'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'down'])
dev[0].cmd_execute(['brctl', 'delbr', 'wds-br0']) | -2,112,182,697,049,031,700 | WPA2-EAP AP with STA using 4addr mode | tests/hwsim/test_ap_params.py | test_ap_wds_sta_eap | AreedAhmed/fragattacks | python | def test_ap_wds_sta_eap(dev, apdev):
ssid = 'test-wpa2-eap'
params = hostapd.wpa2_eap_params(ssid=ssid)
params['wds_sta'] = '1'
params['wds_bridge'] = 'wds-br0'
hapd = hostapd.add_ap(apdev[0], params)
try:
dev[0].cmd_execute(['brctl', 'addbr', 'wds-br0'])
dev[0].cmd_execute(['brctl', 'setfd', 'wds-br0', '0'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'up'])
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'on'])
dev[0].connect(ssid, key_mgmt='WPA-EAP', eap='GPSK', identity='gpsk user', password='EXAMPLE_KEY', scan_freq='2412')
ev = hapd.wait_event(['WDS-STA-INTERFACE-ADDED'], timeout=10)
if (ev is None):
raise Exception('No WDS-STA-INTERFACE-ADDED event seen')
if (('sta_addr=' + dev[0].own_addr()) not in ev):
raise Exception(('No sta_addr match in ' + ev))
if ((('ifname=' + hapd.ifname) + '.sta') not in ev):
raise Exception(('No ifname match in ' + ev))
sta = hapd.get_sta(dev[0].own_addr())
if ('wds_sta_ifname' not in sta):
raise Exception('Missing wds_sta_ifname in STA data')
if (('ifname=' + sta['wds_sta_ifname']) not in ev):
raise Exception(('wds_sta_ifname %s not in event: %s' % (sta['wds_sta_ifname'], ev)))
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
finally:
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'off'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'down'])
dev[0].cmd_execute(['brctl', 'delbr', 'wds-br0']) |
def test_ap_wds_sta_open(dev, apdev):
'Open AP with STA using 4addr mode'
ssid = 'test-wds-open'
params = {}
params['ssid'] = ssid
params['wds_sta'] = '1'
params['wds_bridge'] = 'wds-br0'
hapd = hostapd.add_ap(apdev[0], params)
try:
dev[0].cmd_execute(['brctl', 'addbr', 'wds-br0'])
dev[0].cmd_execute(['brctl', 'setfd', 'wds-br0', '0'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'up'])
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'on'])
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412')
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
dev[0].request('REATTACH')
dev[0].wait_connected()
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
dev[0].request('SET reassoc_same_bss_optim 1')
dev[0].request('REATTACH')
dev[0].wait_connected()
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=5, timeout=1)
finally:
dev[0].request('SET reassoc_same_bss_optim 0')
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'off'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'down'])
dev[0].cmd_execute(['brctl', 'delbr', 'wds-br0']) | -5,671,805,850,814,587,000 | Open AP with STA using 4addr mode | tests/hwsim/test_ap_params.py | test_ap_wds_sta_open | AreedAhmed/fragattacks | python | def test_ap_wds_sta_open(dev, apdev):
ssid = 'test-wds-open'
params = {}
params['ssid'] = ssid
params['wds_sta'] = '1'
params['wds_bridge'] = 'wds-br0'
hapd = hostapd.add_ap(apdev[0], params)
try:
dev[0].cmd_execute(['brctl', 'addbr', 'wds-br0'])
dev[0].cmd_execute(['brctl', 'setfd', 'wds-br0', '0'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'up'])
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'on'])
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412')
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
dev[0].request('REATTACH')
dev[0].wait_connected()
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
dev[0].request('SET reassoc_same_bss_optim 1')
dev[0].request('REATTACH')
dev[0].wait_connected()
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=5, timeout=1)
finally:
dev[0].request('SET reassoc_same_bss_optim 0')
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'off'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'down'])
dev[0].cmd_execute(['brctl', 'delbr', 'wds-br0']) |
def test_ap_wds_sta_wep(dev, apdev):
'WEP AP with STA using 4addr mode'
check_wep_capa(dev[0])
ssid = 'test-wds-wep'
params = {}
params['ssid'] = ssid
params['ieee80211n'] = '0'
params['wep_key0'] = '"hello"'
params['wds_sta'] = '1'
params['wds_bridge'] = 'wds-br0'
hapd = hostapd.add_ap(apdev[0], params)
try:
dev[0].cmd_execute(['brctl', 'addbr', 'wds-br0'])
dev[0].cmd_execute(['brctl', 'setfd', 'wds-br0', '0'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'up'])
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'on'])
dev[0].connect(ssid, key_mgmt='NONE', wep_key0='"hello"', scan_freq='2412')
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
dev[0].request('REATTACH')
dev[0].wait_connected()
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
dev[0].request('SET reassoc_same_bss_optim 1')
dev[0].request('REATTACH')
dev[0].wait_connected()
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=5, timeout=1)
finally:
dev[0].request('SET reassoc_same_bss_optim 0')
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'off'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'down'])
dev[0].cmd_execute(['brctl', 'delbr', 'wds-br0']) | 894,608,105,446,954,100 | WEP AP with STA using 4addr mode | tests/hwsim/test_ap_params.py | test_ap_wds_sta_wep | AreedAhmed/fragattacks | python | def test_ap_wds_sta_wep(dev, apdev):
check_wep_capa(dev[0])
ssid = 'test-wds-wep'
params = {}
params['ssid'] = ssid
params['ieee80211n'] = '0'
params['wep_key0'] = '"hello"'
params['wds_sta'] = '1'
params['wds_bridge'] = 'wds-br0'
hapd = hostapd.add_ap(apdev[0], params)
try:
dev[0].cmd_execute(['brctl', 'addbr', 'wds-br0'])
dev[0].cmd_execute(['brctl', 'setfd', 'wds-br0', '0'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'up'])
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'on'])
dev[0].connect(ssid, key_mgmt='NONE', wep_key0='"hello"', scan_freq='2412')
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
dev[0].request('REATTACH')
dev[0].wait_connected()
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=15)
dev[0].request('SET reassoc_same_bss_optim 1')
dev[0].request('REATTACH')
dev[0].wait_connected()
hwsim_utils.test_connectivity_iface(dev[0], hapd, 'wds-br0', max_tries=5, timeout=1)
finally:
dev[0].request('SET reassoc_same_bss_optim 0')
dev[0].cmd_execute(['iw', dev[0].ifname, 'set', '4addr', 'off'])
dev[0].cmd_execute(['ip', 'link', 'set', 'dev', 'wds-br0', 'down'])
dev[0].cmd_execute(['brctl', 'delbr', 'wds-br0']) |
@remote_compatible
def test_ap_inactivity_poll(dev, apdev):
'AP using inactivity poll'
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['ap_max_inactivity'] = '1'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, psk=passphrase, scan_freq='2412')
hapd.set('ext_mgmt_frame_handling', '1')
dev[0].request('DISCONNECT')
ev = hapd.wait_event(['MGMT-RX'], timeout=5)
if (ev is None):
raise Exception('MGMT RX wait timed out for Deauth')
hapd.set('ext_mgmt_frame_handling', '0')
ev = hapd.wait_event(['AP-STA-DISCONNECTED'], timeout=30)
if (ev is None):
raise Exception('STA disconnection on inactivity was not reported') | -1,038,927,223,853,263,000 | AP using inactivity poll | tests/hwsim/test_ap_params.py | test_ap_inactivity_poll | AreedAhmed/fragattacks | python | @remote_compatible
def test_ap_inactivity_poll(dev, apdev):
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['ap_max_inactivity'] = '1'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, psk=passphrase, scan_freq='2412')
hapd.set('ext_mgmt_frame_handling', '1')
dev[0].request('DISCONNECT')
ev = hapd.wait_event(['MGMT-RX'], timeout=5)
if (ev is None):
raise Exception('MGMT RX wait timed out for Deauth')
hapd.set('ext_mgmt_frame_handling', '0')
ev = hapd.wait_event(['AP-STA-DISCONNECTED'], timeout=30)
if (ev is None):
raise Exception('STA disconnection on inactivity was not reported') |
@remote_compatible
def test_ap_inactivity_disconnect(dev, apdev):
'AP using inactivity disconnect'
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['ap_max_inactivity'] = '1'
params['skip_inactivity_poll'] = '1'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, psk=passphrase, scan_freq='2412')
hapd.set('ext_mgmt_frame_handling', '1')
dev[0].request('DISCONNECT')
ev = hapd.wait_event(['MGMT-RX'], timeout=5)
if (ev is None):
raise Exception('MGMT RX wait timed out for Deauth')
hapd.set('ext_mgmt_frame_handling', '0')
ev = hapd.wait_event(['AP-STA-DISCONNECTED'], timeout=30)
if (ev is None):
raise Exception('STA disconnection on inactivity was not reported') | -8,289,084,894,035,620,000 | AP using inactivity disconnect | tests/hwsim/test_ap_params.py | test_ap_inactivity_disconnect | AreedAhmed/fragattacks | python | @remote_compatible
def test_ap_inactivity_disconnect(dev, apdev):
ssid = 'test-wpa2-psk'
passphrase = 'qwertyuiop'
params = hostapd.wpa2_params(ssid=ssid, passphrase=passphrase)
params['ap_max_inactivity'] = '1'
params['skip_inactivity_poll'] = '1'
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, psk=passphrase, scan_freq='2412')
hapd.set('ext_mgmt_frame_handling', '1')
dev[0].request('DISCONNECT')
ev = hapd.wait_event(['MGMT-RX'], timeout=5)
if (ev is None):
raise Exception('MGMT RX wait timed out for Deauth')
hapd.set('ext_mgmt_frame_handling', '0')
ev = hapd.wait_event(['AP-STA-DISCONNECTED'], timeout=30)
if (ev is None):
raise Exception('STA disconnection on inactivity was not reported') |
@remote_compatible
def test_ap_basic_rates(dev, apdev):
'Open AP with lots of basic rates'
ssid = 'basic rates'
params = {}
params['ssid'] = ssid
params['basic_rates'] = '10 20 55 110 60 90 120 180 240 360 480 540'
hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412') | 5,663,216,637,988,832,000 | Open AP with lots of basic rates | tests/hwsim/test_ap_params.py | test_ap_basic_rates | AreedAhmed/fragattacks | python | @remote_compatible
def test_ap_basic_rates(dev, apdev):
ssid = 'basic rates'
params = {}
params['ssid'] = ssid
params['basic_rates'] = '10 20 55 110 60 90 120 180 240 360 480 540'
hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412') |
@remote_compatible
def test_ap_short_preamble(dev, apdev):
'Open AP with short preamble'
ssid = 'short preamble'
params = {}
params['ssid'] = ssid
params['preamble'] = '1'
hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412') | -2,499,632,660,013,664,000 | Open AP with short preamble | tests/hwsim/test_ap_params.py | test_ap_short_preamble | AreedAhmed/fragattacks | python | @remote_compatible
def test_ap_short_preamble(dev, apdev):
ssid = 'short preamble'
params = {}
params['ssid'] = ssid
params['preamble'] = '1'
hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412') |
def test_ap_spectrum_management_required(dev, apdev):
'Open AP with spectrum management required'
ssid = 'spectrum mgmt'
params = {}
params['ssid'] = ssid
params['country_code'] = 'JP'
params['hw_mode'] = 'a'
params['channel'] = '36'
params['ieee80211d'] = '1'
params['local_pwr_constraint'] = '3'
params['spectrum_mgmt_required'] = '1'
try:
hapd = None
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='5180')
dev[0].wait_regdom(country_ie=True)
finally:
if hapd:
hapd.request('DISABLE')
dev[0].disconnect_and_stop_scan()
hostapd.cmd_execute(apdev[0], ['iw', 'reg', 'set', '00'])
dev[0].wait_event(['CTRL-EVENT-REGDOM-CHANGE'], timeout=0.5)
dev[0].flush_scan_cache() | -3,732,414,986,622,310,400 | Open AP with spectrum management required | tests/hwsim/test_ap_params.py | test_ap_spectrum_management_required | AreedAhmed/fragattacks | python | def test_ap_spectrum_management_required(dev, apdev):
ssid = 'spectrum mgmt'
params = {}
params['ssid'] = ssid
params['country_code'] = 'JP'
params['hw_mode'] = 'a'
params['channel'] = '36'
params['ieee80211d'] = '1'
params['local_pwr_constraint'] = '3'
params['spectrum_mgmt_required'] = '1'
try:
hapd = None
hapd = hostapd.add_ap(apdev[0], params)
dev[0].connect(ssid, key_mgmt='NONE', scan_freq='5180')
dev[0].wait_regdom(country_ie=True)
finally:
if hapd:
hapd.request('DISABLE')
dev[0].disconnect_and_stop_scan()
hostapd.cmd_execute(apdev[0], ['iw', 'reg', 'set', '00'])
dev[0].wait_event(['CTRL-EVENT-REGDOM-CHANGE'], timeout=0.5)
dev[0].flush_scan_cache() |
@remote_compatible
def test_ap_max_listen_interval(dev, apdev):
    'Open AP with maximum listen interval limit'
    # AP configured with max_listen_interval=1; the station's association
    # attempt should therefore be rejected by the AP.
    ssid = 'listen'
    params = {}
    params['ssid'] = ssid
    params['max_listen_interval'] = '1'
    hostapd.add_ap(apdev[0], params)
    dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412', wait_connect=False)
    ev = dev[0].wait_event(['CTRL-EVENT-ASSOC-REJECT'])
    if (ev is None):
        raise Exception('Association rejection not reported')
    # Expect status code 51 (IEEE 802.11: association denied due to
    # excessive listen interval).
    if ('status_code=51' not in ev):
        raise Exception('Unexpected ASSOC-REJECT reason') | -3,338,879,630,809,117,700 | Open AP with maximum listen interval limit | tests/hwsim/test_ap_params.py | test_ap_max_listen_interval | AreedAhmed/fragattacks | python | @remote_compatible
def test_ap_max_listen_interval(dev, apdev):
    # AP configured with a maximum listen interval of 1: the station's
    # association attempt must be rejected with status code 51.
    network_name = 'listen'
    ap_conf = {'ssid': network_name, 'max_listen_interval': '1'}
    hostapd.add_ap(apdev[0], ap_conf)
    dev[0].connect(network_name, key_mgmt='NONE', scan_freq='2412',
                   wait_connect=False)
    reject = dev[0].wait_event(['CTRL-EVENT-ASSOC-REJECT'])
    if reject is None:
        raise Exception('Association rejection not reported')
    if 'status_code=51' not in reject:
        raise Exception('Unexpected ASSOC-REJECT reason')
@remote_compatible
def test_ap_max_num_sta(dev, apdev):
    'Open AP with maximum STA count'
    # AP limited to one associated station: dev[1] associates first, so
    # dev[0] must not be able to complete association afterwards.
    ssid = 'max'
    params = {}
    params['ssid'] = ssid
    params['max_num_sta'] = '1'
    hostapd.add_ap(apdev[0], params)
    dev[1].connect(ssid, key_mgmt='NONE', scan_freq='2412')
    dev[0].connect(ssid, key_mgmt='NONE', scan_freq='2412', wait_connect=False)
    # dev[0] must not report a successful connection within the timeout.
    ev = dev[0].wait_event(['CTRL-EVENT-CONNECTED'], timeout=1)
    if (ev is not None):
        raise Exception('Unexpected association') | 7,185,428,380,504,295,000 | Open AP with maximum STA count | tests/hwsim/test_ap_params.py | test_ap_max_num_sta | AreedAhmed/fragattacks | python | @remote_compatible
def test_ap_max_num_sta(dev, apdev):
    # AP capped at a single associated station: the first station gets in,
    # the second must not complete association within the timeout.
    network_name = 'max'
    ap_conf = {'ssid': network_name, 'max_num_sta': '1'}
    hostapd.add_ap(apdev[0], ap_conf)
    dev[1].connect(network_name, key_mgmt='NONE', scan_freq='2412')
    dev[0].connect(network_name, key_mgmt='NONE', scan_freq='2412',
                   wait_connect=False)
    connected = dev[0].wait_event(['CTRL-EVENT-CONNECTED'], timeout=1)
    if connected is not None:
        raise Exception('Unexpected association')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.