def which_users_can(self, name):
"""Which role can SendMail? """
_roles = self.which_roles_can(name)
result = [self.get_role_members(i.get('role')) for i in _roles]
    return result

def __verify_minion_publish(self, clear_load):
'''
    Verify that the passed information authorizes a minion to execute
:param dict clear_load: A publication load from a minion
:rtype: bool
:return: A boolean indicating if the minion is allowed to publish the command in the load
'''
# Verify that the load is valid
if 'peer' not in self.opts:
return False
if not isinstance(self.opts['peer'], dict):
return False
if any(key not in clear_load for key in ('fun', 'arg', 'tgt', 'ret', 'tok', 'id')):
return False
# If the command will make a recursive publish don't run
if clear_load['fun'].startswith('publish.'):
return False
# Check the permissions for this minion
if not self.__verify_minion(clear_load['id'], clear_load['tok']):
# The minion is not who it says it is!
# We don't want to listen to it!
log.warning(
'Minion id %s is not who it says it is and is attempting '
'to issue a peer command', clear_load['id']
)
return False
clear_load.pop('tok')
perms = []
for match in self.opts['peer']:
if re.match(match, clear_load['id']):
# This is the list of funcs/modules!
if isinstance(self.opts['peer'][match], list):
perms.extend(self.opts['peer'][match])
if ',' in clear_load['fun']:
# 'arg': [['cat', '/proc/cpuinfo'], [], ['foo']]
clear_load['fun'] = clear_load['fun'].split(',')
arg_ = []
for arg in clear_load['arg']:
arg_.append(arg.split())
clear_load['arg'] = arg_
# finally, check the auth of the load
return self.ckminions.auth_check(
perms,
clear_load['fun'],
clear_load['arg'],
clear_load['tgt'],
clear_load.get('tgt_type', 'glob'),
            publish_validate=True)

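# For context, a minimal sketch of the master-side 'peer' configuration this
# check consumes: minion-id regexes mapped to lists of allowed functions.
# The ids and function names below are hypothetical, not from the source.
_example_opts = {
    'peer': {
        'web.*': ['test.ping', 'network.interfaces'],
        'db01': ['disk.usage'],
    },
}
# A load from minion 'web03' calling 'test.ping' matches 'web.*' and passes
# the perms check; any 'publish.*' function is rejected outright above.
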
def boolify(value, nullable=False, return_string=False):
"""Convert a number, string, or sequence type into a pure boolean.
    Args:
        value (number, string, sequence): the value to coerce.
        nullable (bool): if True, return ``None`` for null-ish strings
            (e.g. 'none') instead of a boolean.
        return_string (bool): if True, return the original string when it
            cannot be coerced, instead of raising ``TypeCoercionError``.
Returns:
bool: boolean representation of the given value
Examples:
>>> [boolify(x) for x in ('yes', 'no')]
[True, False]
>>> [boolify(x) for x in (0.1, 0+0j, True, '0', '0.0', '0.1', '2')]
[True, False, True, False, False, True, True]
>>> [boolify(x) for x in ("true", "yes", "on", "y")]
[True, True, True, True]
>>> [boolify(x) for x in ("no", "non", "none", "off", "")]
[False, False, False, False, False]
>>> [boolify(x) for x in ([], set(), dict(), tuple())]
[False, False, False, False]
>>> [boolify(x) for x in ([1], set([False]), dict({'a': 1}), tuple([2]))]
[True, True, True, True]
"""
# cast number types naturally
if isinstance(value, BOOL_COERCEABLE_TYPES):
return bool(value)
# try to coerce string into number
val = text_type(value).strip().lower().replace('.', '', 1)
if val.isnumeric():
return bool(float(val))
elif val in BOOLISH_TRUE:
return True
elif nullable and val in NULL_STRINGS:
return None
elif val in BOOLISH_FALSE:
return False
    else:  # not a recognized boolean-ish string; fall back to complex()
try:
return bool(complex(val))
except ValueError:
if isinstance(value, string_types) and return_string:
return value
            raise TypeCoercionError(value, "The value %r cannot be boolified." % value)

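# boolify references several module-level names not shown above. A plausible
# minimal set of definitions, chosen to satisfy the doctests rather than
# copied from the source module:
BOOL_COERCEABLE_TYPES = (bool, int, float, complex, list, set, dict, tuple)
BOOLISH_TRUE = ('true', 'yes', 'on', 'y')
BOOLISH_FALSE = ('false', 'no', 'non', 'none', 'off', 'n', '')
NULL_STRINGS = ('none', 'null', '')
text_type = str          # six.text_type on Python 2
string_types = (str,)    # six.string_types on Python 2

class TypeCoercionError(ValueError):
    def __init__(self, value, message):
        self.value = value
        super(TypeCoercionError, self).__init__(message)
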
def _ReadStringDataTypeDefinition(
self, definitions_registry, definition_values, definition_name,
is_member=False):
"""Reads a string data type definition.
Args:
definitions_registry (DataTypeDefinitionsRegistry): data type definitions
registry.
definition_values (dict[str, object]): definition values.
definition_name (str): name of the definition.
is_member (Optional[bool]): True if the data type definition is a member
data type definition.
Returns:
StringDefinition: string data type definition.
Raises:
DefinitionReaderError: if the definitions values are missing or if
the format is incorrect.
"""
if is_member:
supported_definition_values = (
self._SUPPORTED_DEFINITION_VALUES_STRING_MEMBER)
else:
supported_definition_values = self._SUPPORTED_DEFINITION_VALUES_STRING
definition_object = self._ReadElementSequenceDataTypeDefinition(
definitions_registry, definition_values, data_types.StringDefinition,
definition_name, supported_definition_values)
encoding = definition_values.get('encoding', None)
if not encoding:
error_message = 'missing encoding'
raise errors.DefinitionReaderError(definition_name, error_message)
definition_object.encoding = encoding
    return definition_object

def generate(self, chars):
"""Generate audio CAPTCHA data. The return data is a bytearray.
:param chars: text to be generated.
"""
if not self._cache:
self.load()
body = self.create_wave_body(chars)
    return patch_wave_header(body)

def snake_to_pascal(name, singularize=False):
"""Converts snake_case to PascalCase. If singularize is True, an attempt is made at singularizing
each part of the resulting name.
"""
parts = name.split("_")
if singularize:
return "".join(p.upper() if p in _ALL_CAPS else to_singular(p.title()) for p in parts)
else:
return "".join(p.upper() if p in _ALL_CAPS else p.title() for p in parts) | Converts snake_case to PascalCase. If singularize is True, an attempt is made at singularizing
each part of the resulting name. |
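# Usage sketch for snake_to_pascal; _ALL_CAPS and to_singular are module-level
# helpers not shown above, so these definitions are stand-ins.
_ALL_CAPS = {'api', 'id', 'url'}

def to_singular(word):
    # Naive singularizer, just for the demo.
    return word[:-1] if word.endswith('s') else word

print(snake_to_pascal('user_accounts'))                    # UserAccounts
print(snake_to_pascal('user_accounts', singularize=True))  # UserAccount
print(snake_to_pascal('api_keys'))                         # APIKeys
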
def app_state(self, app):
"""Informs if application is running."""
if not self.available or not self.screen_on:
return STATE_OFF
if self.current_app["package"] == app:
return STATE_ON
    return STATE_OFF

def models(cls, api_version=DEFAULT_API_VERSION):
"""Module depends on the API version:
* 2015-08-01: :mod:`v2015_08_01.models<azure.mgmt.eventhub.v2015_08_01.models>`
* 2017-04-01: :mod:`v2017_04_01.models<azure.mgmt.eventhub.v2017_04_01.models>`
* 2018-01-01-preview: :mod:`v2018_01_01_preview.models<azure.mgmt.eventhub.v2018_01_01_preview.models>`
"""
if api_version == '2015-08-01':
from .v2015_08_01 import models
return models
elif api_version == '2017-04-01':
from .v2017_04_01 import models
return models
elif api_version == '2018-01-01-preview':
from .v2018_01_01_preview import models
return models
    raise NotImplementedError("APIVersion {} is not available".format(api_version))

def OnCellText(self, event):
"""Text entry event handler"""
row, col, _ = self.grid.actions.cursor
self.grid.GetTable().SetValue(row, col, event.code)
    event.Skip()

def loads(content):
"""Loads variable definitions from a string."""
lines = _group_lines(line for line in content.split('\n'))
lines = [
(i, _parse_envfile_line(line))
for i, line in lines if line.strip()
]
errors = []
# Reject files with duplicate variables (no sane default).
duplicates = _find_duplicates(((i, line[0]) for i, line in lines))
for i, variable, j in duplicates:
errors.append(''.join([
'Line %d: duplicate environment variable "%s": ',
'already appears on line %d.',
]) % (i + 1, variable, j + 1)
)
# Done!
if errors:
raise ValueError(errors)
    return {k: v for _, (k, v) in lines}

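# loads() delegates to private helpers not shown here. A minimal sketch of
# _find_duplicates consistent with how it is consumed above, yielding
# (line_index, name, first_seen_index) triples; the real helper may differ:
def _find_duplicates(pairs):
    seen = {}
    for i, name in pairs:
        if name in seen:
            yield i, name, seen[name]
        else:
            seen[name] = i
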
def create(self, _attributes=None, _joining=None, _touch=True, **attributes):
"""
Create a new instance of the related model.
:param attributes: The attributes
:type attributes: dict
:rtype: orator.orm.Model
"""
if _attributes is not None:
attributes.update(_attributes)
instance = self._related.new_instance(attributes)
instance.save({"touch": False})
self.attach(instance.get_key(), _joining, _touch)
        return instance

def _apply_dvs_infrastructure_traffic_resources(infra_traffic_resources,
resource_dicts):
'''
Applies the values of the resource dictionaries to infra traffic resources,
creating the infra traffic resource if required
    (vim.DvsHostInfrastructureTrafficResource)
'''
for res_dict in resource_dicts:
filtered_traffic_resources = \
[r for r in infra_traffic_resources if r.key == res_dict['key']]
if filtered_traffic_resources:
traffic_res = filtered_traffic_resources[0]
else:
traffic_res = vim.DvsHostInfrastructureTrafficResource()
traffic_res.key = res_dict['key']
traffic_res.allocationInfo = \
vim.DvsHostInfrastructureTrafficResourceAllocation()
infra_traffic_resources.append(traffic_res)
if res_dict.get('limit'):
traffic_res.allocationInfo.limit = res_dict['limit']
if res_dict.get('reservation'):
traffic_res.allocationInfo.reservation = res_dict['reservation']
if res_dict.get('num_shares') or res_dict.get('share_level'):
if not traffic_res.allocationInfo.shares:
traffic_res.allocationInfo.shares = vim.SharesInfo()
if res_dict.get('share_level'):
traffic_res.allocationInfo.shares.level = \
vim.SharesLevel(res_dict['share_level'])
if res_dict.get('num_shares'):
#XXX Even though we always set the number of shares if provided,
#the vCenter will ignore it unless the share level is 'custom'.
                traffic_res.allocationInfo.shares.shares = res_dict['num_shares']

def pacific_atlantic(matrix):
"""
:type matrix: List[List[int]]
:rtype: List[List[int]]
"""
n = len(matrix)
if not n: return []
m = len(matrix[0])
if not m: return []
res = []
    # Visited grids must be n rows x m cols to match matrix[i][j] indexing.
    atlantic = [[False for _ in range(m)] for _ in range(n)]
    pacific = [[False for _ in range(m)] for _ in range(n)]
for i in range(n):
dfs(pacific, matrix, float("-inf"), i, 0)
dfs(atlantic, matrix, float("-inf"), i, m-1)
for i in range(m):
dfs(pacific, matrix, float("-inf"), 0, i)
dfs(atlantic, matrix, float("-inf"), n-1, i)
for i in range(n):
for j in range(m):
if pacific[i][j] and atlantic[i][j]:
res.append([i, j])
    return res

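# pacific_atlantic calls a module-level dfs helper that is not shown above.
# A minimal sketch consistent with how it is invoked (flood-fill inward from
# the ocean edges, moving only to cells of equal or greater height):
def dfs(visited, matrix, height, i, j):
    n, m = len(matrix), len(matrix[0])
    if i < 0 or i >= n or j < 0 or j >= m:
        return
    if visited[i][j] or matrix[i][j] < height:
        return
    visited[i][j] = True
    for di, dj in ((1, 0), (-1, 0), (0, 1), (0, -1)):
        dfs(visited, matrix, matrix[i][j], i + di, j + dj)
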
async def dump_message(obj, msg, field_archiver=None):
"""
Dumps message to the object.
    Returns the message's plain-old-Python-object (popo) representation.
:param obj:
:param msg:
:param field_archiver:
:return:
"""
mtype = msg.__class__
fields = mtype.f_specs()
obj = collections.OrderedDict() if obj is None else get_elem(obj)
for field in fields:
await dump_message_field(obj, msg=msg, field=field, field_archiver=field_archiver)
    return obj

def create_record(awsclient, name_prefix, instance_reference, type="A", host_zone_name=None):
"""
Builds route53 record entries enabling DNS names for services
Note: gcdt.route53 create_record(awsclient, ...)
is used in dataplatform cloudformation.py templates!
:param name_prefix: The sub domain prefix to use
    :param instance_reference: The EC2 troposphere reference whose private IP should be linked to
:param type: The type of the record A or CNAME (default: A)
:param host_zone_name: The host zone name to use (like preprod.ds.glomex.cloud. - DO NOT FORGET THE DOT!)
:return: RecordSetType
"""
    # Only fetch the host zone from the COPS stack if necessary
if host_zone_name is None:
host_zone_name = _retrieve_stack_host_zone_name(awsclient)
if not (type == "A" or type == "CNAME"):
raise Exception("Record set type is not supported!")
name_of_record = name_prefix \
.replace('.', '') \
.replace('-', '') \
.title() + "HostRecord"
# Reference EC2 instance automatically to their private IP
if isinstance(instance_reference, Instance):
resource_record = troposphere.GetAtt(
instance_reference,
"PrivateIp"
)
else:
resource_record = instance_reference
return RecordSetType(
name_of_record,
HostedZoneName=host_zone_name,
Name=troposphere.Join("", [
name_prefix + ".",
host_zone_name,
]),
Type=type,
TTL=TTL_DEFAULT,
ResourceRecords=[
resource_record
],
    )

def use_comparative_grade_system_view(self):
"""Pass through to provider GradeSystemLookupSession.use_comparative_grade_system_view"""
self._object_views['grade_system'] = COMPARATIVE
# self._get_provider_session('grade_system_lookup_session') # To make sure the session is tracked
for session in self._get_provider_sessions():
try:
session.use_comparative_grade_system_view()
except AttributeError:
            pass

def _translate_div(self, oprnd1, oprnd2, oprnd3):
"""Return a formula representation of an DIV instruction.
"""
assert oprnd1.size and oprnd2.size and oprnd3.size
assert oprnd1.size == oprnd2.size
op1_var = self._translate_src_oprnd(oprnd1)
op2_var = self._translate_src_oprnd(oprnd2)
op3_var, op3_var_constrs = self._translate_dst_oprnd(oprnd3)
if oprnd3.size > oprnd1.size:
op1_var_zx = smtfunction.zero_extend(op1_var, oprnd3.size)
op2_var_zx = smtfunction.zero_extend(op2_var, oprnd3.size)
result = op1_var_zx.udiv(op2_var_zx)
elif oprnd3.size < oprnd1.size:
result = smtfunction.extract(op1_var.udiv(op2_var), 0, oprnd3.size)
else:
result = op1_var.udiv(op2_var)
    return [op3_var == result] + op3_var_constrs

def _get_powercfg_minute_values(scheme, guid, subguid, safe_name):
'''
    Returns the AC/DC values in a dict for the given guid and subguid on the
    given scheme
'''
if scheme is None:
scheme = _get_current_scheme()
if __grains__['osrelease'] == '7':
cmd = 'powercfg /q {0} {1}'.format(scheme, guid)
else:
cmd = 'powercfg /q {0} {1} {2}'.format(scheme, guid, subguid)
out = __salt__['cmd.run'](cmd, python_shell=False)
split = out.split('\r\n\r\n')
if len(split) > 1:
for s in split:
if safe_name in s or subguid in s:
out = s
break
else:
out = split[0]
raw_settings = re.findall(r'Power Setting Index: ([0-9a-fx]+)', out)
return {'ac': int(raw_settings[0], 0) / 60,
            'dc': int(raw_settings[1], 0) / 60}

def synthesize_property(property_name,
default = None,
contract = None,
read_only = False,
private_member_name = None):
"""
When applied to a class, this decorator adds a property to it and overrides the constructor in order to set\
the default value of the property.
    :IMPORTANT: In order for this to work on Python 2, you must use new-style classes, that is to say the class must inherit from ``object``.
By default, the private attribute containing the property's value will be named ``property_name`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`naming_convention <naming_convention>` decorator.
:param property_name: Name of the property to synthesize.
:type property_name: str
:param default: Property's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
    :param read_only: If set to ``True``, the property will not have a setter.
:type read_only: bool
:param private_member_name: Custom name for the private attribute that contains the property's value.
:type private_member_name: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
:raises: :class:`InvalidPropertyOverrideError` when there's already a member with that name and which is not a property.
"""
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = property_name,
defaultValue = default,
contract = contract,
readOnly = read_only,
privateMemberName = private_member_name,
                                                                memberDelegate = PropertyDelegate())

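# Usage sketch based on the documented behavior; the class and attribute
# names are hypothetical.
@synthesize_property('speed', default=0)
class Car(object):  # inherit from object so this also works on Python 2
    pass

car = Car()
print(car.speed)   # 0, the default, stored in car._speed
car.speed = 120    # setter exists because read_only defaults to False
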
def get_auth(self):
    ''' Returns the username and password from the configfile. '''
    return (self._cfgparse.get(self._section, 'username'), self._cfgparse.get(self._section, 'password'))

def get_call_signature(fn: FunctionType,
args: ArgsType,
kwargs: KwargsType,
debug_cache: bool = False) -> str:
"""
Takes a function and its args/kwargs, and produces a string description
of the function call (the call signature) suitable for use indirectly as a
cache key. The string is a JSON representation. See ``make_cache_key`` for
a more suitable actual cache key.
"""
# Note that the function won't have the __self__ argument (as in
# fn.__self__), at this point, even if it's a member function.
try:
call_sig = json_encode((fn.__qualname__, args, kwargs))
except TypeError:
log.critical(
"\nTo decorate using @django_cache_function without specifying "
"cache_key, the decorated function's owning class and its "
"parameters must be JSON-serializable (see jsonfunc.py, "
"django_cache_fn.py).\n")
raise
if debug_cache:
log.debug("Making call signature {!r}", call_sig)
    return call_sig

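# Usage sketch; json_encode is a module-level helper, so the exact string
# below is illustrative rather than guaranteed.
def add(a, b):
    return a + b

# Produces a JSON string of (qualname, args, kwargs), roughly:
#   '["add", [2, 3], {"scale": 1}]'
sig = get_call_signature(add, args=(2, 3), kwargs={'scale': 1})
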
def gumbel_softmax(x,
z_size,
mode,
softmax_k=0,
temperature_warmup_steps=150000,
summary=True,
name=None):
"""Gumbel softmax discretization bottleneck.
Args:
x: Input to the discretization bottleneck.
z_size: Number of bits, where discrete codes range from 1 to 2**z_size.
mode: tf.estimator.ModeKeys.
softmax_k: If > 0 then do top-k softmax.
temperature_warmup_steps: Number of steps it takes to decay temperature to
0.
summary: Whether to write summaries.
name: Name for the bottleneck scope.
Returns:
Embedding function, discrete code, and loss.
"""
with tf.variable_scope(name, default_name="gumbel_softmax"):
m = tf.layers.dense(x, 2**z_size, name="mask")
if softmax_k > 0:
m, kl = top_k_softmax(m, softmax_k)
return m, m, 1.0 - tf.reduce_mean(kl)
logsm = tf.nn.log_softmax(m)
# Gumbel-softmax sample.
gumbel_samples = gumbel_sample(common_layers.shape_list(m))
steps = temperature_warmup_steps
gumbel_samples *= common_layers.inverse_exp_decay(steps // 5) * 0.5
temperature = 1.2 - common_layers.inverse_lin_decay(steps)
# 10% of the time keep reasonably high temperature to keep learning.
temperature = tf.cond(
tf.less(tf.random_uniform([]), 0.9), lambda: temperature,
lambda: tf.random_uniform([], minval=0.5, maxval=1.0))
s = tf.nn.softmax((logsm + gumbel_samples) / temperature)
m = tf.nn.softmax(m)
kl = -tf.reduce_max(logsm, axis=-1)
if summary:
tf.summary.histogram("max-log", tf.reshape(kl, [-1]))
# Calculate the argmax and construct hot vectors.
maxvec = tf.reshape(tf.argmax(m, axis=-1), [-1])
maxvhot = tf.stop_gradient(tf.one_hot(maxvec, 2**z_size))
# Add losses that prevent too few being used.
distrib = tf.reshape(logsm, [-1, 2**z_size]) * maxvhot
d_mean = tf.reduce_mean(distrib, axis=[0], keep_dims=True)
d_variance = tf.reduce_mean(
tf.squared_difference(distrib, d_mean), axis=[0])
d_dev = -tf.reduce_mean(d_variance)
ret = s
if mode != tf.estimator.ModeKeys.TRAIN:
ret = tf.reshape(maxvhot, common_layers.shape_list(s)) # Just hot @eval.
        return m, ret, d_dev * 5.0 + tf.reduce_mean(kl) * 0.002

def _health_check_thread(self):
"""
Health checker thread that pings the service every 30 seconds
:return: None
"""
while self._run_health_checker:
response = self._health_check(Health_pb2.HealthCheckRequest(service='predix-event-hub.grpc.health'))
logging.debug('received health check: ' + str(response))
time.sleep(30)
    return

def request(self, apdu):
"""This function is called by transaction functions to send
to the application."""
if _debug: ServerSSM._debug("request %r", apdu)
# make sure it has a good source and destination
apdu.pduSource = self.pdu_address
apdu.pduDestination = None
# send it via the device
    self.ssmSAP.sap_request(apdu)

def _is_inventory_group(key, value):
'''
Verify that a module-level variable (key = value) is a valid inventory group.
'''
if (
key.startswith('_')
or not isinstance(value, (list, tuple, GeneratorType))
):
return False
# If the group is a tuple of (hosts, data), check the hosts
if isinstance(value, tuple):
value = value[0]
# Expand any generators of hosts
if isinstance(value, GeneratorType):
value = list(value)
return all(
isinstance(item, ALLOWED_HOST_TYPES)
for item in value
    )

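# For illustration, assuming ALLOWED_HOST_TYPES covers plain strings, these
# hypothetical module-level variables would or would not qualify as groups:
web_servers = ['web1.example.com', 'web2.example.com']     # valid group
db_servers = (['db1.example.com'], {'ssh_user': 'admin'})  # (hosts, data) tuple
_internal = ['ignored.example.com']  # leading underscore: rejected
port = 22                            # not a list/tuple/generator: rejected
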
def is_canonical_address(address: Any) -> bool:
"""
    Returns `True` if `address` is a 20-byte address in its canonical form.
"""
if not is_bytes(address) or len(address) != 20:
return False
    return address == to_canonical_address(address)

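# Usage sketch: the canonical form is the raw 20-byte value, so only bytes
# of length 20 qualify (to_canonical_address normalizes before comparing).
addr = bytes.fromhex('d3cda913deb6f67967b99d67acdfa1712c293601')
assert is_canonical_address(addr)                    # raw 20-byte value
assert not is_canonical_address('0x' + addr.hex())   # hex string, not bytes
assert not is_canonical_address(addr + b'\x00')      # wrong length
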
def squash_layouts(self, layouts):
'''
Returns a squashed layout
The first element takes precedence (i.e. left to right).
Dictionaries are recursively merged, overwrites only occur on non-dictionary entries.
[0,1]
0:
test: 'my data'
1:
test: 'stuff'
Result:
test: 'my data'
@param layouts: List of layouts to merge together
@return: New layout with list of layouts squash merged
'''
top_layout = layouts[0]
json_data = {}
# Generate a new container Layout
layout = Layout(top_layout.name(), json_data, layouts)
# Merge in each of the layouts
for mlayout in reversed(layouts):
# Overwrite all fields, *except* dictionaries
# For dictionaries, keep recursing until non-dictionaries are found
self.dict_merge(layout.json(), mlayout.json())
    return layout

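# squash_layouts relies on a dict_merge helper (a method on the same class)
# that is not shown. A minimal sketch of the recursive merge the docstring
# describes (recurse into dicts, overwrite everything else); the real helper
# may differ:
def dict_merge(self, base, overlay):
    for key, value in overlay.items():
        if isinstance(value, dict) and isinstance(base.get(key), dict):
            self.dict_merge(base[key], value)
        else:
            base[key] = value
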
def add_template_events_to_ifo(self, ifo, columns, vectors):
""" Add a vector indexed """
# Just call through to the standard function
self.template_events = self.template_event_dict[ifo]
self.add_template_events(columns, vectors)
self.template_event_dict[ifo] = self.template_events
    self.template_events = None

def add_component_type(self, component_type):
"""
Adds a component type to the model.
@param component_type: Component type to be added.
@type component_type: lems.model.fundamental.ComponentType
"""
name = component_type.name
# To handle colons in names in LEMS
if ':' in name:
name = name.replace(':', '_')
component_type.name = name
    self.component_types[name] = component_type

def _FlushInput(self):
""" Flush all read data until no more available. """
self.ser.flush()
flushed = 0
while True:
ready_r, ready_w, ready_x = select.select([self.ser], [],
[self.ser], 0)
if len(ready_x) > 0:
logging.error("Exception from serial port.")
return None
elif len(ready_r) > 0:
flushed += 1
self.ser.read(1) # This may cause underlying buffering.
self.ser.flush() # Flush the underlying buffer too.
else:
            break

def calc_J(self):
"""Updates self.J, returns nothing"""
del self.J
self.J = np.zeros([self.param_vals.size, self.data.size])
dp = np.zeros_like(self.param_vals)
f0 = self.model.copy()
for a in range(self.param_vals.size):
dp *= 0
dp[a] = self.dl[a]
f1 = self.func(self.param_vals + dp, *self.func_args, **self.func_kwargs)
grad_func = (f1 - f0) / dp[a]
#J = grad(residuals) = -grad(model)
        self.J[a] = -grad_func

def parse(self):
"""Parse our string and return a Survey object, None, or raise :exc:`ParseException`"""
if not self.survey_str:
return None
lines = self.survey_str.splitlines()
if len(lines) < 10:
raise ParseException("Expected at least 10 lines in a Compass Survey, only found %d!\nlines=%s" % (len(lines), lines))
# undelimited Cave Name may be empty string and "skipped"
first_line = lines.pop(0).strip()
    if first_line.startswith('SURVEY NAME:'):
        cave_name = ''
        # Note: str.strip() takes a character set, not a prefix; slicing off
        # the prefix avoids eating names made of those same letters.
        name = first_line[len('SURVEY NAME:'):].strip()
else:
cave_name = first_line
name = lines.pop(0).split('SURVEY NAME:', 1)[1].strip()
# Date and Comment on one line, Comment may be missing
date_comment_toks = lines.pop(0).split('SURVEY DATE:', 1)[1].split('COMMENT:')
date = CompassSurveyParser._parse_date(date_comment_toks[0])
comment = date_comment_toks[1].strip() if len(date_comment_toks) > 1 else ''
lines.pop(0) # SURVEY TEAM:\n (actual team members are on the next line)
team = [member.strip() for member in lines.pop(0).split(',')] # We're already decoding from windows-1252 codec so we have unicode for names like 'Tanya Pietra\xdf'
# TODO: implement format (units!), instrument correction(s)
dec_fmt_corr = lines.pop(0)
declination, fmt, corrections, corrections2 = CompassSurveyParser._parse_declination_line(dec_fmt_corr)
lines.pop(0)
shot_header = lines.pop(0).split()
val_count = len(shot_header) - 2 if 'FLAGS' in shot_header else len(shot_header) # 1998 vintage data has no FLAGS, COMMENTS at end
lines.pop(0)
survey = Survey(name=name, date=date, comment=comment, team=team, cave_name=cave_name,
shot_header=shot_header, declination=declination,
file_format=fmt, corrections=corrections, corrections2=corrections2)
shots = []
shot_lines = lines
for shot_line in shot_lines:
shot_vals = shot_line.split(None, val_count)
if len(shot_vals) > val_count: # last two spare columns are FLAGS and COMMENTS, either value may be missing
flags_comment = shot_vals.pop()
if not flags_comment.startswith('#|'):
flags, comment = '', flags_comment
else:
try:
flags, comment = flags_comment.split('#|', 1)[1].split('#', 1)
except ValueError:
raise ParseException('Invalid flags in %s survey: %s' % (name, flags_comment)) # A 2013 bug in Compass inserted corrupt binary garbage into FLAGS column, causes parse to barf
shot_vals += [flags, comment.strip()]
shot_vals = [(header, self._coerce(header, val)) for (header, val) in zip(shot_header, shot_vals)]
shot = Shot(shot_vals)
survey.add_shot(shot)
#log.debug("Survey: name=%s shots=%d length=%0.1f date=%s team=%s\n%s", name, len(shots), survey.length, date, team, '\n'.join([str(shot) for shot in survey.shots]))
    return survey

def formula(self, atom_sequence):
'''
Constructs standardized chemical formula
NB: this is the PUBLIC method
@returns formula_str
'''
labels = {}
types = []
y = 0
for k, atomi in enumerate(atom_sequence):
lbl = re.sub("[0-9]+", "", atomi).capitalize()
if lbl not in labels:
labels[lbl] = y
types.append([k+1])
y += 1
else:
types[ labels[lbl] ].append(k+1)
atoms = list(labels.keys())
atoms = [x for x in self.formula_sequence if x in atoms] + [x for x in atoms if x not in self.formula_sequence] # accordingly
formula = ''
for atom in atoms:
n = len(types[labels[atom]])
if n==1: n = ''
else: n = str(n)
formula += atom + n
        return formula

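# Usage sketch; 'parser' is a hypothetical instance of the owning class, and
# its formula_sequence (preferred element ordering) is assumed to start with
# the elements shown.
parser.formula(['Si1', 'O1', 'O2'])          # -> 'SiO2'
parser.formula(['Fe', 'Fe', 'O', 'O', 'O'])  # -> 'Fe2O3'
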
def list_reshape(list_, new_shape, trail=False):
r"""
    reshapes leaving trailing dimensions in front if prod(new_shape) != len(list_)
Args:
list_ (list):
new_shape (tuple):
Returns:
list: list_
CommandLine:
python -m utool.util_list --exec-list_reshape --show
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_list import * # NOQA
>>> import utool as ut
>>> import numpy as np
>>> list_ = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
>>> new_shape = (2, 2, 3)
>>> newlist = list_reshape(list_, new_shape)
>>> depth = ut.depth_profile(newlist)
>>> result = ('list_ = %s' % (ut.repr2(newlist, nl=1),))
>>> print('depth = %r' % (depth,))
>>> print(result)
>>> newlist2 = np.reshape(list_, depth).tolist()
>>> ut.assert_eq(newlist, newlist2)
"""
if not trail:
total = reduce(operator.mul, new_shape)
assert total == len(list_)
newlist = list_
for dim in reversed(new_shape):
slice_ = (newlist[i::dim] for i in range(dim))
newlist = list(map(list, zip(*slice_)))
if not trail:
newlist = newlist[0]
    return newlist

def get_grid(grid_id):
"""
Return the specified grid.
:param grid_id: The grid identification in h2o
:returns: an :class:`H2OGridSearch` instance.
"""
assert_is_type(grid_id, str)
grid_json = api("GET /99/Grids/%s" % grid_id)
models = [get_model(key["name"]) for key in grid_json["model_ids"]]
# get first model returned in list of models from grid search to get model class (binomial, multinomial, etc)
first_model_json = api("GET /3/Models/%s" % grid_json["model_ids"][0]["name"])["models"][0]
gs = H2OGridSearch(None, {}, grid_id)
gs._resolve_grid(grid_id, grid_json, first_model_json)
gs.models = models
hyper_params = {param: set() for param in gs.hyper_names}
for param in gs.hyper_names:
for model in models:
if isinstance(model.full_parameters[param]["actual_value"], list):
hyper_params[param].add(model.full_parameters[param]["actual_value"][0])
else:
hyper_params[param].add(model.full_parameters[param]["actual_value"])
hyper_params = {str(param): list(vals) for param, vals in hyper_params.items()}
gs.hyper_params = hyper_params
    # 'model' here is the last model from the loop above; every model in the
    # grid shares the same class (binomial, multinomial, etc.).
    gs.model = model.__class__()
    return gs

def split_heads(self, x):
"""Split x into different heads, and transpose the resulting value.
    The tensor is transposed to ensure the inner dimensions hold the correct
values during the matrix multiplication.
Args:
x: A tensor with shape [batch_size, length, hidden_size]
Returns:
A tensor with shape [batch_size, num_heads, length, hidden_size/num_heads]
"""
with tf.name_scope("split_heads"):
batch_size = tf.shape(x)[0]
length = tf.shape(x)[1]
# Calculate depth of last dimension after it has been split.
depth = (self.hidden_size // self.num_heads)
# Split the last dimension
x = tf.reshape(x, [batch_size, length, self.num_heads, depth])
# Transpose the result
        return tf.transpose(x, [0, 2, 1, 3])

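# A NumPy sketch of the same reshape/transpose with assumed sizes, to make
# the shape flow concrete:
import numpy as np

batch, length, hidden, heads = 2, 5, 512, 8  # assumed sizes
depth = hidden // heads                      # 64
x = np.zeros((batch, length, hidden))
y = x.reshape(batch, length, heads, depth).transpose(0, 2, 1, 3)
assert y.shape == (batch, heads, length, depth)
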
def _R2deriv(self,R,z,phi=0.,t=0.):
"""
NAME:
       _R2deriv
PURPOSE:
evaluate the second radial derivative for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
the second radial derivative
HISTORY:
2013-06-28 - Written - Bovy (IAS)
"""
r= nu.sqrt(R*R+z*z)
return 4.*nu.pi*r**(-2.-self.alpha)*nu.exp(-(r/self.rc)**2.)*R**2.\
            +self._mass(r)/r**5.*(z**2.-2.*R**2.)

def _reset(self, framer):
"""
Reset the state for the framer. It is safe to call this
method multiple times with the same framer; the ID of the
framer object will be saved and the state only reset if the
IDs are different. After resetting the state, the framer's
``init_state()`` method will be called.
"""
# Do nothing if we're already properly initialized
if id(framer) == self._framer_id:
return
# Reset the state
self._other = {}
# Initialize the state and save the framer ID
framer.init_state(self)
    self._framer_id = id(framer)

def remove_service(service, zone=None, permanent=True):
'''
Remove a service from zone. This option can be specified multiple times.
If zone is omitted, default zone will be used.
CLI Example:
.. code-block:: bash
salt '*' firewalld.remove_service ssh
To remove a service from a specific zone
.. code-block:: bash
salt '*' firewalld.remove_service ssh dmz
'''
if zone:
cmd = '--zone={0} --remove-service={1}'.format(zone, service)
else:
cmd = '--remove-service={0}'.format(service)
if permanent:
cmd += ' --permanent'
    return __firewall_cmd(cmd)

def put_task_info(self, task_name, key, value):
"""
Put information into a task.
:param task_name: name of the task
:param key: key of the information item
:param value: value of the information item
"""
params = OrderedDict([('info', ''), ('taskname', task_name)])
headers = {'Content-Type': 'application/xml'}
body = self.TaskInfo(key=key, value=value).serialize()
    self._client.put(self.resource(), params=params, headers=headers, data=body)

def set_random_seed():
"""Set the random seed from flag everywhere."""
tf.set_random_seed(FLAGS.random_seed)
random.seed(FLAGS.random_seed)
    np.random.seed(FLAGS.random_seed)

def plot(self, key=None, invert=None, plotmethod='imshow',
cmap=plt.cm.gray, ms=4, Max=None,
fs=None, dmargin=None, wintit=None,
draw=True, connect=True):
""" Plot the data content in a predefined figure """
dax, KH = _plot.Data_plot(self, key=key, invert=invert, Max=Max,
plotmethod=plotmethod, cmap=cmap, ms=ms,
fs=fs, dmargin=dmargin, wintit=wintit,
draw=draw, connect=connect)
    return dax, KH

def execute(self, program: Program):
"""
Execute a program on the QVM.
Note that the QAM is stateful. Subsequent calls to :py:func:`execute` will not
automatically reset the wavefunction or the classical RAM. If this is desired,
consider starting your program with ``RESET``.
:return: ``self`` to support method chaining.
"""
# TODO: why are DEFGATEs not just included in the list of instructions?
for dg in program.defined_gates:
if dg.parameters is not None:
raise NotImplementedError("PyQVM does not support parameterized DEFGATEs")
self.defined_gates[dg.name] = dg.matrix
# initialize program counter
self.program = program
self.program_counter = 0
halted = len(program) == 0
while not halted:
halted = self.transition()
        return self

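# Usage sketch of the chaining the docstring mentions; the n_qubits
# constructor argument is assumed.
from pyquil import Program
from pyquil.gates import CNOT, H, RESET

qvm = PyQVM(n_qubits=2)
qvm.execute(Program(RESET(), H(0), CNOT(0, 1)))  # RESET first: the QAM is stateful
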
def crypto_box_keypair():
"""
Returns a randomly generated public and secret key.
:rtype: (bytes(public_key), bytes(secret_key))
"""
pk = ffi.new("unsigned char[]", crypto_box_PUBLICKEYBYTES)
sk = ffi.new("unsigned char[]", crypto_box_SECRETKEYBYTES)
rc = lib.crypto_box_keypair(pk, sk)
ensure(rc == 0,
'Unexpected library error',
raising=exc.RuntimeError)
return (
ffi.buffer(pk, crypto_box_PUBLICKEYBYTES)[:],
ffi.buffer(sk, crypto_box_SECRETKEYBYTES)[:],
    )

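# Usage sketch; the size constants come from the same bindings module.
pk, sk = crypto_box_keypair()
assert len(pk) == crypto_box_PUBLICKEYBYTES  # 32 for NaCl's curve25519
assert len(sk) == crypto_box_SECRETKEYBYTES  # 32
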
def show_xyzs(self, xs, ys, zs, imgsize:int=4, figsize:Optional[Tuple[int,int]]=None, **kwargs):
"Show `xs` (inputs), `ys` (targets) and `zs` (predictions) on a figure of `figsize`."
title = 'Input / Prediction / Target'
axs = subplots(len(xs), 3, imgsize=imgsize, figsize=figsize, title=title, weight='bold', size=14)
for i,(x,y,z) in enumerate(zip(xs,ys,zs)):
x.show(ax=axs[i,0], **kwargs)
y.show(ax=axs[i,2], **kwargs)
        z.show(ax=axs[i,1], **kwargs)

def user_picklist(i_info, command):
"""Display list of instances matching args and ask user to select target.
Instance list displayed and user asked to enter the number corresponding
to the desired target instance, or '0' to abort.
Args:
i_info (dict): information on instances and details.
command (str): command specified on the command line.
Returns:
tar_idx (int): the dictionary index number of the targeted instance.
"""
valid_entry = False
awsc.get_all_aminames(i_info)
list_instances(i_info, "", True)
msg_txt = ("Enter {0}#{1} of instance to {3} ({0}1{1}-{0}{4}{1})"
" [{2}0 aborts{1}]: ".format(C_WARN, C_NORM, C_TI,
command, len(i_info)))
while not valid_entry:
entry_raw = obtain_input(msg_txt)
try:
entry_int = int(entry_raw)
except ValueError:
entry_int = 999
(tar_idx, valid_entry) = user_entry(entry_int, len(i_info), command)
    return tar_idx

def do_levmarq_all_particle_groups(s, region_size=40, max_iter=2, damping=1.0,
decrease_damp_factor=10., run_length=4, collect_stats=False, **kwargs):
"""
Levenberg-Marquardt optimization for every particle in the state.
Convenience wrapper for LMParticleGroupCollection. Same keyword args,
but I've set the defaults to what I've found to be useful values for
optimizing particles. See LMParticleGroupCollection for documentation.
See Also
--------
do_levmarq_particles : Levenberg-Marquardt optimization of a
specified set of particles.
do_levmarq : Levenberg-Marquardt optimization of the entire state;
useful for optimizing global parameters.
LMParticleGroupCollection : The workhorse of do_levmarq.
LMEngine : Engine superclass for all the optimizers.
"""
lp = LMParticleGroupCollection(s, region_size=region_size, damping=damping,
run_length=run_length, decrease_damp_factor=decrease_damp_factor,
get_cos=collect_stats, max_iter=max_iter, **kwargs)
lp.do_run_2()
if collect_stats:
        return lp.stats

def plot_mixing_lines(self, p=None, rv=None, **kwargs):
r'''Plot lines of constant mixing ratio.
Adds lines of constant mixing ratio (isohumes) to the
    plot. The default style of these lines is thin dashed green lines.
    These can be overridden using keyword arguments.
Parameters
----------
rv : array_like, optional
1-dimensional array of unitless mixing ratio values to plot. If
none are given, default values are used.
p : array_like, optional
1-dimensional array of pressure values to be included in the
isohumes. If not specified, they will be linearly distributed
across the current plotted pressure range.
kwargs
Other keyword arguments to pass to
`matplotlib.collections.LineCollection`
See Also
--------
`matplotlib.collections.LineCollection`
'''
for artist in self._mixing_lines:
artist.remove()
self._mixing_lines = []
# Default mixing level values if necessary
if rv is None:
rv = np.array([
0.1e-3, 0.2e-3, 0.5e-3, 1e-3, 1.5e-3, 2e-3, 3e-3, 4e-3, 6e-3,
8e-3, 10e-3, 12e-3, 15e-3, 20e-3, 30e-3, 40e-3,
50e-3]).reshape(-1, 1)
else:
rv = np.asarray(rv).reshape(-1, 1)
# Set pressure range if necessary
if p is None:
p = np.linspace(min(self.get_ylim()), max(self.get_ylim()))
else:
p = np.asarray(p)
# Assemble data for plotting
Td = calculate(
'Td', p=p, rv=rv, p_units='hPa', rv_units='kg/kg',
Td_units='degC')
Td_label = calculate('Td', p=550, p_units='hPa', rv=rv,
Td_units='degC')
linedata = [np.vstack((t, p)).T for t in Td]
# Add to plot
kwargs.setdefault('clip_on', True)
kwargs.setdefault('colors', '#166916')
kwargs.setdefault('linestyles', '--')
kwargs.setdefault('alpha', 1)
kwargs.setdefault('linewidth', 0.5)
kwargs.setdefault('zorder', 1.1)
collection = LineCollection(linedata, **kwargs)
self._mixing_lines.append(collection)
self.add_collection(collection)
rv = rv.flatten() * 1000
for i in range(len(rv)):
if rv[i] < 1:
format_string = '{:.1f}'
else:
format_string = '{:.0f}'
t = self.text(Td_label[i], 550, format_string.format(rv[i]),
fontsize=8, ha='right', va='center', rotation=60,
color='#166916', bbox={
'facecolor': 'w', 'edgecolor': 'w', 'alpha': 0,
}, zorder=1.2)
t.set_clip_on(True)
        self._mixing_lines.append(t)

def join_channel(self, channel):
"""
Join a different chat channel on Twitch.
Note, this function returns immediately, but the switch might
take a moment
:param channel: name of the channel (without #)
"""
self.s.send(('JOIN #%s\r\n' % channel).encode('utf-8'))
if self.verbose:
        print('JOIN #%s\r\n' % channel)

def form_node(cls):
"""A class decorator to finalize fully derived FormNode subclasses."""
assert issubclass(cls, FormNode)
res = attrs(init=False, slots=True)(cls)
res._args = []
res._required_args = 0
res._rest_arg = None
state = _FormArgMode.REQUIRED
for field in fields(res):
if 'arg_mode' in field.metadata:
if state is _FormArgMode.REST:
raise RuntimeError('rest argument must be last')
if field.metadata['arg_mode'] is _FormArgMode.REQUIRED:
if state is _FormArgMode.OPTIONAL:
raise RuntimeError('required arg after optional arg')
res._args.append(field)
res._required_args += 1
elif field.metadata['arg_mode'] is _FormArgMode.OPTIONAL:
state = _FormArgMode.OPTIONAL
res._args.append(field)
elif field.metadata['arg_mode'] is _FormArgMode.REST:
state = _FormArgMode.REST
res._rest_arg = field
else:
assert 0
    return res

def index(ubifs, lnum, offset, inodes={}, bad_blocks=[]):
"""Walk the index gathering Inode, Dir Entry, and File nodes.
Arguments:
Obj:ubifs -- UBIFS object.
Int:lnum -- Logical erase block number.
Int:offset -- Offset in logical erase block.
Dict:inodes -- Dict of ino/dent/file nodes keyed to inode number.
Returns:
Dict:inodes -- Dict of ino/dent/file nodes keyed to inode number.
'ino' -- Inode node.
'data' -- List of data nodes if present.
'dent' -- List of directory entry nodes if present.
"""
try:
if len(bad_blocks):
if lnum in bad_blocks:
return
ubifs.file.seek((ubifs.leb_size * lnum) + offset)
buf = ubifs.file.read(UBIFS_COMMON_HDR_SZ)
chdr = nodes.common_hdr(buf)
        log(index, '%s file addr: %s' % (chdr, ubifs.file.last_read_addr()))
verbose_display(chdr)
node_buf = ubifs.file.read(chdr.len - UBIFS_COMMON_HDR_SZ)
file_offset = ubifs.file.last_read_addr()
except Exception as e:
if str(e) == 'Bad Read Offset Request' and settings.warn_only_block_read_errors:
bad_blocks.append(lnum)
return
else:
error(index, 'Fatal', 'LEB: %s, UBIFS offset: %s, error: %s' % (lnum, ((ubifs.leb_size * lnum) + offset), e))
if chdr.node_type == UBIFS_IDX_NODE:
try:
idxn = nodes.idx_node(node_buf)
except Exception as e:
if settings.warn_only_block_read_errors:
error(index, 'Error', 'Problem at file address: %s extracting idx_node: %s' % (file_offset, e))
return
else:
error(index, 'Fatal', 'Problem at file address: %s extracting idx_node: %s' % (file_offset, e))
log(index, '%s file addr: %s' % (idxn, file_offset))
verbose_display(idxn)
branch_idx = 0
for branch in idxn.branches:
verbose_log(index, '-------------------')
log(index, '%s file addr: %s' % (branch, file_offset + UBIFS_IDX_NODE_SZ + (branch_idx * UBIFS_BRANCH_SZ)))
verbose_display(branch)
index(ubifs, branch.lnum, branch.offs, inodes, bad_blocks)
branch_idx += 1
elif chdr.node_type == UBIFS_INO_NODE:
try:
inon = nodes.ino_node(node_buf)
except Exception as e:
if settings.warn_only_block_read_errors:
error(index, 'Error', 'Problem at file address: %s extracting ino_node: %s' % (file_offset, e))
return
else:
error(index, 'Fatal', 'Problem at file address: %s extracting ino_node: %s' % (file_offset, e))
ino_num = inon.key['ino_num']
log(index, '%s file addr: %s, ino num: %s' % (inon, file_offset, ino_num))
verbose_display(inon)
if not ino_num in inodes:
inodes[ino_num] = {}
inodes[ino_num]['ino'] = inon
elif chdr.node_type == UBIFS_DATA_NODE:
try:
datn = nodes.data_node(node_buf, (ubifs.leb_size * lnum) + UBIFS_COMMON_HDR_SZ + offset + UBIFS_DATA_NODE_SZ)
except Exception as e:
if settings.warn_only_block_read_errors:
error(index, 'Error', 'Problem at file address: %s extracting data_node: %s' % (file_offset, e))
return
else:
error(index, 'Fatal', 'Problem at file address: %s extracting data_node: %s' % (file_offset, e))
ino_num = datn.key['ino_num']
log(index, '%s file addr: %s, ino num: %s' % (datn, file_offset, ino_num))
verbose_display(datn)
if not ino_num in inodes:
inodes[ino_num] = {}
if not 'data' in inodes[ino_num]:
inodes[ino_num]['data']= []
inodes[ino_num]['data'].append(datn)
elif chdr.node_type == UBIFS_DENT_NODE:
try:
dn = nodes.dent_node(node_buf)
except Exception as e:
if settings.warn_only_block_read_errors:
error(index, 'Error', 'Problem at file address: %s extracting dent_node: %s' % (file_offset, e))
return
else:
error(index, 'Fatal', 'Problem at file address: %s extracting dent_node: %s' % (file_offset, e))
ino_num = dn.key['ino_num']
log(index, '%s file addr: %s, ino num: %s' % (dn, file_offset, ino_num))
verbose_display(dn)
if not ino_num in inodes:
inodes[ino_num] = {}
if not 'dent' in inodes[ino_num]:
inodes[ino_num]['dent']= []
        inodes[ino_num]['dent'].append(dn)

def added(self):
'''
Returns all keys that have been added.
If the keys are in child dictionaries they will be represented with
. notation
'''
def _added(diffs, prefix):
keys = []
for key in diffs.keys():
if isinstance(diffs[key], dict) and 'old' not in diffs[key]:
keys.extend(_added(diffs[key],
prefix='{0}{1}.'.format(prefix, key)))
elif diffs[key]['old'] == self.NONE_VALUE:
if isinstance(diffs[key]['new'], dict):
keys.extend(
_added(diffs[key]['new'],
prefix='{0}{1}.'.format(prefix, key)))
else:
keys.append('{0}{1}'.format(prefix, key))
return keys
        return sorted(_added(self._diffs, prefix=''))

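# For illustration, a hypothetical internal diff structure; NONE_VALUE is the
# sentinel the class uses for "key absent on the old side".
_example_diffs = {
    'server': {
        'port': {'old': NONE_VALUE, 'new': 8080},                  # added key
        'host': {'old': 'a.example.com', 'new': 'b.example.com'},  # changed key
    },
}
# With self._diffs set to this, added() returns ['server.port'].
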
def cursor_up(self, count=None):
"""Move cursor up the indicated # of lines in same column.
Cursor stops at top margin.
:param int count: number of lines to skip.
"""
top, _bottom = self.margins or Margins(0, self.lines - 1)
    self.cursor.y = max(self.cursor.y - (count or 1), top)

def printstartfinish(verb, inp=None, kcount=None):
r"""Print start and finish with time measure and kernel count."""
if inp:
if verb > 1:
ttxt = str(timedelta(seconds=default_timer() - inp))
ktxt = ' '
if kcount:
ktxt += str(kcount) + ' kernel call(s)'
print('\n:: empymod END; runtime = ' + ttxt + ' ::' + ktxt + '\n')
else:
t0 = default_timer()
if verb > 2:
print("\n:: empymod START ::\n")
        return t0

def _init_img_params(param):
"""
Initialize 2D image-type parameters that can accept either a
single or two values.
"""
if param is not None:
param = np.atleast_1d(param)
if len(param) == 1:
param = np.repeat(param, 2)
    return param

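# Usage sketch (requires numpy, imported in the source module as np):
print(_init_img_params(3))       # [3 3]  single value repeated for 2D
print(_init_img_params((2, 5)))  # [2 5]  pair kept as-is
print(_init_img_params(None))    # None   passed through
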
def free_memory(self):
"""Free memory signal."""
self.main.free_memory()
QTimer.singleShot(self.INITIAL_FREE_MEMORY_TIME_TRIGGER,
lambda: self.main.free_memory())
QTimer.singleShot(self.SECONDARY_FREE_MEMORY_TIME_TRIGGER,
                         lambda: self.main.free_memory())

def full_keywords(soup):
"author keywords list including inline tags, such as italic"
if not raw_parser.author_keywords(soup):
return []
    return list(map(node_contents_str, raw_parser.author_keywords(soup)))

def _connect():
'''
Return server object used to interact with Jenkins.
:return: server object used to interact with Jenkins
'''
jenkins_url = __salt__['config.get']('jenkins.url') or \
__salt__['config.get']('jenkins:url') or \
__salt__['pillar.get']('jenkins.url')
jenkins_user = __salt__['config.get']('jenkins.user') or \
__salt__['config.get']('jenkins:user') or \
__salt__['pillar.get']('jenkins.user')
jenkins_password = __salt__['config.get']('jenkins.password') or \
__salt__['config.get']('jenkins:password') or \
__salt__['pillar.get']('jenkins.password')
if not jenkins_url:
raise SaltInvocationError('No Jenkins URL found.')
return jenkins.Jenkins(jenkins_url,
username=jenkins_user,
                           password=jenkins_password)

def iter_filths():
"""Iterate over all instances of filth"""
for filth_cls in iter_filth_clss():
if issubclass(filth_cls, RegexFilth):
m = next(re.finditer(r"\s+", "fake pattern string"))
yield filth_cls(m)
else:
            yield filth_cls()

def classes(self):
"""return all class nodes in the diagram"""
        return [o for o in self.objects if isinstance(o.node, astroid.ClassDef)]

def retrieve_file_handles_of_same_dataset(self, **args):
'''
:return: List of handles, or empty list. Should never return None.
:raise: SolrSwitchedOff
:raise SolrError: If both strategies to find file handles failed.
'''
mandatory_args = ['drs_id', 'version_number', 'data_node']
esgfpid.utils.check_presence_of_mandatory_args(args, mandatory_args)
LOGGER.debug('Looking for files of dataset "%s", version "%s".',
args['drs_id'], str(args['version_number']))
if self.__switched_on:
return self.__retrieve_file_handles_of_same_dataset(**args)
else:
msg = 'Cannot retrieve handles of files of the same dataset.'
        raise esgfpid.exceptions.SolrSwitchedOff(msg)

def attach_zone(geoid, organization_id_or_slug):
'''Attach a zone <geoid> restricted to level for a given <organization>.'''
organization = Organization.objects.get_by_id_or_slug(
organization_id_or_slug)
if not organization:
log.error('No organization found for %s', organization_id_or_slug)
geozone = GeoZone.objects.get(id=geoid)
if not geozone:
log.error('No geozone found for %s', geoid)
log.info('Attaching {organization} with {geozone.name}'.format(
organization=organization, geozone=geozone))
organization.zone = geozone.id
organization.save()
    log.info('Done')

def get_group(self, group_id):
"""
Return specified group.
Returns a Command.
"""
def process_result(result):
return Group(self, result)
return Command('get', [ROOT_GROUPS, group_id],
process_result=process_result) | Return specified group.
Returns a Command. |
def open(cls, pkg_file):
"""
Return an |OpcPackage| instance loaded with the contents of
*pkg_file*.
"""
pkg_reader = PackageReader.from_file(pkg_file)
package = cls()
Unmarshaller.unmarshal(pkg_reader, package, PartFactory)
return package | Return an |OpcPackage| instance loaded with the contents of
*pkg_file*. |
def status(self, remote=False):
"""
Return the connection status, both locally and remotely.
The local connection status is a dictionary that gives:
* the count of multiple queries sent to the server.
* the count of single queries sent to the server.
* the count of actions sent to the server.
* the count of actions executed successfully by the server.
* the count of actions queued to go to the server.
The remote connection status includes whether the server is live,
as well as data about version and build. The server data is
cached, unless the remote flag is specified.
:param remote: whether to query the server for its latest status
:return: tuple of status dicts: (local, server).
"""
if remote:
components = urlparse.urlparse(self.endpoint)
try:
result = self.session.get(components[0] + "://" + components[1] + "/status", timeout=self.timeout)
except Exception as e:
if self.logger: self.logger.debug("Failed to connect to server for status: %s", e)
result = None
if result and result.status_code == 200:
self.server_status = result.json()
self.server_status["endpoint"] = self.endpoint
elif result:
if self.logger: self.logger.debug("Server status response not understandable: Status: %d, Body: %s",
result.status_code, result.text)
self.server_status = {"endpoint": self.endpoint,
"status": ("Unexpected HTTP status " + str(result.status_code) + " at: " +
strftime("%d %b %Y %H:%M:%S +0000", gmtime()))}
else:
self.server_status = {"endpoint": self.endpoint,
"status": "Unreachable at: " + strftime("%d %b %Y %H:%M:%S +0000", gmtime())}
return self.local_status, self.server_status | Return the connection status, both locally and remotely.
The local connection status is a dictionary that gives:
* the count of multiple queries sent to the server.
* the count of single queries sent to the server.
* the count of actions sent to the server.
* the count of actions executed successfully by the server.
* the count of actions queued to go to the server.
The remote connection status includes whether the server is live,
as well as data about version and build. The server data is
cached, unless the remote flag is specified.
:param remote: whether to query the server for its latest status
:return: tuple of status dicts: (local, server). |
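A brief usage sketch; `conn` is a hypothetical instance of the class that defines this method (the class is not named in the source):

local, server = conn.status(remote=True)  # force a fresh query of the server
print(local)   # dict of the query/action counters described above
print(server)  # server version/build data, or an error status if unreachable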
def _default(cls, opts):
"""Setup default logger"""
level = getattr(logging, opts.log_level, logging.DEBUG)
logger = logging.getLogger('luigi-interface')
logger.setLevel(level)
stream_handler = logging.StreamHandler()
stream_handler.setLevel(level)
formatter = logging.Formatter('%(levelname)s: %(message)s')
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
return True | Setup default logger |
def GetHostMemPhysFreeMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemPhysFreeMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS: raise VMGuestLibException(ret)
return counter.value | Undocumented. |
def Type_string(self, text, interval = 0, dl = 0):
"""键盘输入字符串,interval是字符间输入时间间隔,单位"秒"
"""
self.Delay(dl)
self.keyboard.type_string(text, interval) | Type a string on the keyboard; interval is the pause between characters, in seconds. |
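A hypothetical usage sketch; `kb` stands in for an instance of the class that defines this method (not named in the source):

kb.Type_string("hello world", interval=0.05)    # 50 ms pause between characters
kb.Type_string("fast input", interval=0, dl=1)  # wait 1 s, then type at full speed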
def footprint(self,nside):
"""
Download the survey footprint for HEALpix pixels.
"""
import healpy
import ugali.utils.projector
if nside > 2**9: raise Exception("Overflow error: nside must be <=2**9")
pix = np.arange(healpy.nside2npix(nside),dtype='int')
footprint = np.zeros(healpy.nside2npix(nside),dtype='bool')
ra,dec = ugali.utils.projector.pixToAng(nside,pix)
table_name = 'Pix%i'%nside
self.upload(np.array([pix,ra,dec]).T, ['pix','ra','dec'], name=table_name)
radius = healpy.nside2resol(nside, arcmin=True)
query="""
SELECT t.pix, dbo.fInFootprintEq(t.ra, t.dec, %g)
FROM %s AS t
"""%(radius, table_name) | Download the survey footprint for HEALpix pixels. |
def mod_watch(name, **kwargs):
'''
Execute a cmd function based on a watch call
.. note::
This state exists to support special handling of the ``watch``
:ref:`requisite <requisites>`. It should not be called directly.
Parameters for this function should be set by the state being triggered.
'''
if kwargs['sfun'] in ('wait', 'run', 'watch'):
if kwargs.get('stateful'):
kwargs.pop('stateful')
return _reinterpreted_state(run(name, **kwargs))
return run(name, **kwargs)
elif kwargs['sfun'] == 'wait_script' or kwargs['sfun'] == 'script':
if kwargs.get('stateful'):
kwargs.pop('stateful')
return _reinterpreted_state(script(name, **kwargs))
return script(name, **kwargs)
elif kwargs['sfun'] == 'wait_call' or kwargs['sfun'] == 'call':
if kwargs.get('func'):
func = kwargs.pop('func')
return call(name, func, **kwargs)
else:
return {'name': name,
'changes': {},
'comment': (
'cmd.{0[sfun]} needs a named parameter func'
).format(kwargs),
'result': False}
return {'name': name,
'changes': {},
'comment': 'cmd.{0[sfun]} does not work with the watch requisite, '
'please use cmd.wait or cmd.wait_script'.format(kwargs),
'result': False} | Execute a cmd function based on a watch call
.. note::
This state exists to support special handling of the ``watch``
:ref:`requisite <requisites>`. It should not be called directly.
Parameters for this function should be set by the state being triggered. |
def get_id(self, request_data, parameter_name='id'):
"""Extract an integer from request data."""
if parameter_name not in request_data:
raise ParseError("`{}` parameter is required".format(parameter_name))
id_parameter = request_data.get(parameter_name, None)
if not isinstance(id_parameter, int):
raise ParseError("`{}` parameter not an integer".format(parameter_name))
return id_parameter | Extract an integer from request data. |
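A standalone, runnable sketch of the same validation logic, with a stand-in for the framework's ParseError:

class ParseError(Exception):
    # Stand-in for the framework's ParseError (an assumption).
    pass

def get_id(request_data, parameter_name='id'):
    if parameter_name not in request_data:
        raise ParseError("`{}` parameter is required".format(parameter_name))
    id_parameter = request_data.get(parameter_name, None)
    if not isinstance(id_parameter, int):
        raise ParseError("`{}` parameter not an integer".format(parameter_name))
    return id_parameter

get_id({'id': 42})    # -> 42
get_id({'id': '42'})  # raises ParseError: `id` parameter not an integer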
def __add_bgedge(self, bgedge, merge=True):
""" Adds supplied :class:`bg.edge.BGEdge` object to current instance of :class:`BreakpointGraph`.
Checks that vertices in the supplied :class:`bg.edge.BGEdge` instance are actually present in the current :class:`BreakpointGraph` if the **merge** option is provided. Otherwise a new edge is added to the current :class:`BreakpointGraph`.
:param bgedge: instance of :class:`bg.edge.BGEdge`, information from which is to be added to the current :class:`BreakpointGraph`
:type bgedge: :class:`bg.edge.BGEdge`
:param merge: a flag to merge supplied information from multi-color perspective into a first existing edge between two supplied vertices
:type merge: ``Boolean``
:return: ``None``, performs inplace changes
"""
if bgedge.vertex1 in self.bg and bgedge.vertex2 in self.bg[bgedge.vertex1] and merge:
key = min(self.bg[bgedge.vertex1][bgedge.vertex2].keys())
self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["multicolor"] += bgedge.multicolor
self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["data"] = {}
else:
self.bg.add_edge(bgedge.vertex1, bgedge.vertex2, attr_dict={"multicolor": deepcopy(bgedge.multicolor),
"data": bgedge.data})
self.cache_valid["overall_set_of_colors"] = False | Adds supplied :class:`bg.edge.BGEdge` object to current instance of :class:`BreakpointGraph`.
Checks that vertices in the supplied :class:`bg.edge.BGEdge` instance are actually present in the current :class:`BreakpointGraph` if the **merge** option is provided. Otherwise a new edge is added to the current :class:`BreakpointGraph`.
:param bgedge: instance of :class:`bg.edge.BGEdge`, information from which is to be added to the current :class:`BreakpointGraph`
:type bgedge: :class:`bg.edge.BGEdge`
:param merge: a flag to merge supplied information from multi-color perspective into a first existing edge between two supplied vertices
:type merge: ``Boolean``
:return: ``None``, performs inplace changes |
def is_source_code_missing_open_brackets(source_code):
"""
:param str source_code:
:return: whether this source code snippet (e.g. one line) has unmatched closing brackets, i.e. is missing the corresponding opening brackets
:rtype: bool
"""
open_brackets = "[{("
close_brackets = "]})"
last_close_bracket = [-1] # stack
counters = [0] * len(open_brackets)
# Go in reverse order through the tokens.
# Thus, we first should see the closing brackets, and then the matching opening brackets.
for t_type, t_content in reversed(list(parse_py_statements(source_code))):
if t_type != "op":
continue # we are from now on only interested in ops (including brackets)
if t_content in open_brackets:
idx = open_brackets.index(t_content)
if last_close_bracket[-1] == idx: # ignore if we haven't seen the closing one
counters[idx] -= 1
del last_close_bracket[-1]
elif t_content in close_brackets:
idx = close_brackets.index(t_content)
counters[idx] += 1
last_close_bracket += [idx]
return not all([c == 0 for c in counters]) | :param str source_code:
:return: whether this source code snippet (e.g. one line) has unmatched closing brackets, i.e. is missing the corresponding opening brackets
:rtype: bool |
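Expected behavior, assuming parse_py_statements tokenizes like the standard Python tokenizer (a sketch, not taken from the source):

is_source_code_missing_open_brackets("foo(bar)")    # False: brackets balanced
is_source_code_missing_open_brackets("foo(bar))")   # True: unmatched ')'
is_source_code_missing_open_brackets("], axis=0)")  # True: a continuation line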
def build_all(self,
verbose=False,
hide_base_schemas=True,
hide_implicit_types=True,
hide_implicit_preds=True):
"""
Extract all ontology entities from an RDF graph and construct Python representations of them.
"""
if verbose:
printDebug("Scanning entities...", "green")
printDebug("----------", "comment")
self.build_ontologies()
if verbose:
printDebug("Ontologies.........: %d" % len(self.all_ontologies), "comment")
self.build_classes(hide_base_schemas, hide_implicit_types)
if verbose:
printDebug("Classes............: %d" % len(self.all_classes), "comment")
self.build_properties(hide_implicit_preds)
if verbose:
printDebug("Properties.........: %d" % len(self.all_properties), "comment")
if verbose:
printDebug("..annotation.......: %d" % len(self.all_properties_annotation), "comment")
if verbose:
printDebug("..datatype.........: %d" % len(self.all_properties_datatype), "comment")
if verbose:
printDebug("..object...........: %d" % len(self.all_properties_object), "comment")
self.build_skos_concepts()
if verbose:
printDebug("Concepts (SKOS)....: %d" % len(self.all_skos_concepts), "comment")
self.build_shapes()
if verbose:
printDebug("Shapes (SHACL).....: %d" % len(self.all_shapes), "comment")
# self.__computeTopLayer()
self.__computeInferredProperties()
if verbose:
printDebug("----------", "comment") | Extract all ontology entities from an RDF graph and construct Python representations of them. |
def parse_line(self, line):
"""Parse a single line of JSON and write modified JSON back."""
prefix = ""
# ignore comma at start of lines
if line.startswith(","):
line, prefix = line[1:], ","
j = json.loads(line)
yield j
self.io.write_line(prefix + json.dumps(j)) | Parse a single line of JSON and write modified JSON back. |
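A standalone sketch of the same logic, with a plain callable standing in for self.io.write_line (an assumption):

import json

def parse_line(line, write_line=print):
    # write_line stands in for self.io.write_line.
    prefix = ""
    if line.startswith(","):
        line, prefix = line[1:], ","
    j = json.loads(line)
    yield j
    write_line(prefix + json.dumps(j))

list(parse_line(',{"a": 1}'))  # prints ,{"a": 1} and returns [{'a': 1}]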
def svd_solve(U, s, V, b, s_tol=1e-15):
"""
Solve the system :math:`A X = b` for :math:`X`.
Here :math:`A` is a positive semi-definite matrix using the singular value
decomposition. This truncates the SVD so only dimensions corresponding to
non-negative and sufficiently large singular values are used.
Parameters
----------
U: ndarray
The :code:`U` factor of :code:`U, s, V = svd(A)` positive
semi-definite matrix.
s: ndarray
The :code:`s` factor of :code:`U, s, V = svd(A)` positive
semi-definite matrix.
V: ndarray
The :code:`V` factor of :code:`U, s, V = svd(A)` positive
semi-definite matrix.
b: ndarray
An array or matrix
s_tol: float
Cutoff for small singular values. Singular values smaller than
:code:`s_tol` are clamped to :code:`s_tol`.
Returns
-------
X: ndarray
The result of :math:`X = A^{-1} b`
"""
# Test shapes for efficient computations
n = U.shape[0]
assert(b.shape[0] == n)
m = b.shape[1] if np.ndim(b) > 1 else 1
# Auto clamp SVD based on threshold
sclamp = np.maximum(s, s_tol)
# Inversion factors
ss = 1. / np.sqrt(sclamp)
U2 = U * ss[np.newaxis, :]
V2 = ss[:, np.newaxis] * V
if m < n:
# Few queries
X = U2.dot(V2.dot(b)) # O(n^2 (2m))
else:
X = U2.dot(V2).dot(b) # O(n^2 (m + n))
return X | Solve the system :math:`A X = b` for :math:`X`.
Here :math:`A` is a positive semi-definite matrix using the singular value
decomposition. This truncates the SVD so only dimensions corresponding to
non-negative and sufficiently large singular values are used.
Parameters
----------
U: ndarray
The :code:`U` factor of :code:`U, s, V = svd(A)` positive
semi-definite matrix.
s: ndarray
The :code:`s` factor of :code:`U, s, V = svd(A)` positive
semi-definite matrix.
V: ndarray
The :code:`V` factor of :code:`U, s, V = svd(A)` positive
semi-definite matrix.
b: ndarray
An array or matrix
s_tol: float
Cutoff for small singular values. Singular values smaller than
:code:`s_tol` are clamped to :code:`s_tol`.
Returns
-------
X: ndarray
The result of :math:`X = A^{-1} b` |
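A runnable check of svd_solve against numpy's direct solver; the function returns only X, so the sketch uses only that:

import numpy as np

# Build a small positive definite system and compare against a direct solve.
rng = np.random.RandomState(0)
A_half = rng.randn(5, 5)
A = A_half.dot(A_half.T)     # symmetric positive definite matrix
b = rng.randn(5, 2)

U, s, V = np.linalg.svd(A)   # for symmetric PSD A, the returned V equals U.T
X = svd_solve(U, s, V, b)
print(np.allclose(X, np.linalg.solve(A, b)))  # True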
def G(self, y, t):
"""Noise coefficient matrix G of the complete network system
dy = f(y, t)dt + G(y, t).dot(dW)
(for an ODE network system without noise this function is not used)
Args:
y (array of shape (d,)): where d is the dimension of the overall
state space of the complete network system.
Returns:
G (array of shape (d, m)): where m is the number of independent
Wiener processes driving the complete network system. The noise
coefficient matrix G defines the stochastic term of the system.
"""
if self._independent_noise:
# then G matrix consists of submodel Gs diagonally concatenated:
res = np.zeros((self.dimension, self.nnoises))
offset = 0
for j, m in enumerate(self.submodels):
slicej = slice(self._si[j], self._si[j+1])
ix = (slicej, slice(offset, offset + self._nsubnoises[j]))
res[ix] = m.G(y[slicej], t) # submodel noise coefficient matrix
offset += self._nsubnoises[j]
else:
# identical driving: G consists of submodel Gs stacked vertically
res = np.empty((self.dimension, self.nnoises))
for j, m in enumerate(self.submodels):
slicej = slice(self._si[j], self._si[j+1])
ix = (slicej, slice(None))
res[ix] = m.G(y[slicej], t) # submodel noise coefficient matrix
return res | Noise coefficient matrix G of the complete network system
dy = f(y, t)dt + G(y, t).dot(dW)
(for an ODE network system without noise this function is not used)
Args:
y (array of shape (d,)): where d is the dimension of the overall
state space of the complete network system.
Returns:
G (array of shape (d, m)): where m is the number of independent
Wiener processes driving the complete network system. The noise
coefficient matrix G defines the stochastic term of the system. |
def predict(self, text:str, n_words:int=1, no_unk:bool=True, temperature:float=1., min_p:float=None, sep:str=' ',
decoder=decode_spec_tokens):
"Return the `n_words` that come after `text`."
ds = self.data.single_dl.dataset
self.model.reset()
xb,yb = self.data.one_item(text)
new_idx = []
for _ in range(n_words): #progress_bar(range(n_words), leave=False):
res = self.pred_batch(batch=(xb,yb))[0][-1]
#if len(new_idx) == 0: self.model[0].select_hidden([0])
if no_unk: res[self.data.vocab.stoi[UNK]] = 0.
if min_p is not None:
if (res >= min_p).float().sum() == 0:
warn(f"There is no item with probability >= {min_p}, try a lower value.")
else: res[res < min_p] = 0.
if temperature != 1.: res.pow_(1 / temperature)
idx = torch.multinomial(res, 1).item()
new_idx.append(idx)
xb = xb.new_tensor([idx])[None]
return text + sep + sep.join(decoder(self.data.vocab.textify(new_idx, sep=None))) | Return the `n_words` that come after `text`. |
def get_bromo_fnames_da(d_em_kHz, d_bg_kHz, a_em_kHz, a_bg_kHz,
ID='1+2+3+4+5+6', t_tot='480', num_p='30', pM='64',
t_step=0.5e-6, D=1.2e-11, dir_=''):
"""Get filenames for donor and acceptor timestamps for the given parameters
"""
clk_p = t_step/32. # with t_step=0.5us -> 15.625 ns
E_sim = 1.*a_em_kHz/(a_em_kHz + d_em_kHz)
FRET_val = 100.*E_sim
print("Simulated FRET value: %.1f%%" % FRET_val)
d_em_kHz_str = "%04d" % d_em_kHz
a_em_kHz_str = "%04d" % a_em_kHz
d_bg_kHz_str = "%04.1f" % d_bg_kHz
a_bg_kHz_str = "%04.1f" % a_bg_kHz
print("D: EM %s BG %s " % (d_em_kHz_str, d_bg_kHz_str))
print("A: EM %s BG %s " % (a_em_kHz_str, a_bg_kHz_str))
fname_d = ('ph_times_{t_tot}s_D{D}_{np}P_{pM}pM_'
'step{ts_us}us_ID{ID}_EM{em}kHz_BG{bg}kHz.npy').format(
em=d_em_kHz_str, bg=d_bg_kHz_str, t_tot=t_tot, pM=pM,
np=num_p, ID=ID, ts_us=t_step*1e6, D=D)
fname_a = ('ph_times_{t_tot}s_D{D}_{np}P_{pM}pM_'
'step{ts_us}us_ID{ID}_EM{em}kHz_BG{bg}kHz.npy').format(
em=a_em_kHz_str, bg=a_bg_kHz_str, t_tot=t_tot, pM=pM,
np=num_p, ID=ID, ts_us=t_step*1e6, D=D)
print(fname_d)
print(fname_a)
name = ('BroSim_E{:.1f}_dBG{:.1f}k_aBG{:.1f}k_'
'dEM{:.0f}k').format(FRET_val, d_bg_kHz, a_bg_kHz, d_em_kHz)
return dir_+fname_d, dir_+fname_a, name, clk_p, E_sim | Get filenames for donor and acceptor timestamps for the given parameters |
def is_active(self, timperiods):
"""
Determine whether this result modulation is active now
:return: True if we are in the period, otherwise False
:rtype: bool
"""
now = int(time.time())
timperiod = timperiods[self.modulation_period]
if not timperiod or timperiod.is_time_valid(now):
return True
return False | Determine whether this result modulation is active now
:return: True if we are in the period, otherwise False
:rtype: bool |
def mtf_resnet_base():
"""Set of hyperparameters."""
hparams = common_hparams.basic_params1()
hparams.no_data_parallelism = True
hparams.use_fixed_batch_size = True
hparams.batch_size = 32
hparams.max_length = 3072
hparams.hidden_size = 256
hparams.label_smoothing = 0.0
# 8-way model-parallelism
hparams.add_hparam("mesh_shape", "batch:8")
hparams.add_hparam("layout", "batch:batch")
hparams.add_hparam("filter_size", 1024)
hparams.add_hparam("num_layers", 6)
# Share weights between input and target embeddings
hparams.shared_embedding = True
hparams.shared_embedding_and_softmax_weights = True
hparams.optimizer = "Adafactor"
hparams.learning_rate_schedule = "rsqrt_decay"
hparams.learning_rate_warmup_steps = 10000
hparams.add_hparam("d_kv", 32)
# Image related hparams
hparams.add_hparam("img_len", 32)
hparams.add_hparam("num_channels", 3)
hparams.add_hparam("row_blocks", 1)
hparams.add_hparam("col_blocks", 1)
hparams.add_hparam("rows_size", 32)
hparams.add_hparam("cols_size", 32)
# Model-specific parameters
hparams.add_hparam("layer_sizes", [3, 4, 6, 3])
hparams.add_hparam("filter_sizes", [64, 64, 128, 256, 512])
hparams.add_hparam("is_cifar", False)
# Variable init
hparams.initializer = "normal_unit_scaling"
hparams.initializer_gain = 2.
# TODO(nikip): Change optimization scheme?
hparams.learning_rate = 0.1
return hparams | Set of hyperparameters. |
def analyte_2_massname(s):
"""
Converts analytes in format 'Al27' to '27Al'.
Parameters
----------
s : str
of format [0-9]{1,3}[A-z]{1,3}
Returns
-------
str
Name in format [A-z]{1,3}[0-9]{1,3}
"""
el = re.match('.*?([A-z]{1,3}).*?', s).groups()[0]
m = re.match('.*?([0-9]{1,3}).*?', s).groups()[0]
return m + el | Converts analytes in format 'Al27' to '27Al'.
Parameters
----------
s : str
of format [0-9]{1,3}[A-z]{1,3}
Returns
-------
str
Name in format [A-z]{1,3}[0-9]{1,3} |
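A standalone, runnable copy for quick verification:

import re

def analyte_2_massname(s):
    # Pull out the element symbol and the mass number, then swap their order.
    el = re.match('.*?([A-z]{1,3}).*?', s).groups()[0]
    m = re.match('.*?([0-9]{1,3}).*?', s).groups()[0]
    return m + el

print(analyte_2_massname('Al27'))  # '27Al'
print(analyte_2_massname('Ca43'))  # '43Ca'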
def coordination_geometry_symmetry_measures(self, coordination_geometry,
tested_permutations=False,
points_perfect=None,
optimization=None):
"""
Returns the symmetry measures of a given coordination_geometry for a set of permutations depending on
the permutation setup. Depending on the parameters of the LocalGeometryFinder and on the coordination
geometry, different methods are called.
:param coordination_geometry: Coordination geometry for which the symmetry measures are looked for
:return: the symmetry measures of a given coordination_geometry for a set of permutations
:raise: NotImplementedError if the permutation_setup does not exist
"""
if tested_permutations:
tested_permutations = set()
if self.permutations_safe_override:
raise ValueError('No permutations safe override anymore')
csms = []
permutations = []
algos = []
local2perfect_maps = []
perfect2local_maps = []
for algo in coordination_geometry.algorithms:
if algo.algorithm_type == EXPLICIT_PERMUTATIONS:
return self.coordination_geometry_symmetry_measures_standard(
coordination_geometry, algo,
points_perfect=points_perfect,
optimization=optimization)
if algo.algorithm_type == SEPARATION_PLANE:
cgsm = self.coordination_geometry_symmetry_measures_separation_plane(
coordination_geometry,
algo,
tested_permutations=tested_permutations,
points_perfect=points_perfect)
csm, perm, algo, local2perfect_map, perfect2local_map = cgsm
csms.extend(csm)
permutations.extend(perm)
algos.extend(algo)
local2perfect_maps.extend(local2perfect_map)
perfect2local_maps.extend(perfect2local_map)
return csms, permutations, algos, local2perfect_maps, perfect2local_maps | Returns the symmetry measures of a given coordination_geometry for a set of permutations depending on
the permutation setup. Depending on the parameters of the LocalGeometryFinder and on the coordination
geometry, different methods are called.
:param coordination_geometry: Coordination geometry for which the symmetry measures are looked for
:return: the symmetry measures of a given coordination_geometry for a set of permutations
:raise: NotImplementedError if the permutation_setup does not exist |
def alignment_display(self):
"""Fills screen with uppercase E's for screen focus and alignment."""
self.dirty.update(range(self.lines))
for y in range(self.lines):
for x in range(self.columns):
self.buffer[y][x] = self.buffer[y][x]._replace(data="E") | Fills screen with uppercase E's for screen focus and alignment. |
def _check_retcode(cmd):
'''
Simple internal wrapper for cmdmod.retcode
'''
return salt.modules.cmdmod.retcode(cmd, output_loglevel='quiet', ignore_retcode=True) == 0 | Simple internal wrapper for cmdmod.retcode |
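A hypothetical usage sketch from within the same Salt module (the path is illustrative only):

# True when the shell command exits 0; the return code is not logged.
if _check_retcode('test -d /var/lib/myapp'):
    pass  # the directory exists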
def pkg_desc(self):
"""Print slack-desc by repository
"""
options = [
"-p",
"--desc"
]
flag = ["--color="]
colors = [
"red",
"green",
"yellow",
"cyan",
"grey"
]
tag = ""
for arg in self.args:
if arg.startswith(flag[0]):
tag = arg[len(flag[0]):]
self.args.remove(arg)
break
if tag and tag not in colors:
print("\nslpkg: Error: Available colors {0}\n".format(
colors))
raise SystemExit()
if (len(self.args) == 3 and self.args[0] in options and
self.args[1] in self.meta.repositories and tag in colors):
PkgDesc(self.args[2], self.args[1], tag).view()
elif (len(self.args) == 3 and self.args[0] in options and
self.args[1] in self.meta.repositories):
PkgDesc(self.args[2], self.args[1], paint="").view()
elif (len(self.args) > 1 and self.args[0] in options and
self.args[1] not in self.meta.repositories):
usage(self.args[1])
else:
usage("") | Print slack-desc by repository |
def remove_qc_reports(portal):
"""Removes the action Quality Control from Reports
"""
logger.info("Removing Reports > Quality Control ...")
ti = portal.reports.getTypeInfo()
actions = map(lambda action: action.id, ti._actions)
for index, action in enumerate(actions, start=0):
if action == 'qualitycontrol':
ti.deleteActions([index])
break
logger.info("Removing Reports > Quality Control [DONE]") | Removes the action Quality Control from Reports |
def remove_isoforms(ids):
"""
This is more or less a hack to remove the GMAP multiple mappings. Multiple
GMAP mappings can be seen given the names .mrna1, .mrna2, etc.
"""
key = lambda x: x.rsplit(".", 1)[0]
iso_number = lambda x: get_number(x.split(".")[-1])
ids = sorted(ids, key=key)
newids = []
for k, ii in groupby(ids, key=key):
min_i = min(list(ii), key=iso_number)
newids.append(min_i)
return newids | This is more or less a hack to remove the GMAP multiple mappings. Multiple
GMAP mappings can be seen given the names .mrna1, .mrna2, etc. |
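A runnable sketch with a simple stand-in for the get_number helper, which is defined elsewhere in the original module:

import re
from itertools import groupby

def get_number(s):
    # Stand-in: extract the integer from e.g. 'mrna2' -> 2.
    return int(re.search(r'\d+', s).group())

def remove_isoforms(ids):
    key = lambda x: x.rsplit(".", 1)[0]
    iso_number = lambda x: get_number(x.split(".")[-1])
    ids = sorted(ids, key=key)
    newids = []
    for k, ii in groupby(ids, key=key):
        # Keep only the lowest-numbered isoform per gene.
        newids.append(min(list(ii), key=iso_number))
    return newids

print(remove_isoforms(['geneA.mrna2', 'geneA.mrna1', 'geneB.mrna1']))
# ['geneA.mrna1', 'geneB.mrna1']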
def read_reg(self, addr):
""" Read memory address in target """
# we don't call check_command here because read_reg() function is called
# when detecting chip type, and the way we check for success (STATUS_BYTES_LENGTH) is different
# for different chip types (!)
val, data = self.command(self.ESP_READ_REG, struct.pack('<I', addr))
if byte(data, 0) != 0:
raise FatalError.WithResult("Failed to read register address %08x" % addr, data)
return val | Read memory address in target |
def _cnvkit_metrics(cnns, target_bed, antitarget_bed, cov_interval, items):
"""Estimate noise of a sample using a flat background.
Only used for panel/targeted data due to memory issues with whole genome
samples.
"""
if cov_interval == "genome":
return cnns
target_cnn = [x["file"] for x in cnns if x["cnntype"] == "target"][0]
background_file = "%s-flatbackground.cnn" % utils.splitext_plus(target_cnn)[0]
background_file = cnvkit_background([], background_file, items, target_bed, antitarget_bed)
cnr_file, data = _cnvkit_fix_base(cnns, background_file, items, "-flatbackground")
cns_file = _cnvkit_segment(cnr_file, cov_interval, data)
metrics_file = "%s-metrics.txt" % utils.splitext_plus(target_cnn)[0]
if not utils.file_exists(metrics_file):
with file_transaction(data, metrics_file) as tx_metrics_file:
cmd = [_get_cmd(), "metrics", "-o", tx_metrics_file, "-s", cns_file, "--", cnr_file]
do.run(_prep_cmd(cmd, tx_metrics_file), "CNVkit metrics")
metrics = _read_metrics_file(metrics_file)
out = []
for cnn in cnns:
cnn["metrics"] = metrics
out.append(cnn)
return out | Estimate noise of a sample using a flat background.
Only used for panel/targeted data due to memory issues with whole genome
samples. |
def _QueryHashes(self, digests):
"""Queries VirusTotal for a specfic hashes.
Args:
digests (list[str]): hashes to look up.
Returns:
dict[str, object]: JSON response or None on error.
"""
url_parameters = {'apikey': self._api_key, 'resource': ', '.join(digests)}
try:
json_response = self.MakeRequestAndDecodeJSON(
self._VIRUSTOTAL_API_REPORT_URL, 'GET', params=url_parameters)
except errors.ConnectionError as exception:
json_response = None
logger.error('Unable to query VirusTotal with error: {0!s}.'.format(
exception))
return json_response | Queries VirusTotal for specific hashes.
Args:
digests (list[str]): hashes to look up.
Returns:
dict[str, object]: JSON response or None on error. |
def add_flow_exception(exc):
"""Add an exception that should not be logged.
The argument must be a subclass of Exception.
"""
global _flow_exceptions
if not isinstance(exc, type) or not issubclass(exc, Exception):
raise TypeError('Expected an Exception subclass, got %r' % (exc,))
as_set = set(_flow_exceptions)
as_set.add(exc)
_flow_exceptions = tuple(as_set) | Add an exception that should not be logged.
The argument must be a subclass of Exception. |
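A usage sketch with a made-up control-flow exception:

class RedirectException(Exception):
    # Hypothetical framework exception, not part of the original source.
    pass

add_flow_exception(RedirectException)  # accepted: an Exception subclass
add_flow_exception('oops')             # raises TypeError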
def _nfw_func(self, x):
"""
Classic NFW function in terms of arctanh and arctan
:param x: r/Rs
:return:
"""
c = 0.000001
if isinstance(x, np.ndarray):
x[np.where(x<c)] = c
nfwvals = np.ones_like(x)
inds1 = np.where(x < 1)
inds2 = np.where(x > 1)
nfwvals[inds1] = (1 - x[inds1] ** 2) ** -.5 * np.arctanh((1 - x[inds1] ** 2) ** .5)
nfwvals[inds2] = (x[inds2] ** 2 - 1) ** -.5 * np.arctan((x[inds2] ** 2 - 1) ** .5)
return nfwvals
elif isinstance(x, float) or isinstance(x, int):
x = max(x, c)
if x == 1:
return 1
if x < 1:
return (1 - x ** 2) ** -.5 * np.arctanh((1 - x ** 2) ** .5)
else:
return (x ** 2 - 1) ** -.5 * np.arctan((x ** 2 - 1) ** .5) | Classic NFW function in terms of arctanh and arctan
:param x: r/Rs
:return: |
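A standalone sketch of the scalar branch, showing that the two expressions meet continuously at x = 1:

import numpy as np

def nfw_func(x):
    c = 0.000001
    x = max(x, c)
    if x == 1:
        return 1
    if x < 1:
        return (1 - x ** 2) ** -.5 * np.arctanh((1 - x ** 2) ** .5)
    return (x ** 2 - 1) ** -.5 * np.arctan((x ** 2 - 1) ** .5)

print(nfw_func(0.999), nfw_func(1), nfw_func(1.001))  # ~1.0007, 1, ~0.9993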
def is_dir(path):
"""Determine if a Path or string is a directory on the file system."""
try:
return path.expanduser().absolute().is_dir()
except AttributeError:
return os.path.isdir(os.path.abspath(os.path.expanduser(str(path)))) | Determine if a Path or string is a directory on the file system. |
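A runnable check of both branches:

from pathlib import Path

print(is_dir(Path('~')))  # Path branch: expanduser() resolves the home directory
print(is_dir('~'))        # str branch: falls back to os.path, same result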
def minify_print(
ast,
obfuscate=False,
obfuscate_globals=False,
shadow_funcname=False,
drop_semi=False):
"""
Simple minify print function; returns a string rendering of an input
AST of an ES5 program
Arguments
ast
The AST to minify print
obfuscate
If True, obfuscate identifiers nested in each scope with a
shortened identifier name to further reduce output size.
Defaults to False.
obfuscate_globals
Also do the same to identifiers nested on the global scope; do
not enable unless the renaming of global variables in a not
fully deterministic manner into something else is guaranteed to
not cause problems with the generated code and other code that
in the same environment that it will be executed in.
Defaults to False for the reason above.
drop_semi
Drop semicolons whenever possible (e.g. the final semicolons of
a given block).
"""
return ''.join(chunk.text for chunk in minify_printer(
obfuscate, obfuscate_globals, shadow_funcname, drop_semi)(ast)) | Simple minify print function; returns a string rendering of an input
AST of an ES5 program
Arguments
ast
The AST to minify print
obfuscate
If True, obfuscate identifiers nested in each scope with a
shortened identifier name to further reduce output size.
Defaults to False.
obfuscate_globals
Also do the same to identifiers nested on the global scope; do
not enable unless the renaming of global variables in a not
fully deterministic manner into something else is guaranteed to
not cause problems with the generated code and other code that
in the same environment that it will be executed in.
Defaults to False for the reason above.
drop_semi
Drop semicolons whenever possible (e.g. the final semicolons of
a given block). |
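This looks like the calmjs.parse minifier; a usage sketch under that assumption (the pairing is inferred, not stated in the source):

# Assumes calmjs.parse is installed and exposes the es5() parser.
from calmjs.parse import es5

program = es5('var magnitude = 42; console.log(magnitude);')
print(minify_print(program))
# var magnitude=42;console.log(magnitude);
print(minify_print(program, obfuscate=True, obfuscate_globals=True))
# e.g. var a=42;console.log(a);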
def dropEvent(self, event):
"""
Listens for query's being dragged and dropped onto this tree.
:param event | <QDropEvent>
"""
# overload the current filtering options
data = event.mimeData()
if data.hasFormat('application/x-orb-table') and \
data.hasFormat('application/x-orb-query'):
tableName = self.tableTypeName()
if nativestring(data.data('application/x-orb-table')) == tableName:
data = nativestring(data.data('application/x-orb-query'))
query = Q.fromXmlString(data)
self.setQuery(query)
return
super(XOrbTreeWidget, self).dropEvent(event) | Listens for query's being dragged and dropped onto this tree.
:param event | <QDropEvent> |
def _prompt(letters='yn', default=None):
"""
Wait for the user to type a character (and hit Enter). If the user enters
one of the characters in `letters`, return that character. If the user
hits Enter without entering a character, and `default` is specified,
returns `default`. Otherwise, asks the user to enter a character again.
"""
while True:
try:
input_text = sys.stdin.readline().strip()
except KeyboardInterrupt:
sys.exit(0)
if input_text and input_text in letters:
return input_text
if default is not None and input_text == '':
return default
print('Come again?') | Wait for the user to type a character (and hit Enter). If the user enters
one of the characters in `letters`, return that character. If the user
hits Enter without entering a character, and `default` is specified,
returns `default`. Otherwise, asks the user to enter a character again. |
def from_config(cls, cp, section, variable_args):
"""Returns a distribution based on a configuration file.
The parameters
for the distribution are retrieved from the section titled
"[`section`-`variable_args`]" in the config file.
The file to construct the distribution from must be provided by setting
`filename`. Boundary arguments can be provided in the same way as
described in `get_param_bounds_from_config`.
.. code-block:: ini
[{section}-{tag}]
name = fromfile
filename = ra_prior.hdf
min-ra = 0
max-ra = 6.28
Parameters
----------
cp : pycbc.workflow.WorkflowConfigParser
A parsed configuration file that contains the distribution
options.
section : str
Name of the section in the configuration file.
variable_args : str
The names of the parameters for this distribution, separated by
`prior.VARARGS_DELIM`. These must appear in the "tag" part
of the section header.
Returns
-------
BoundedDist
A distribution instance from the pycbc.inference.prior module.
"""
return bounded.bounded_from_config(cls, cp, section, variable_args,
bounds_required=False) | Returns a distribution based on a configuration file.
The parameters
for the distribution are retrieved from the section titled
"[`section`-`variable_args`]" in the config file.
The file to construct the distribution from must be provided by setting
`filename`. Boundary arguments can be provided in the same way as
described in `get_param_bounds_from_config`.
.. code-block:: ini
[{section}-{tag}]
name = fromfile
filename = ra_prior.hdf
min-ra = 0
max-ra = 6.28
Parameters
----------
cp : pycbc.workflow.WorkflowConfigParser
A parsed configuration file that contains the distribution
options.
section : str
Name of the section in the configuration file.
variable_args : str
The names of the parameters for this distribution, separated by
`prior.VARARGS_DELIM`. These must appear in the "tag" part
of the section header.
Returns
-------
BoundedDist
A distribution instance from the pycbc.inference.prior module. |
def goodnode(self, nodelist):
''' Goes through the provided list
and returns the first server node
that does not return an error.
'''
l = len(nodelist)
for n in range(self.current_node(l), l):
self.msg.message("Trying node " + str(n) + ": " + nodelist[n])
try:
req = urllib.request.Request(url=nodelist[n])
urllib.request.urlopen(req)
except HTTPError as e:
self.msg.error_message(e)
self.currentnode = int(self.currentnode) + 1
else:
self.msg.message("Using " + nodelist[n])
return nodelist[n] | Goes through the provided list
and returns the first server node
that does not return an error. |
def remnant_mass(eta, ns_g_mass, ns_sequence, chi, incl, shift):
"""
Function that determines the remnant disk mass of
an NS-BH system using the fit to numerical-relativity
results discussed in Foucart PRD 86, 124007 (2012).
Parameters
-----------
eta: float
the symmetric mass ratio of the binary
ns_g_mass: float
NS gravitational mass (in solar masses)
ns_sequence: 3D-array
contains the sequence data in the form NS gravitational
mass (in solar masses), NS baryonic mass (in solar
masses), NS compactness (dimensionless)
chi: float
the BH dimensionless spin parameter
incl: float
the inclination angle between the BH spin and the orbital
angular momentum in radians
shift: float
an amount to be subtracted from the remnant mass predicted
by the model (in solar masses)
Returns
----------
remnant_mass: float
The remnant mass in solar masses
"""
# Sanity checks
if not (eta>0. and eta<=0.25 and abs(chi)<=1):
print('The BH spin magnitude must be <=1 and eta must be between 0 and 0.25')
print('This script was launched with ns_mass={0}, eta={1}, chi={2}, inclination={3}\n'.format(ns_g_mass, eta, chi, incl))
raise Exception('Unphysical parameters!')
# Binary mass ratio, defined to be > 1
q = (1+math.sqrt(1-4*eta)-2*eta)/eta*0.5
# NS compactness and rest mass
ns_compactness = ns_g_mass_to_ns_compactness(ns_g_mass, ns_sequence)
ns_b_mass = ns_g_mass_to_ns_b_mass(ns_g_mass, ns_sequence)
# Sanity checks
if not (ns_compactness>0 and q>=1):
print('A positive NS compactness and a mass ratio that is >1 must be obtained.')
print('This script was launched with ns_mass={0}, eta={1}, chi={2}, inclination={3}'.format(ns_b_mass, eta, chi, incl))
print('and obtained ns_compactness={0} and q={1}.'.format(ns_compactness, q))
print('SOMETHING WENT WRONG!!\n')
raise Exception('Unphysical parameters!')
# Calculate the dimensionless parameter kappa
kappa = q*ns_compactness
# Effective equatorial spin parameter needed to determine the torus mass
chi_eff = bh_effective_spin(chi, incl)
# Sanity checks
if not abs(chi_eff)<=1:
print('The effective BH spin magnitude must be <=1')
print('This script was launched with ns_mass={0}, eta={1}, chi={2}, inclination={3}'.format(ns_b_mass, eta, chi, incl))
print('and obtained chi_eff={0}.'.format(chi_eff))
print('SOMETHING WENT WRONG!!\n')
raise Exception('Unphysical parameters!')
# Taking the 1st element with full_output=1 avoids some annoying messages on stdout
xi = scipy.optimize.fsolve(xi_eq, 100., args=(kappa,chi_eff,q), full_output=1)[0]
# Fit parameters and tidal correction
alpha = 0.296 # +/- 0.011
beta = 0.171 # +/- 0.008
# The remnant mass over the NS rest mass
remnant_mass = alpha*xi*(1-2*ns_compactness)-beta*kappa*PG_ISSO_solver(chi_eff,0)
# The remnant mass in the same units as the NS rest mass (presumably solar masses)
remnant_mass = remnant_mass*ns_b_mass - shift
return remnant_mass | Function that determines the remnant disk mass of
an NS-BH system using the fit to numerical-relativity
results discussed in Foucart PRD 86, 124007 (2012).
Parameters
-----------
eta: float
the symmetric mass ratio of the binary
ns_g_mass: float
NS gravitational mass (in solar masses)
ns_sequence: 3D-array
contains the sequence data in the form NS gravitational
mass (in solar masses), NS baryonic mass (in solar
masses), NS compactness (dimensionless)
chi: float
the BH dimensionless spin parameter
incl: float
the inclination angle between the BH spin and the orbital
angular momentum in radians
shift: float
an amount to be subtracted from the remnant mass predicted
by the model (in solar masses)
Returns
----------
remnant_mass: float
The remnant mass in solar masses |