def _find_valid_index(self, how):
"""
Retrieves the index of the first valid value.
Parameters
----------
how : {'first', 'last'}
Use this parameter to change between the first or last valid index.
Returns
-------
idx_first_valid : type of index
"""
assert how in ['first', 'last']
if len(self) == 0: # early stop
return None
is_valid = ~self.isna()
if self.ndim == 2:
is_valid = is_valid.any(1) # reduce axis 1
if how == 'first':
idxpos = is_valid.values[::].argmax()
if how == 'last':
idxpos = len(self) - 1 - is_valid.values[::-1].argmax()
chk_notna = is_valid.iat[idxpos]
idx = self.index[idxpos]
if not chk_notna:
return None
    return idx
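
# Usage sketch: _find_valid_index backs pandas' public first_valid_index()/
# last_valid_index() accessors; a minimal example of that public behavior,
# assuming pandas and numpy are installed.
import numpy as np
import pandas as pd

s = pd.Series([np.nan, np.nan, 3.0, 4.0])
print(s.first_valid_index())  # 2 -- index label of the first non-NA value
print(s.last_valid_index())   # 3 -- index label of the last non-NA value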
def present(name, Name=None,
ScheduleExpression=None,
EventPattern=None,
Description=None,
RoleArn=None,
State=None,
Targets=None,
region=None, key=None, keyid=None, profile=None):
'''
    Ensure the CloudWatch event rule exists.
name
The name of the state definition
Name
Name of the event rule. Defaults to the value of the 'name' param if
not provided.
ScheduleExpression
        The scheduling expression. For example, ``cron(0 20 * * ? *)`` or
        ``rate(5 minutes)``.
EventPattern
The event pattern.
Description
A description of the rule
State
Indicates whether the rule is ENABLED or DISABLED.
RoleArn
The Amazon Resource Name (ARN) of the IAM role associated with the
rule.
Targets
        A list of resources to be invoked when the rule is triggered.
region
Region to connect to.
key
Secret key to be used.
keyid
Access key to be used.
profile
A dict with region, key and keyid, or a pillar key (string) that
contains a dict with region, key and keyid.
'''
    Name = Name if Name else name
    ret = {'name': Name,
           'result': True,
           'comment': '',
           'changes': {}
           }
if isinstance(Targets, six.string_types):
Targets = salt.utils.json.loads(Targets)
if Targets is None:
Targets = []
r = __salt__['boto_cloudwatch_event.exists'](Name=Name,
region=region, key=key, keyid=keyid, profile=profile)
if 'error' in r:
ret['result'] = False
ret['comment'] = 'Failed to create event rule: {0}.'.format(r['error']['message'])
return ret
if not r.get('exists'):
if __opts__['test']:
ret['comment'] = 'CloudWatch event rule {0} is set to be created.'.format(Name)
ret['result'] = None
return ret
r = __salt__['boto_cloudwatch_event.create_or_update'](Name=Name,
ScheduleExpression=ScheduleExpression,
EventPattern=EventPattern,
Description=Description,
RoleArn=RoleArn,
State=State,
region=region, key=key, keyid=keyid, profile=profile)
if not r.get('created'):
ret['result'] = False
ret['comment'] = 'Failed to create event rule: {0}.'.format(r['error']['message'])
return ret
_describe = __salt__['boto_cloudwatch_event.describe'](Name,
region=region, key=key, keyid=keyid, profile=profile)
if 'error' in _describe:
ret['result'] = False
ret['comment'] = 'Failed to create event rule: {0}.'.format(_describe['error']['message'])
ret['changes'] = {}
return ret
ret['changes']['old'] = {'rule': None}
ret['changes']['new'] = _describe
        ret['comment'] = 'CloudWatch event rule {0} created.'.format(Name)
if bool(Targets):
r = __salt__['boto_cloudwatch_event.put_targets'](Rule=Name,
Targets=Targets,
region=region, key=key, keyid=keyid, profile=profile)
if 'error' in r:
ret['result'] = False
ret['comment'] = 'Failed to create event rule: {0}.'.format(r['error']['message'])
ret['changes'] = {}
return ret
ret['changes']['new']['rule']['Targets'] = Targets
return ret
ret['comment'] = os.linesep.join([ret['comment'], 'CloudWatch event rule {0} is present.'.format(Name)])
ret['changes'] = {}
    # rule exists, ensure config matches
_describe = __salt__['boto_cloudwatch_event.describe'](Name=Name,
region=region, key=key, keyid=keyid, profile=profile)
if 'error' in _describe:
ret['result'] = False
ret['comment'] = 'Failed to update event rule: {0}.'.format(_describe['error']['message'])
ret['changes'] = {}
return ret
_describe = _describe.get('rule')
r = __salt__['boto_cloudwatch_event.list_targets'](Rule=Name,
region=region, key=key, keyid=keyid, profile=profile)
if 'error' in r:
ret['result'] = False
ret['comment'] = 'Failed to update event rule: {0}.'.format(r['error']['message'])
ret['changes'] = {}
return ret
_describe['Targets'] = r.get('targets', [])
need_update = False
rule_vars = {'ScheduleExpression': 'ScheduleExpression',
'EventPattern': 'EventPattern',
'Description': 'Description',
'RoleArn': 'RoleArn',
'State': 'State',
'Targets': 'Targets'}
for invar, outvar in six.iteritems(rule_vars):
if _describe[outvar] != locals()[invar]:
need_update = True
ret['changes'].setdefault('new', {})[invar] = locals()[invar]
ret['changes'].setdefault('old', {})[invar] = _describe[outvar]
if need_update:
if __opts__['test']:
msg = 'CloudWatch event rule {0} set to be modified.'.format(Name)
ret['comment'] = msg
ret['result'] = None
return ret
ret['comment'] = os.linesep.join([ret['comment'], 'CloudWatch event rule to be modified'])
r = __salt__['boto_cloudwatch_event.create_or_update'](Name=Name,
ScheduleExpression=ScheduleExpression,
EventPattern=EventPattern,
Description=Description,
RoleArn=RoleArn,
State=State,
region=region, key=key, keyid=keyid, profile=profile)
if not r.get('created'):
ret['result'] = False
ret['comment'] = 'Failed to update event rule: {0}.'.format(r['error']['message'])
ret['changes'] = {}
return ret
if _describe['Targets'] != Targets:
removes = [i.get('Id') for i in _describe['Targets']]
log.error(Targets)
if bool(Targets):
for target in Targets:
tid = target.get('Id', None)
if tid is not None and tid in removes:
ix = removes.index(tid)
removes.pop(ix)
r = __salt__['boto_cloudwatch_event.put_targets'](Rule=Name,
Targets=Targets,
region=region, key=key, keyid=keyid, profile=profile)
if 'error' in r:
ret['result'] = False
ret['comment'] = 'Failed to update event rule: {0}.'.format(r['error']['message'])
ret['changes'] = {}
return ret
if bool(removes):
r = __salt__['boto_cloudwatch_event.remove_targets'](Rule=Name,
Ids=removes,
region=region, key=key, keyid=keyid, profile=profile)
if 'error' in r:
ret['result'] = False
ret['comment'] = 'Failed to update event rule: {0}.'.format(r['error']['message'])
ret['changes'] = {}
return ret
    return ret
def priority(s):
"""Return priority for a given object."""
# REZ: Previously this value was calculated in place many times which is
# expensive. Do it once early.
# REZ: Changed this to output a list, so that can nicely sort "validate"
# items by the sub-priority of their schema
type_of_s = type(s)
if type_of_s in (list, tuple, set, frozenset):
return [ITERABLE]
if type_of_s is dict:
return [DICT]
if issubclass(type_of_s, type):
return [TYPE]
if hasattr(s, 'validate'):
p = [VALIDATOR]
if hasattr(s, "_schema"):
p.extend(priority(s._schema))
return p
if callable(s):
return [CALLABLE]
else:
        return [COMPARABLE]
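
# Ordering sketch for priority(). The COMPARABLE..ITERABLE constants are not
# shown above; here they are assumed to be module-level ints where a higher
# value means the schema is matched earlier (mirroring the `schema` library).
COMPARABLE, CALLABLE, VALIDATOR, TYPE, DICT, ITERABLE = range(6)

candidates = [42, str, {'key': int}, [int], len]
ordered = sorted(candidates, key=priority, reverse=True)
# -> the iterable first, then the dict, the type, the callable, and finally 42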
def is_filtered(self, require=None, ignore=None):
"""Return ``True`` for filtered calls
    :param iterable ignore: if set, the filters to ignore; be sure to include
        'PASS' when overriding. Defaults to ``['PASS']``
:param iterable require: if set, the filters to require for returning
``True``
"""
ignore = ignore or ["PASS"]
if "FT" not in self.data or not self.data["FT"]:
return False
for ft in self.data["FT"]:
if ft in ignore:
continue # skip
if not require:
return True
elif ft in require:
return True
    return False
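
# Self-contained sketch of the filter semantics, using a minimal stand-in
# object (the real call object keeps its per-sample filters in self.data["FT"]).
class _FakeCall(object):
    is_filtered = is_filtered

    def __init__(self, ft):
        self.data = {"FT": ft}

print(_FakeCall(["PASS"]).is_filtered())                # False - PASS is ignored by default
print(_FakeCall(["q10"]).is_filtered())                 # True  - a non-ignored filter is set
print(_FakeCall(["q10"]).is_filtered(require=["s50"]))  # False - q10 is not a required filter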
def gridnet(np, pfile, plenfile, tlenfile, gordfile, outlet=None, workingdir=None,
mpiexedir=None, exedir=None, log_file=None, runtime_file=None, hostfile=None):
"""Run gridnet"""
fname = TauDEM.func_name('gridnet')
return TauDEM.run(FileClass.get_executable_fullpath(fname, exedir),
{'-p': pfile, '-o': outlet}, workingdir,
None,
{'-plen': plenfile, '-tlen': tlenfile, '-gord': gordfile},
{'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np},
                      {'logfile': log_file, 'runtimefile': runtime_file})
def head(self, lines=10):
"""
Return the top lines of the file.
"""
self.file.seek(0)
for i in range(lines):
if self.seek_next_line() == -1:
break
end_pos = self.file.tell()
self.file.seek(0)
data = self.file.read(end_pos)
for t in self.LINE_TERMINATORS:
if data.endswith(t):
# Only terminators _between_ lines should be preserved.
            # Otherwise the last line's terminator would be treated as separating it from an empty line.
data = data[:-len(t)]
break
if data:
return self.splitlines(data)
else:
        return []
def pokeStorable(self, storable, objname, obj, container, visited=None, _stack=None, **kwargs):
"""
Arguments:
storable (StorableHandler): storable instance.
objname (any): record reference.
obj (any): object to be serialized.
container (any): container.
visited (dict): map of the previously serialized objects that are
passed by references; keys are the objects' IDs.
_stack (CallStack): stack of parent object names.
Trailing keyword arguments are passed to the :class:`Storable` instance's
:attr:`~Storable.poke`.
"""
#print((objname, storable.storable_type)) # debug
storable.poke(self, objname, obj, container, visited=visited, _stack=_stack, **kwargs)
try:
record = self.getRecord(objname, container)
except KeyError:
# fake storable; silently skip
if self.verbose:
print("skipping `{}` (type: {})".format(objname, storable.storable_type))
if 1 < self.verbose:
print(traceback.format_exc())
else:
self.setRecordAttr('type', storable.storable_type, record)
if storable.version is not None:
            self.setRecordAttr('version', from_version(storable.version), record)
def vhost_remove(cls, name):
""" Delete a vhost in a webaccelerator """
oper = cls.call('hosting.rproxy.vhost.delete', name)
cls.echo('Deleting your virtual host %s' % name)
cls.display_progress(oper)
    cls.echo('Your virtual host has been removed')
    return oper
def relationship(self, node):
"""
    Retrieve the relationship object for the first relationship between self and node.
:param node:
:return: StructuredRel
"""
self._check_node(node)
my_rel = _rel_helper(lhs='us', rhs='them', ident='r', **self.definition)
q = "MATCH " + my_rel + " WHERE id(them)={them} and id(us)={self} RETURN r LIMIT 1"
rels = self.source.cypher(q, {'them': node.id})[0]
if not rels:
return
rel_model = self.definition.get('model') or StructuredRel
    return self._set_start_end_cls(rel_model.inflate(rels[0][0]), node)
def batch(samples):
"""CWL: batch together per sample, joint and germline calls for ensemble combination.
Sets up groups of same sample/batch variant calls for ensemble calling, as
long as we have more than one caller per group.
"""
samples = [utils.to_single_data(x) for x in samples]
sample_order = [dd.get_sample_name(x) for x in samples]
batch_groups = collections.defaultdict(list)
for data in samples:
batch_samples = tuple(data.get("batch_samples", [dd.get_sample_name(data)]))
batch_groups[(batch_samples, dd.get_phenotype(data))].append(data)
out = []
for (batch_samples, phenotype), gsamples in batch_groups.items():
if len(gsamples) > 1:
batches = set([])
for d in gsamples:
batches |= set(dd.get_batches(d))
gsamples.sort(key=dd.get_variantcaller_order)
cur = copy.deepcopy(gsamples[0])
cur.update({"batch_id": sorted(list(batches))[0] if batches else "_".join(batch_samples),
"batch_samples": batch_samples,
"variants": {"variantcallers": [dd.get_variantcaller(d) for d in gsamples],
"calls": [d.get("vrn_file") for d in gsamples]}})
out.append(cur)
def by_original_order(d):
return min([sample_order.index(s) for s in d["batch_samples"] if s in sample_order])
    return sorted(out, key=by_original_order)
def return_daily_messages_count(self, sender):
""" Returns the number of messages sent in the last 24 hours so we can ensure the user does not exceed his messaging limits """
h24 = now() - timedelta(days=1)
    return Message.objects.filter(sender=sender, sent_at__gte=h24).count()
def send_command(self, cmd, priority=False):
"""
Flushes a command to the server as a bytes payload.
"""
if priority:
self._pending.insert(0, cmd)
else:
self._pending.append(cmd)
self._pending_size += len(cmd)
if self._pending_size > DEFAULT_PENDING_SIZE:
        yield self._flush_pending()
def _eval(self, memory, addr, n, **kwargs):
"""
Gets n solutions for an address.
"""
    return memory.state.solver.eval_upto(addr, n, exact=kwargs.pop('exact', self._exact), **kwargs)
def _commit_run_log(self):
    """ Commit the current run log to the backend. """
logger.debug('Committing run log for job {0}'.format(self.name))
    self.backend.commit_log(self.run_log)
def nt_quote_arg(arg):
"""Quote a command line argument according to Windows parsing rules"""
result = []
needquote = False
nb = 0
needquote = (" " in arg) or ("\t" in arg)
if needquote:
result.append('"')
for c in arg:
if c == '\\':
nb += 1
elif c == '"':
# double preceding backslashes, then add a \"
result.append('\\' * (nb*2) + '\\"')
nb = 0
else:
if nb:
result.append('\\' * nb)
nb = 0
result.append(c)
if nb:
result.append('\\' * nb)
if needquote:
result.append('\\' * nb) # double the trailing backslashes
result.append('"')
    return ''.join(result)
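
# Quoting behavior sketch (the rules follow Windows command-line parsing).
print(nt_quote_arg('simple'))        # simple            (no quoting needed)
print(nt_quote_arg('has space'))     # "has space"
print(nt_quote_arg('trailing\\'))    # trailing\         (lone backslash kept)
print(nt_quote_arg('say "hi" now'))  # "say \"hi\" now"  (quotes escaped, then wrapped)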
def get_interface_detail_output_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
    return callback(config)
def jsonHook(encoded):
"""Custom JSON decoder that allows construction of a new ``Smi``
instance from a decoded JSON object.
:param encoded: a JSON decoded object literal (a dict)
    :returns: "encoded" or one of these objects: :class:`Smi`,
:class:`MzmlScan`, :class:`MzmlProduct`, :class:`MzmlPrecursor`
"""
if '__Smi__' in encoded:
return Smi._fromJSON(encoded['__Smi__'])
elif '__MzmlScan__' in encoded:
return MzmlScan._fromJSON(encoded['__MzmlScan__'])
elif '__MzmlProduct__' in encoded:
return MzmlProduct._fromJSON(encoded['__MzmlProduct__'])
elif '__MzmlPrecursor__' in encoded:
return MzmlPrecursor._fromJSON(encoded['__MzmlPrecursor__'])
else:
        return encoded
def rebuildSmooth(self):
"""
    Rebuilds a smooth path based on the input points and set
    parameters for this item.
:return <QPainterPath>
"""
# collect the control points
points = self.controlPoints()
# create the path
path = QPainterPath()
if len(points) == 3:
x0, y0 = points[0]
x1, y1 = points[1]
xN, yN = points[2]
path.moveTo(x0, y0)
path.quadTo(x1, y1, xN, yN)
elif len(points) == 4:
x0, y0 = points[0]
x1, y1 = points[1]
x2, y2 = points[2]
xN, yN = points[3]
path.moveTo(x0, y0)
path.cubicTo(x1, y1, x2, y2, xN, yN)
elif len(points) == 6:
x0, y0 = points[0]
x1, y1 = points[1]
x2, y2 = points[2]
x3, y3 = points[3]
x4, y4 = points[4]
xN, yN = points[5]
xC = (x2 + x3) / 2.0
yC = (y2 + y3) / 2.0
path.moveTo(x0, y0)
path.cubicTo(x1, y1, x2, y2, xC, yC)
path.cubicTo(x3, y3, x4, y4, xN, yN)
else:
x0, y0 = points[0]
xN, yN = points[-1]
path.moveTo(x0, y0)
path.lineTo(xN, yN)
    return path
def _ClientPathToString(client_path, prefix=""):
"""Returns a path-like String of client_path with optional prefix."""
    return os.path.join(prefix, client_path.client_id, client_path.vfs_path)
def set(self, param, value):
""" sets the param to the value provided """
self.raw_dict[param] = value
    self.manifest.set(self.feature_name, param, value)
def handle_input(self, smtp_command, data=None):
"""Processes the given SMTP command with the (optional data).
[PUBLIC API]
"""
self._command_arguments = data
self.please_close_connection_after_response(False)
# SMTP commands must be treated as case-insensitive
command = smtp_command.upper()
try:
try:
self.state.execute(command)
except StateMachineError:
if command not in self.valid_commands:
self.reply(500, 'unrecognized command "%s"' % smtp_command)
else:
msg = 'Command "%s" is not allowed here' % smtp_command
allowed_transitions = self.state.allowed_actions()
if len(allowed_transitions) > 0:
                    msg += ', expected one of %s' % allowed_transitions
self.reply(503, msg)
except InvalidDataError:
e = sys.exc_info()[1]
self.reply(501, e.msg())
except InvalidParametersError:
# TODO: Get rid of InvalidParametersError, shouldn't be
# necessary anymore
e = sys.exc_info()[1]
if not e.response_sent:
msg = 'Syntactically invalid %s argument(s)' % smtp_command
self.reply(501, msg)
except PolicyDenial:
e = sys.exc_info()[1]
if not e.response_sent:
self.reply(e.code, e.reply_text)
finally:
if self.should_close_connection_after_response():
self.close_connection()
        self._command_arguments = None
def remove_ephemeral_listener(self, uid):
"""Remove ephemeral listener with given uid."""
self.ephemeral_listeners[:] = (listener for listener in self.ephemeral_listeners
                                   if listener['uid'] != uid)
def addMethod(self, m):
"""
Adds a L{Method} to the interface
"""
if m.nargs == -1:
m.nargs = len([a for a in marshal.genCompleteTypes(m.sigIn)])
m.nret = len([a for a in marshal.genCompleteTypes(m.sigOut)])
self.methods[m.name] = m
    self._xml = None
def pluralize(self, measure, singular, plural):
""" Returns a string that contains the measure (amount) and its plural
or singular form depending on the amount.
Parameters:
:param measure: Amount, value, always a numerical value
:param singular: The singular form of the chosen word
:param plural: The plural form of the chosen word
Returns:
String
"""
if measure == 1:
return "{} {}".format(measure, singular)
else:
        return "{} {}".format(measure, plural)
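
# Example output. pluralize() never touches self, so it can be exercised
# directly without an instance of the surrounding class.
print(pluralize(None, 1, "retry", "retries"))  # 1 retry
print(pluralize(None, 4, "retry", "retries"))  # 4 retries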
def _mirror_groups(self):
"""
Mirrors the user's LDAP groups in the Django database and updates the
user's membership.
"""
target_group_names = frozenset(self._get_groups().get_group_names())
current_group_names = frozenset(
self._user.groups.values_list("name", flat=True).iterator()
)
# These were normalized to sets above.
MIRROR_GROUPS_EXCEPT = self.settings.MIRROR_GROUPS_EXCEPT
MIRROR_GROUPS = self.settings.MIRROR_GROUPS
# If the settings are white- or black-listing groups, we'll update
# target_group_names such that we won't modify the membership of groups
# beyond our purview.
if isinstance(MIRROR_GROUPS_EXCEPT, (set, frozenset)):
target_group_names = (target_group_names - MIRROR_GROUPS_EXCEPT) | (
current_group_names & MIRROR_GROUPS_EXCEPT
)
elif isinstance(MIRROR_GROUPS, (set, frozenset)):
target_group_names = (target_group_names & MIRROR_GROUPS) | (
current_group_names - MIRROR_GROUPS
)
if target_group_names != current_group_names:
existing_groups = list(
Group.objects.filter(name__in=target_group_names).iterator()
)
existing_group_names = frozenset(group.name for group in existing_groups)
new_groups = [
Group.objects.get_or_create(name=name)[0]
for name in target_group_names
if name not in existing_group_names
]
        self._user.groups.set(existing_groups + new_groups)
def xpath_on_node(self, node, xpath, **kwargs):
"""
Return result of performing the given XPath query on the given node.
All known namespace prefix-to-URI mappings in the document are
automatically included in the XPath invocation.
If an empty/default namespace (i.e. None) is defined, this is
converted to the prefix name '_' so it can be used despite empty
namespace prefixes being unsupported by XPath.
"""
namespaces_dict = {}
if 'namespaces' in kwargs:
namespaces_dict.update(kwargs['namespaces'])
# Empty namespace prefix is not supported, convert to '_' prefix
if None in namespaces_dict:
default_ns_uri = namespaces_dict.pop(None)
namespaces_dict['_'] = default_ns_uri
# If no default namespace URI defined, use root's namespace (if any)
if not '_' in namespaces_dict:
root = self.get_impl_root(node)
qname, ns_uri, prefix, local_name = self._unpack_name(
root.tag, root)
if ns_uri:
namespaces_dict['_'] = ns_uri
# Include XMLNS namespace if it's not already defined
if not 'xmlns' in namespaces_dict:
namespaces_dict['xmlns'] = nodes.Node.XMLNS_URI
    return node.findall(xpath, namespaces_dict)
def send_message(self, id: str, message: str) -> Dict[str, Any]:
"""Send a message to a channel
For formatting options, see the documentation:
https://discordapp.com/developers/docs/resources/channel#create-message
Args:
id: channel snowflake id
message: your message (string)
Returns:
Dictionary object of the new message
"""
if not self.connected:
raise ValueError('Websocket not connected')
    return self._query(f'channels/{id}/messages', 'POST', {'content': message})
def get_request_token_secret(self, client_key, token, request):
"""Get request token secret.
    The request token object should have a ``secret`` attribute.
"""
log.debug('Get request token secret of %r for %r',
token, client_key)
tok = request.request_token or self._grantgetter(token=token)
if tok and tok.client_key == client_key:
request.request_token = tok
return tok.secret
    return None
def _manage_child_object(self, nurest_object, method=HTTP_METHOD_GET, async=False, callback=None, handler=None, response_choice=None, commit=False):
""" Low level child management. Send given HTTP method with given nurest_object to given ressource of current object
Args:
nurest_object: the NURESTObject object to manage
method: the HTTP method to use (GET, POST, PUT, DELETE)
callback: the callback to call at the end
handler: a custom handler to call when complete, before calling the callback
commit: True to auto commit changes in the current object
Returns:
Returns the object and connection (object, connection)
"""
url = None
if method == HTTP_METHOD_POST:
url = self.get_resource_url_for_child_type(nurest_object.__class__)
else:
url = self.get_resource_url()
if response_choice is not None:
url += '?responseChoice=%s' % response_choice
request = NURESTRequest(method=method, url=url, data=nurest_object.to_dict())
user_info = {'nurest_object': nurest_object, 'commit': commit}
if not handler:
handler = self._did_perform_standard_operation
if async:
return self.send_request(request=request, async=async, local_callback=handler, remote_callback=callback, user_info=user_info)
else:
connection = self.send_request(request=request, user_info=user_info)
        return handler(connection)
def add_user(self, user_id, custom_properties=None, headers=None, endpoint_url=None):
"""
Creates a new identified user if he doesn't exist.
:param str user_id: identified user's ID
:param dict custom_properties: user properties
:param dict headers: custom request headers (if isn't set default values are used)
:param str endpoint_url: where to send the request (if isn't set default value is used)
:return: Response
"""
endpoint_url = endpoint_url or self._endpoint_url
url = endpoint_url + '/users'
headers = headers or self._default_headers()
payload = {"user_id": user_id}
if custom_properties is not None:
payload["user_properties"] = custom_properties
response = requests.post(url, headers=headers, json=payload)
    return response
def _analyze_tree(self, tree):
"""Analyze given tree and create mapping of indexes to character
addresses.
"""
addresses = []
for text in self._iter_texts(tree):
for i, char in enumerate(text.content):
if char in whitespace:
char = ' '
addresses.append(CharAddress(char, text.element, text.attr, i))
# remove leading and trailing whitespace
while addresses and addresses[0].char == ' ':
del addresses[0]
while addresses and addresses[-1].char == ' ':
del addresses[-1]
    return addresses
def resource_groups(self):
"""Instance depends on the API version:
* 2016-02-01: :class:`ResourceGroupsOperations<azure.mgmt.resource.resources.v2016_02_01.operations.ResourceGroupsOperations>`
* 2016-09-01: :class:`ResourceGroupsOperations<azure.mgmt.resource.resources.v2016_09_01.operations.ResourceGroupsOperations>`
* 2017-05-10: :class:`ResourceGroupsOperations<azure.mgmt.resource.resources.v2017_05_10.operations.ResourceGroupsOperations>`
* 2018-02-01: :class:`ResourceGroupsOperations<azure.mgmt.resource.resources.v2018_02_01.operations.ResourceGroupsOperations>`
* 2018-05-01: :class:`ResourceGroupsOperations<azure.mgmt.resource.resources.v2018_05_01.operations.ResourceGroupsOperations>`
"""
api_version = self._get_api_version('resource_groups')
if api_version == '2016-02-01':
from .v2016_02_01.operations import ResourceGroupsOperations as OperationClass
elif api_version == '2016-09-01':
from .v2016_09_01.operations import ResourceGroupsOperations as OperationClass
elif api_version == '2017-05-10':
from .v2017_05_10.operations import ResourceGroupsOperations as OperationClass
elif api_version == '2018-02-01':
from .v2018_02_01.operations import ResourceGroupsOperations as OperationClass
elif api_version == '2018-05-01':
from .v2018_05_01.operations import ResourceGroupsOperations as OperationClass
else:
raise NotImplementedError("APIVersion {} is not available".format(api_version))
    return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
def client_factory(self):
"""
Custom client factory to set proxy options.
"""
if self._service.production:
url = self.production_url
else:
url = self.testing_url
proxy_options = dict()
https_proxy_setting = os.environ.get('PAYEX_HTTPS_PROXY') or os.environ.get('https_proxy')
http_proxy_setting = os.environ.get('PAYEX_HTTP_PROXY') or os.environ.get('http_proxy')
if https_proxy_setting:
proxy_options['https'] = https_proxy_setting
if http_proxy_setting:
proxy_options['http'] = http_proxy_setting
    return client.Client(url, proxy=proxy_options)
def guess_filename(obj):
"""Tries to guess the filename of the given object."""
name = getattr(obj, 'name', None)
if (name and isinstance(name, basestring) and name[0] != '<' and
name[-1] != '>'):
        return os.path.basename(name)
def open(self):
'''
attempts to open the database. if it gets a locked message, it will wait one second and try
again. if it is still locked, it will return an error
:return: None, None if successful
None, error if error
'''
cycle = 2
count = 0
while(True):
try:
DB.db = plyvel.DB(DB.dbpath, create_if_missing=True).prefixed_db(bytes(DB.project, 'utf-8'))
if DB.db:
break
except Exception as e:
# wait for other program to unlock the db
count+=1
time.sleep(1)
if count >= cycle:
return None, "DB error: {}".format(e)
    return None, None
def generateUniqueId(context, **kw):
""" Generate pretty content IDs.
"""
# get the config for this portal type from the system setup
config = get_config(context, **kw)
# get the variables map for later string interpolation
variables = get_variables(context, **kw)
# The new generate sequence number
number = 0
# get the sequence type from the global config
sequence_type = config.get("sequence_type", "generated")
# Sequence Type is "Counter", so we use the length of the backreferences or
# contained objects of the evaluated "context" defined in the config
if sequence_type in ["counter"]:
number = get_counted_number(context, config, variables, **kw)
# Sequence Type is "Generated", so the ID is constructed according to the
# configured split length
if sequence_type in ["generated"]:
number = get_generated_number(context, config, variables, **kw)
# store the new sequence number to the variables map for str interpolation
if isinstance(number, Alphanumber):
variables["alpha"] = number
variables["seq"] = to_int(number)
# The ID formatting template from user config, e.g. {sampleId}-R{seq:02d}
id_template = config.get("form", "")
# Interpolate the ID template
try:
new_id = id_template.format(**variables)
except KeyError, e:
logger.error('KeyError: {} not in id_template {}'.format(
e, id_template))
raise
normalized_id = api.normalize_filename(new_id)
logger.info("generateUniqueId: {}".format(normalized_id))
    return normalized_id
def xcorr(x, y, maxlags):
"""
Streamlined version of matplotlib's `xcorr`, without the plots.
:param x, y: NumPy arrays to cross-correlate
:param maxlags: Max number of lags; result will be `2*maxlags+1` in length
"""
xlen = len(x)
ylen = len(y)
assert xlen == ylen
c = np.correlate(x, y, mode=2)
# normalize
c /= np.sqrt(np.dot(x, x) * np.dot(y, y))
lags = np.arange(-maxlags, maxlags + 1)
c = c[xlen - 1 - maxlags:xlen + maxlags]
    return c
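
# Usage sketch: recover the lag between two signals (assumes numpy is installed).
import numpy as np

maxlags = 10
x = np.sin(np.linspace(0, 4 * np.pi, 64))
y = np.roll(x, 5)                    # y is x shifted by 5 samples
c = xcorr(x, y, maxlags)
lags = np.arange(-maxlags, maxlags + 1)
print(lags[np.argmax(c)])            # peak near +/-5; the sign depends on the correlation convention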
def generate_between_segment_description(
self,
between_expression,
get_between_description_format,
get_single_item_description
):
"""
Generates the between segment description
:param between_expression:
:param get_between_description_format:
:param get_single_item_description:
:return: The between segment description
"""
description = ""
between_segments = between_expression.split('-')
between_segment_1_description = get_single_item_description(between_segments[0])
between_segment_2_description = get_single_item_description(between_segments[1])
between_segment_2_description = between_segment_2_description.replace(
":00", ":59")
between_description_format = get_between_description_format(between_expression)
description += between_description_format.format(between_segment_1_description, between_segment_2_description)
    return description
def swap(self, key, items):
"""Set key to a copy of items and return the list that was previously
    stored if the key was set. If the key was not set, returns an empty list.
"""
if not isinstance(items, list):
raise ValueError("items must be a list")
return_value = []
with self._lock:
if key in self._dict:
return_value = self._dict[key]
# Make a copy since we don't want users keeping a reference that is
# outside the lock
self._dict[key] = items.copy()
        return return_value
def pointlist(points, sr):
"""Convert a list of the form [[x, y] ...] to a list of Point instances
with the given x, y coordinates."""
assert all(isinstance(pt, Point) or len(pt) == 2
for pt in points), "Point(s) not in [x, y] form"
return [coord if isinstance(coord, Point)
else Point(coord[0], coord[1], sr)
            for coord in points]
def do_identity(args):
    """Executes the identity command's subcommands.
"""
if args.subcommand == 'policy' and args.policy_cmd == 'create':
_do_identity_policy_create(args)
elif args.subcommand == 'policy' and args.policy_cmd == 'list':
_do_identity_policy_list(args)
elif args.subcommand == 'role' and args.role_cmd == 'create':
_do_identity_role_create(args)
elif args.subcommand == 'role' and args.role_cmd == 'list':
_do_identity_role_list(args)
else:
raise AssertionError(
'"{}" is not a valid subcommand of "identity"'.format(
                args.subcommand))
def urlencode2(query, doseq=0, safe="", querydelimiter="&"):
"""Encode a sequence of two-element tuples or dictionary into a URL query string.
If any values in the query arg are sequences and doseq is true, each
sequence element is converted to a separate parameter.
If the query arg is a sequence of two-element tuples, the order of the
parameters in the output will match the order of parameters in the
input.
"""
if hasattr(query,"items"):
# mapping objects
query = query.items()
else:
# it's a bother at times that strings and string-like objects are
# sequences...
try:
# non-sequence items should not work with len()
# non-empty strings will fail this
if len(query) and not isinstance(query[0], tuple):
raise TypeError
# zero-length sequences of all types will get here and succeed,
# but that's a minor nit - since the original implementation
# allowed empty dicts that type of behavior probably should be
# preserved for consistency
except TypeError:
            raise TypeError("not a valid non-string sequence or mapping object")
l = []
if not doseq:
# preserve old behavior
for k, v in query:
k = quote_plus(str(k), safe=safe)
v = quote_plus(str(v), safe=safe)
l.append(k + '=' + v)
else:
for k, v in query:
k = quote_plus(str(k), safe=safe)
if isinstance(v, str):
v = quote_plus(v, safe=safe)
l.append(k + '=' + v)
elif _is_unicode(v):
# is there a reasonable way to convert to ASCII?
# encode generates a string, but "replace" or "ignore"
# lose information and "strict" can raise UnicodeError
v = quote_plus(v.encode("ASCII","replace"))
l.append(k + '=' + v)
else:
try:
# is this a sufficient test for sequence-ness?
len(v)
except TypeError:
# not a sequence
v = quote_plus(str(v), safe=safe)
l.append(k + '=' + v)
else:
# loop over the sequence
for elt in v:
l.append(k + '=' + quote_plus(str(elt)))
    return querydelimiter.join(l)
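
# Usage sketch. quote_plus and _is_unicode are assumed to be imported by the
# surrounding module (e.g. from urllib); only ordinary query strings are built here.
print(urlencode2({'q': 'hello world', 'page': 2}))       # q=hello+world&page=2
print(urlencode2([('tag', ['a', 'b'])], doseq=1))        # tag=a&tag=b
print(urlencode2({'a': 1, 'b': 2}, querydelimiter=';'))  # a=1;b=2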
def directoryAdd(self, dir_key, key):
'''Adds directory entry `key` to directory at `dir_key`.
If the directory `dir_key` does not exist, it is created.
'''
key = str(key)
dir_items = self.get(dir_key) or []
if key not in dir_items:
dir_items.append(key)
        self.put(dir_key, dir_items)
def upload(self, href, vobject_item):
"""Upload a new or replace an existing item."""
if self.is_fake:
return
content = vobject_item.serialize()
try:
item = self.get(href)
etesync_item = item.etesync_item
etesync_item.content = content
except api.exceptions.DoesNotExist:
etesync_item = self.collection.get_content_class().create(self.collection, content)
etesync_item.save()
    return self.get(href)
def delete_project(self, project):
'''Delete a project. It will recursively delete all the content.
Args:
project (str): The UUID of the project to be deleted.
Returns:
None
Raises:
StorageArgumentException: Invalid arguments
StorageForbiddenException: 403
StorageNotFoundException: 404
HTTPError: other non-20x error codes
'''
if not is_valid_uuid(project):
raise StorageArgumentException(
'Invalid UUID for project: {0}'.format(project))
self._authenticated_request \
.to_endpoint('project/{}/'.format(project)) \
        .delete()
def create_legacy_pad(scope, input_name, output_name, H_in, W_in, k_h, k_w,
s_h, s_w, p_h, p_w, padded_value, container):
'''
This function adds one Pad operator into its last argument, which is a Container object. By feeding the output of
the created Pad operator into Pool operator under valid padding mode, we can achieve the same functionality of
CoreML' pooling under IncludeLastPixel padding mode.
:param scope:
:param input_name:
:param output_name:
:param H_in: input dimension along H-axis
:param W_in: input dimension along W-axis
:param k_h: kernel's H-axis dimension
:param k_w: kernel's W-axis dimension
:param s_h: stride along H-axis
:param s_w: stride along W-axis
:param p_h: padding amount at the beginning and the end of H-axis
:param p_w: padding amount at the beginning and the end of W-axis
:param padded_value: value used to fill padded area
:param container: Container object
'''
# Add a Pad operator to pre-process 4-D tensor
pad_t, pad_b = calculate_legacy_pad_amount(H_in, p_h, k_h, s_h)
pad_l, pad_r = calculate_legacy_pad_amount(W_in, p_w, k_w, s_w)
# CoreML pooling operator pads only their H- and W-axes. Here we assume the shape of the tensor to be padded
# is [N, C, H, W], so we have 8 padding amounts
# pads = [N_begin_index, C_begin_index, H_begin_index, W_begin_index,
# N_end_index, C_end_index, H_end_index, W_end_index]
# Because only H- and W-axes are padded in CoreML, we leave padding amounts of N- and C-axes zeros.
pads = [0, 0, pad_t, pad_l, 0, 0, pad_b, pad_r]
    apply_pad(scope, input_name, output_name, container, pads=pads, value=padded_value)
def _reset_page_refs(self):
"""Invalidate all pages in document dictionary."""
if self.isClosed:
return
for page in self._page_refs.values():
if page:
page._erase()
page = None
    self._page_refs.clear()
def angle_wrap(angle, radians=False):
'''Wraps the input angle to 360.0 degrees.
Parameters
----------
angle : float
The angle to wrap around 360.0 deg.
radians : bool
If True, will assume that the input is in radians. The output will then
also be in radians.
Returns
-------
float
Wrapped angle. If radians is True: input is assumed to be in radians,
output is also in radians.
'''
if radians:
wrapped = angle % (2.0*pi_value)
if wrapped < 0.0:
wrapped = 2.0*pi_value + wrapped
else:
wrapped = angle % 360.0
if wrapped < 0.0:
wrapped = 360.0 + wrapped
    return wrapped
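
# Usage sketch (pi_value is assumed to be a module-level constant equal to math.pi).
print(angle_wrap(370.0))   # 10.0
print(angle_wrap(-45.0))   # 315.0
print(angle_wrap(-1.5 * 3.141592653589793, radians=True))  # ~1.5708, i.e. pi/2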
def log(self, level, message, exc_info=None, reference=None):
# pylint: disable=W0212
"""
Logs a message, possibly with an exception
:param level: Severity of the message (Python logging level)
:param message: Human readable message
:param exc_info: The exception context (sys.exc_info()), if any
:param reference: The ServiceReference associated to the log
"""
if not isinstance(reference, pelix.framework.ServiceReference):
# Ensure we have a clean Service Reference
reference = None
if exc_info is not None:
# Format the exception to avoid memory leaks
try:
exception_str = "\n".join(traceback.format_exception(*exc_info))
except (TypeError, ValueError, AttributeError):
exception_str = "<Invalid exc_info>"
else:
exception_str = None
# Store the LogEntry
entry = LogEntry(
level, message, exception_str, self.__bundle, reference
)
    self.__reader._store_entry(entry)
def shard_data(source_fnames: List[str],
target_fname: str,
source_vocabs: List[vocab.Vocab],
target_vocab: vocab.Vocab,
num_shards: int,
buckets: List[Tuple[int, int]],
length_ratio_mean: float,
length_ratio_std: float,
output_prefix: str) -> Tuple[List[Tuple[List[str], str, 'DataStatistics']], 'DataStatistics']:
"""
Assign int-coded source/target sentence pairs to shards at random.
:param source_fnames: The path to the source text (and optional token-parallel factor files).
:param target_fname: The file name of the target file.
:param source_vocabs: Source vocabulary (and optional source factor vocabularies).
:param target_vocab: Target vocabulary.
:param num_shards: The total number of shards.
:param buckets: Bucket list.
:param length_ratio_mean: Mean length ratio.
:param length_ratio_std: Standard deviation of length ratios.
:param output_prefix: The prefix under which the shard files will be created.
:return: Tuple of source (and source factor) file names, target file names and statistics for each shard,
as well as global statistics.
"""
os.makedirs(output_prefix, exist_ok=True)
sources_shard_fnames = [[os.path.join(output_prefix, C.SHARD_SOURCE % i) + ".%d" % f for i in range(num_shards)]
for f in range(len(source_fnames))]
target_shard_fnames = [os.path.join(output_prefix, C.SHARD_TARGET % i)
for i in range(num_shards)] # type: List[str]
data_stats_accumulator = DataStatisticsAccumulator(buckets, source_vocabs[0], target_vocab,
length_ratio_mean, length_ratio_std)
per_shard_stat_accumulators = [DataStatisticsAccumulator(buckets, source_vocabs[0], target_vocab, length_ratio_mean,
length_ratio_std) for shard_idx in range(num_shards)]
with ExitStack() as exit_stack:
sources_shards = [[exit_stack.enter_context(smart_open(f, mode="wt")) for f in sources_shard_fnames[i]] for i in
range(len(source_fnames))]
target_shards = [exit_stack.enter_context(smart_open(f, mode="wt")) for f in target_shard_fnames]
source_readers, target_reader = create_sequence_readers(source_fnames, target_fname,
source_vocabs, target_vocab)
random_shard_iter = iter(lambda: random.randrange(num_shards), None)
for (sources, target), random_shard_index in zip(parallel_iter(source_readers, target_reader),
random_shard_iter):
random_shard_index = cast(int, random_shard_index)
source_len = len(sources[0])
target_len = len(target)
buck_idx, buck = get_parallel_bucket(buckets, source_len, target_len)
data_stats_accumulator.sequence_pair(sources[0], target, buck_idx)
per_shard_stat_accumulators[random_shard_index].sequence_pair(sources[0], target, buck_idx)
if buck is None:
continue
for i, line in enumerate(sources):
sources_shards[i][random_shard_index].write(ids2strids(line) + "\n")
target_shards[random_shard_index].write(ids2strids(target) + "\n")
per_shard_stats = [shard_stat_accumulator.statistics for shard_stat_accumulator in per_shard_stat_accumulators]
sources_shard_fnames_by_shards = zip(*sources_shard_fnames) # type: List[List[str]]
return list(
        zip(sources_shard_fnames_by_shards, target_shard_fnames, per_shard_stats)), data_stats_accumulator.statistics
def visit_named_list(self, _, children):
"""Manage a list, represented by a ``.resources.List`` instance.
This list is populated with data from the result of the ``FILTERS``.
Arguments
---------
_ (node) : parsimonious.nodes.Node.
children : list
- 0: for ``FILTERS``: list of instances of ``.resources.Field``.
- 1: for ``LIST``: a ``List`` resource
Example
-------
>>> DataQLParser(r'foo(1)[name]', default_rule='NAMED_LIST').data
<List[foo] .foo(1)>
<Field[name] />
</List[foo]>
"""
filters, resource = children
resource.name = filters[0].name
resource.filters = filters
    return resource
def resolve_child_module_registries_lineage(registry):
"""
For a given child module registry, attempt to resolve the lineage.
Return an iterator, yielding from parent down to the input registry,
inclusive of the input registry.
"""
children = [registry]
while isinstance(registry, BaseChildModuleRegistry):
if registry.parent in children:
# this should never normally occur under normal usage where
# classes have been properly subclassed with methods defined
# to specificiation and with standard entry point usage, but
# non-standard definitions/usage can definitely trigger this
# self-referential loop.
raise TypeError(
"registry '%s' was already recorded in the lineage, "
"indicating that it may be some (grand)child of itself, which "
"is an illegal reference in the registry system; previously "
"resolved lineage is: %r" % (registry.parent.registry_name, [
r.registry_name for r in reversed(children)
])
)
pl = len(registry.parent.registry_name)
if len(registry.parent.registry_name) > len(registry.registry_name):
logger.warning(
"the parent registry '%s' somehow has a longer name than its "
"child registry '%s'; the underlying registry class may be "
"constructed in an invalid manner",
registry.parent.registry_name,
registry.registry_name,
)
elif registry.registry_name[:pl] != registry.parent.registry_name:
logger.warning(
"child registry '%s' does not share the same common prefix as "
"its parent registry '%s'; there may be errors with how the "
"related registries are set up or constructed",
registry.registry_name,
registry.parent.registry_name,
)
children.append(registry.parent)
registry = registry.parent
# the lineage down from parent to child.
    return iter(reversed(children))
def Print(self, x, data, message, **kwargs): # pylint: disable=invalid-name
"""Calls tf.Print.
Args:
x: LaidOutTensor.
data: list of LaidOutTensor.
message: str.
**kwargs: keyword arguments to tf.print.
Returns:
LaidOutTensor.
"""
del data, message, kwargs
tf.logging.warning("Warning - mtf.Print not implemented for this mesh type")
    return x
def internal_assert(condition, message=None, item=None, extra=None):
"""Raise InternalException if condition is False.
If condition is a function, execute it on DEVELOP only."""
if DEVELOP and callable(condition):
condition = condition()
if not condition:
if message is None:
message = "assertion failed"
if item is None:
item = condition
        raise CoconutInternalException(message, item, extra)
def connect(self, *args, **kwargs):
"""Extend `client.SimpleClient.connect()` with defaults"""
defaults = {}
for i, k in enumerate(('host', 'port', 'channel', 'use_ssl', 'password')):
if i < len(args):
defaults[k] = args[i]
elif k in kwargs:
defaults[k] = kwargs[k]
else:
def_k = 'default_' + k
defaults[k] = getattr(self, def_k, None)
if defaults['use_ssl'] is None:
defaults['use_ssl'] = False
if defaults['host'] is None:
raise TypeError("missing 1 required positional argument: 'host'")
logging.info("Connecting to %s:%s and joining channels %s",
defaults['host'],
defaults['port'],
defaults['channel'],
)
    client.SimpleClient.connect(self, **defaults)
def get_data_link(self, instance, link):
"""
Gets a single data link.
:param str instance: A Yamcs instance name.
:param str link: The name of the data link.
:rtype: .Link
"""
response = self.get_proto('/links/{}/{}'.format(instance, link))
message = yamcsManagement_pb2.LinkInfo()
message.ParseFromString(response.content)
    return Link(message)
def do_refresh(self, line):
"refresh {table_name}"
table = self.get_table(line)
while True:
desc = table.describe()
status = desc['Table']['TableStatus']
if status == 'ACTIVE':
break
else:
print status, "..."
time.sleep(5)
print ""
    self.pprint(desc)
def _param_bounds(self, gamma, q):
"""
bounds parameters
:param gamma:
:param q:
:return:
"""
if gamma < 1.4:
gamma = 1.4
if gamma > 2.9:
gamma = 2.9
if q < 0.01:
q = 0.01
    return float(gamma), q
def _to_tonnetz(chromagram):
"""Project a chromagram on the tonnetz.
Returned value is normalized to prevent numerical instabilities.
"""
if np.sum(np.abs(chromagram)) == 0.:
# The input is an empty chord, return zero.
return np.zeros(6)
_tonnetz = np.dot(__TONNETZ_MATRIX, chromagram)
one_norm = np.sum(np.abs(_tonnetz)) # Non-zero value
_tonnetz = _tonnetz / float(one_norm) # Normalize tonnetz vector
    return _tonnetz
def min_max(obj, val, is_max):
""" min/max validator for float and integer
"""
n = getattr(obj, 'maximum' if is_max else 'minimum', None)
if n == None:
return
_eq = getattr(obj, 'exclusiveMaximum' if is_max else 'exclusiveMinimum', False)
if is_max:
to_raise = val >= n if _eq else val > n
else:
to_raise = val <= n if _eq else val < n
if to_raise:
        raise ValidationError('condition failed: {0}, v:{1} compared to o:{2}'.format('maximum' if is_max else 'minimum', val, n))
def GetHistAvg(tag_name, start_time, end_time, period,
desc_as_label=False, label=None):
"""
Retrieves data from eDNA history for a given tag. The data will be
averaged over the specified "period".
:param tag_name: fully-qualified (site.service.tag) eDNA tag
:param start_time: must be in format mm/dd/yy hh:mm:ss
:param end_time: must be in format mm/dd/yy hh:mm:ss
:param period: in units of seconds (e.g. 10)
:param desc_as_label: use the tag description as the column name instead
of the full tag
:param label: supply a custom label to use as the DataFrame column name
:return: a pandas DataFrame with timestamp, value, and status
"""
return GetHist(tag_name, start_time, end_time, mode="avg", period=period,
                   desc_as_label=desc_as_label, label=label)
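
# Illustrative call; the tag name and time range are placeholders, and a live
# eDNA connection is required for any data to come back.
df = GetHistAvg("SITE.SERVICE.TAG01", "01/01/19 00:00:00", "01/02/19 00:00:00",
                period=600, desc_as_label=True)
print(df.head())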
def validate_wrap(self, value):
''' Checks that ``value`` is a pymongo ``ObjectId`` or a string
representation of one'''
if (not isinstance(value, ObjectId)
and not isinstance(value, basestring)
and not isinstance(value, bytes)
):
self._fail_validation_type(value, ObjectId)
if isinstance(value, ObjectId):
return
#: bytes
if len(value) == 12:
return
# hex
if len(value) != 24:
        self._fail_validation(value, 'hex object ID is the wrong length')
def _cropbox(self, image, x, y, x2, y2):
"""
    Crops the image to a set of x,y coordinates; (x, y) is the top left, (x2, y2) is the bottom right.
"""
image['options']['crop'] = '%sx%s+%s+%s' % (x2 - x, y2 - y, x, y)
image['size'] = (x2 - x, y2 - y) # update image size
    return image
def LOOPNZ(cpu, target):
"""
Loops if ECX counter is nonzero.
:param cpu: current CPU.
:param target: destination operand.
"""
counter_name = {16: 'CX', 32: 'ECX', 64: 'RCX'}[cpu.address_bit_size]
counter = cpu.write_register(counter_name, cpu.read_register(counter_name) - 1)
    cpu.PC = Operators.ITEBV(cpu.address_bit_size, counter != 0, (cpu.PC + target.read()) & ((1 << target.size) - 1), cpu.PC + cpu.instruction.size)
def resolve_indirect(data, key, splithosts=False):
"""Replace name of environment variable with its value."""
value = data[key]
env_value = os.environ.get(value)
if env_value:
if splithosts:
data[key] = split_hosts(env_value)
else:
data[key] = env_value
else:
        del data[key]
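
# Usage sketch: values in the dict name environment variables to resolve.
import os

os.environ['SERVICE_TOKEN'] = 's3cr3t'
cfg = {'token': 'SERVICE_TOKEN', 'hosts': 'UNSET_VAR'}
resolve_indirect(cfg, 'token')   # replaced with the variable's value
resolve_indirect(cfg, 'hosts')   # variable unset, so the key is dropped
print(cfg)                       # {'token': 's3cr3t'}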
def _init_record(self, record_type_idstr):
"""Override this from osid.Extensible because Forms use a different
attribute in record_type_data."""
record_type_data = self._record_type_data_sets[Id(record_type_idstr).get_identifier()]
module = importlib.import_module(record_type_data['module_path'])
record = getattr(module, record_type_data['form_record_class_name'])
if record is not None:
self._records[record_type_idstr] = record(self)
return True
else:
        return False
def get_thumbnails(self, *args, **kwargs):
"""
Return an iterator which returns ThumbnailFile instances.
"""
    # Look up cached thumbnails recorded for this source.
source_cache = self.get_source_cache()
if source_cache:
thumbnail_storage_hash = utils.get_storage_hash(
self.thumbnail_storage)
for thumbnail_cache in source_cache.thumbnails.all():
# Only iterate files which are stored using the current
# thumbnail storage.
if thumbnail_cache.storage_hash == thumbnail_storage_hash:
yield ThumbnailFile(name=thumbnail_cache.name,
                                    storage=self.thumbnail_storage)
def __load_paths(self, base_path=None):
"""
Set the paths of the different folders
"""
if base_path is None:
base_path = dirname(dirname(dirname(__file__)))
if not base_path.endswith(sep):
base_path += sep
self.__paths = {
'base': base_path,
'cert': base_path + 'certs' + sep,
'lib': base_path + 'lib' + sep,
'extlib': base_path + 'extlib' + sep,
    }
def _run_guest(userid, image_path, os_version, profile,
cpu, memory, network_info, disks_list):
"""Deploy and provision a virtual machine.
Input parameters:
:userid: USERID of the guest, no more than 8.
:image_name: path of the image file
:os_version: os version of the image file
:profile: profile of the userid
:cpu: the number of vcpus
:memory: memory
:network_info: dict of network info.members:
:ip_addr: ip address of vm
:gateway: gateway of net
:vswitch_name: switch name
:cidr: CIDR
:disks_list: list of disks to add.eg:
disks_list = [{'size': '3g',
'is_boot_disk': True,
'disk_pool': 'ECKD:xcateckd'}]
"""
# Import image if not exists
import_image(image_path, os_version)
# Start time
spawn_start = time.time()
# Create userid
print("Creating userid %s ..." % userid)
ret = sdk_client.send_request('guest_create', userid, cpu, memory,
disk_list=disks_list,
user_profile=profile)
if ret['overallRC']:
print 'guest_create error:%s' % ret
return -1
# Deploy image to root disk
image_name = os.path.basename(image_path)
print("Deploying %s to %s ..." % (image_name, userid))
ret = sdk_client.send_request('guest_deploy', userid, image_name)
if ret['overallRC']:
print 'guest_deploy error:%s' % ret
return -2
# Create network device and configure network interface
print("Configuring network interface for %s ..." % userid)
ret = sdk_client.send_request('guest_create_network_interface', userid,
os_version, [network_info])
if ret['overallRC']:
print 'guest_create_network error:%s' % ret
return -3
# Couple to vswitch
ret = sdk_client.send_request('guest_nic_couple_to_vswitch', userid,
'1000', network_info['vswitch_name'])
if ret['overallRC']:
print 'guest_nic_couple error:%s' % ret
return -4
# Grant user
ret = sdk_client.send_request('vswitch_grant_user',
network_info['vswitch_name'],
userid)
if ret['overallRC']:
print 'vswitch_grant_user error:%s' % ret
return -5
# Power on the vm
print("Starting guest %s" % userid)
ret = sdk_client.send_request('guest_start', userid)
if ret['overallRC']:
print 'guest_start error:%s' % ret
return -6
# End time
spawn_time = time.time() - spawn_start
    print "Instance-%s spawned successfully in %s seconds" % (userid, spawn_time)
def event(tagmatch='*',
count=-1,
quiet=False,
sock_dir=None,
pretty=False,
node='minion'):
r'''
Watch Salt's event bus and block until the given tag is matched
.. versionadded:: 2016.3.0
.. versionchanged:: 2019.2.0
``tagmatch`` can now be either a glob or regular expression.
This is useful for utilizing Salt's event bus from shell scripts or for
taking simple actions directly from the CLI.
Enable debug logging to see ignored events.
:param tagmatch: the event is written to stdout for each tag that matches
this glob or regular expression.
:param count: this number is decremented for each event that matches the
``tagmatch`` parameter; pass ``-1`` to listen forever.
:param quiet: do not print to stdout; just block
:param sock_dir: path to the Salt master's event socket file.
:param pretty: Output the JSON all on a single line if ``False`` (useful
for shell tools); pretty-print the JSON output if ``True``.
:param node: Watch the minion-side or master-side event bus.
CLI Example:
.. code-block:: bash
salt-call --local state.event pretty=True
'''
sevent = salt.utils.event.get_event(
node,
sock_dir or __opts__['sock_dir'],
__opts__['transport'],
opts=__opts__,
listen=True)
while True:
ret = sevent.get_event(full=True, auto_reconnect=True)
if ret is None:
continue
if salt.utils.stringutils.expr_match(ret['tag'], tagmatch):
if not quiet:
salt.utils.stringutils.print_cli(
str('{0}\t{1}').format( # future lint: blacklisted-function
salt.utils.stringutils.to_str(ret['tag']),
salt.utils.json.dumps(
ret['data'],
sort_keys=pretty,
indent=None if not pretty else 4)
)
)
sys.stdout.flush()
if count > 0:
count -= 1
log.debug('Remaining event matches: %s', count)
if count == 0:
break
else:
log.debug('Skipping event tag: %s', ret['tag'])
            continue
def ccbox(message="Shall I continue?", title=""):
"""
Original doc:
Display a message box with choices of Continue and Cancel.
The default is "Continue".
Returns returns 1 if "Continue" is chosen, or if
the dialog is cancelled (which is interpreted as
choosing the default). Otherwise returns 0.
If invoked without a message parameter, displays a generic request for a confirmation
that the user wishes to continue. So it can be used this way:
if ccbox(): pass # continue
else: sys.exit(0) # exit the program
"""
choices = ["Continue", "Cancel"]
if title == None:
title = ""
    return boolbox(message, title, choices)
def parse_file(self,filename):
"""Parse file (helper function)"""
try:
return self.rProgram.ignore(cStyleComment).parseFile(filename, parseAll=True)
except SemanticException as err:
print(err)
exit(3)
except ParseException as err:
print(err)
        exit(3)
def convert(ast):
"""Convert BEL1 AST Function to BEL2 AST Function"""
if ast and ast.type == "Function":
# Activity function conversion
if (
ast.name != "molecularActivity"
and ast.name in spec["namespaces"]["Activity"]["list"]
):
print("name", ast.name, "type", ast.type)
ast = convert_activity(ast)
return ast # Otherwise - this will trigger on the BEL2 molecularActivity
# translocation conversion
elif ast.name in ["tloc", "translocation"]:
ast = convert_tloc(ast)
fus_flag = False
for idx, arg in enumerate(ast.args):
if arg.__class__.__name__ == "Function":
# Fix substitution -> variation()
if arg.name in ["sub", "substitution"]:
ast.args[idx] = convert_sub(arg)
elif arg.name in ["trunc", "truncation"]:
ast.args[idx] = convert_trunc(arg)
elif arg.name in ["pmod", "proteinModification"]:
ast.args[idx] = convert_pmod(arg)
elif arg.name in ["fus", "fusion"]:
fus_flag = True
# Recursively process Functions
ast.args[idx] = convert(ast.args[idx])
if fus_flag:
ast = convert_fus(ast)
    return ast
def split_task_parameters(line):
""" Split a string of comma separated words."""
if line is None:
result = []
else:
result = [parameter.strip() for parameter in line.split(",")]
    return result
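
# Example behavior.
print(split_task_parameters("alpha, beta ,  gamma"))  # ['alpha', 'beta', 'gamma']
print(split_task_parameters(None))                    # []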
def team_2_json(self):
"""
transform ariane_clip3 team object to Ariane server JSON obj
:return: Ariane JSON obj
"""
LOGGER.debug("Team.team_2_json")
json_obj = {
'teamID': self.id,
'teamName': self.name,
'teamDescription': self.description,
'teamColorCode': self.color_code,
'teamOSInstancesID': self.osi_ids,
'teamApplicationsID': self.app_ids
}
    return json.dumps(json_obj)
def transloadsForPeer(self, peer):
"""
Returns an iterator of transloads that apply to a particular peer.
"""
for tl in self.transloads.itervalues():
if peer in tl.peers:
            yield tl
def saveThumbnail(self, fileName, filePath):
    """ Saves the thumbnail used for the item to a local file """
if self._thumbnail is None:
self.__init()
param_dict = {}
if self._thumbnail is not None:
imgUrl = self.root + "/info/" + self._thumbnail
onlineFileName, file_ext = splitext(self._thumbnail)
fileNameSafe = "".join(x for x in fileName if x.isalnum()) + file_ext
result = self._get(url=imgUrl,
param_dict=param_dict,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port,
out_folder=filePath,
file_name=fileNameSafe)
return result
else:
        return None
def sys_set_renderer(renderer: int) -> None:
"""Change the current rendering mode to renderer.
.. deprecated:: 2.0
RENDERER_GLSL and RENDERER_OPENGL are not currently available.
"""
lib.TCOD_sys_set_renderer(renderer)
if tcod.console._root_console is not None:
        tcod.console.Console._get_root()
def configuration_option(*param_decls, **attrs):
"""
Adds configuration file support to a click application.
This will create an option of type `click.File` expecting the path to a
configuration file. When specified, it overwrites the default values for
all other click arguments or options with the corresponding value from the
configuration file.
The default name of the option is `--config`.
By default, the configuration will be read from a configuration directory
as determined by `click.get_app_dir`.
This decorator accepts the same arguments as `click.option` and
`click.Path`. In addition, the following keyword arguments are available:
cmd_name : str
The command name. This is used to determine the configuration
directory. Default: `ctx.info_name`
config_file_name : str
        The name of the configuration file. Default: `'config'`
implicit: bool
        If `True`, implicitly create a value for the configuration option
        using the above parameters. If a configuration file exists in this
        path it will be applied even if no configuration option was supplied
        as a CLI argument or environment variable.
        If `False`, only apply a configuration file that has been explicitly
        specified.
        Default: `True`
provider : callable
A callable that parses the configuration file and returns a dictionary
of the configuration parameters. Will be called as
`provider(file_path, cmd_name)`. Default: `configobj_provider()`
"""
param_decls = param_decls or ('--config', )
option_name = param_decls[0]
def decorator(f):
attrs.setdefault('is_eager', True)
attrs.setdefault('help', 'Read configuration from FILE.')
attrs.setdefault('expose_value', False)
implicit = attrs.pop('implicit', True)
cmd_name = attrs.pop('cmd_name', None)
config_file_name = attrs.pop('config_file_name', 'config')
provider = attrs.pop('provider', configobj_provider())
path_default_params = {
'exists': False,
'file_okay': True,
'dir_okay': False,
'writable': False,
'readable': True,
'resolve_path': False
}
path_params = {
k: attrs.pop(k, v)
for k, v in path_default_params.items()
}
attrs['type'] = click.Path(**path_params)
saved_callback = attrs.pop('callback', None)
partial_callback = functools.partial(
configuration_callback, cmd_name, option_name, config_file_name, saved_callback, provider, implicit)
attrs['callback'] = partial_callback
return click.option(*param_decls, **attrs)(f)
return decorator | 0.001073 |
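
A minimal usage sketch for the decorator above. The command name `myapp`, the `--name` option, and the `settings.ini` file are illustrative assumptions; the exact file syntax depends on the provider in use (configobj by default).

import click

@click.command()
@click.option('--name', default='world')
@configuration_option(implicit=False)  # assumes configuration_option from the module above
def myapp(name):
    """Greet NAME; values from the file passed via --config override the defaults."""
    click.echo('Hello, {0}!'.format(name))

# Invoked as:  myapp --config settings.ini
# where settings.ini could contain a line such as:  name = "Ada"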
def _code_cell(self, source):
"""Split the source into input and output."""
input, output = self._prompt.to_cell(source)
return {'cell_type': 'code',
'input': input,
'output': output} | 0.008368 |
def from_mapping(cls, mapping):
"""Create a bag from a dict of elem->count.
Each key in the dict is added if the value is > 0.
Raises:
ValueError: If any count is < 0.
"""
out = cls()
for elem, count in mapping.items():
out._set_count(elem, count)
return out | 0.039286 |
def checker_from_dict(self, dct):
"""Return a checker instance from a dict object."""
checker_identifier = list(dct.keys())[0]
checker_class = self.get_checker(checker_identifier)
if checker_class:
return checker_class(**dct[checker_identifier])
return None | 0.006472 |
def post(self, url, data, params=None, headers=None, connection=None):
"""
Synchronous POST request. ``data`` must be a JSONable value.
"""
params = params or {}
headers = headers or {}
endpoint = self._build_endpoint_url(url, None)
self._authenticate(params, headers)
data = json.dumps(data, cls=JSONEncoder)
return make_post_request(endpoint, data, params, headers,
connection=connection) | 0.00404 |
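
A hedged usage sketch: the signature matches python-firebase's FirebaseApplication.post, so the example assumes that client; the database URL and path are placeholders.

from firebase import firebase

# None disables authentication for this sketch; pass a FirebaseAuthentication object in real use.
app = firebase.FirebaseApplication('https://example-db.firebaseio.com', None)

# POST appends a new child under /users and returns the generated key.
result = app.post('/users', {'name': 'Ada', 'active': True})
print(result)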
def get(self, project, date):
"""
Get the cache data for a specified project for the specified date.
Returns None if the data cannot be found in the cache.
:param project: PyPi project name to get data for
:type project: str
:param date: date to get data for
:type date: datetime.datetime
:return: dict of per-date data for project
:rtype: :py:obj:`dict` or ``None``
"""
fpath = self._path_for_file(project, date)
logger.debug('Cache GET project=%s date=%s - path=%s',
project, date.strftime('%Y-%m-%d'), fpath)
try:
with open(fpath, 'r') as fh:
data = json.loads(fh.read())
        except Exception:
logger.debug('Error getting from cache for project=%s date=%s',
project, date.strftime('%Y-%m-%d'))
return None
data['cache_metadata']['date'] = datetime.strptime(
data['cache_metadata']['date'],
'%Y%m%d'
)
data['cache_metadata']['updated'] = datetime.fromtimestamp(
data['cache_metadata']['updated']
)
return data | 0.002534 |
def deploy(www_dir, bucket_name):
""" Deploy to the configured S3 bucket. """
# Set up the connection to an S3 bucket.
conn = boto.connect_s3()
bucket = conn.get_bucket(bucket_name)
# Deploy each changed file in www_dir
os.chdir(www_dir)
for root, dirs, files in os.walk('.'):
for f in files:
# Use full relative path. Normalize to remove dot.
file_path = os.path.normpath(os.path.join(root, f))
if has_changed_since_last_deploy(file_path, bucket):
deploy_file(file_path, bucket)
else:
logger.info("Skipping {0}".format(file_path))
# Make the whole bucket public
bucket.set_acl('public-read')
# Configure it to be a website
bucket.configure_website('index.html', 'error.html')
# Print the endpoint, so you know the URL
msg = "Your website is now live at {0}".format(
bucket.get_website_endpoint())
logger.info(msg)
logger.info("If you haven't done so yet, point your domain name there!") | 0.000953 |
def windowed_df(pos, ac1, ac2, size=None, start=None, stop=None, step=None,
windows=None, is_accessible=None, fill=np.nan):
"""Calculate the density of fixed differences between two populations in
windows over a single chromosome/contig.
Parameters
----------
pos : array_like, int, shape (n_items,)
Variant positions, using 1-based coordinates, in ascending order.
ac1 : array_like, int, shape (n_variants, n_alleles)
Allele counts array for the first population.
ac2 : array_like, int, shape (n_variants, n_alleles)
Allele counts array for the second population.
size : int, optional
The window size (number of bases).
start : int, optional
The position at which to start (1-based).
stop : int, optional
The position at which to stop (1-based).
step : int, optional
The distance between start positions of windows. If not given,
defaults to the window size, i.e., non-overlapping windows.
windows : array_like, int, shape (n_windows, 2), optional
Manually specify the windows to use as a sequence of (window_start,
window_stop) positions, using 1-based coordinates. Overrides the
size/start/stop/step parameters.
is_accessible : array_like, bool, shape (len(contig),), optional
Boolean array indicating accessibility status for all positions in the
chromosome/contig.
fill : object, optional
The value to use where a window is completely inaccessible.
Returns
-------
df : ndarray, float, shape (n_windows,)
Per-base density of fixed differences in each window.
windows : ndarray, int, shape (n_windows, 2)
The windows used, as an array of (window_start, window_stop) positions,
using 1-based coordinates.
n_bases : ndarray, int, shape (n_windows,)
Number of (accessible) bases in each window.
counts : ndarray, int, shape (n_windows,)
Number of variants in each window.
See Also
--------
allel.model.locate_fixed_differences
"""
# check inputs
pos = SortedIndex(pos, copy=False)
is_accessible = asarray_ndim(is_accessible, 1, allow_none=True)
# locate fixed differences
loc_df = locate_fixed_differences(ac1, ac2)
# count number of fixed differences in windows
n_df, windows, counts = windowed_statistic(
pos, values=loc_df, statistic=np.count_nonzero, size=size, start=start,
stop=stop, step=step, windows=windows, fill=0
)
# calculate value per base
df, n_bases = per_base(n_df, windows, is_accessible=is_accessible,
fill=fill)
return df, windows, n_bases, counts | 0.000366 |
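
A minimal usage sketch with toy arrays, assuming the function is exposed as allel.windowed_df in scikit-allel (as the helpers it calls suggest).

import numpy as np
import allel

pos = np.array([5, 12, 25, 37, 44])                       # variant positions, 1-based
ac1 = np.array([[4, 0], [0, 4], [4, 0], [2, 2], [4, 0]])  # allele counts, population 1
ac2 = np.array([[4, 0], [4, 0], [0, 4], [2, 2], [0, 4]])  # allele counts, population 2

df, windows, n_bases, counts = allel.windowed_df(
    pos, ac1, ac2, size=20, start=1, stop=50)

print(windows)  # (start, stop) coordinates of each window
print(df)       # per-base density of fixed differences per window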
def _convert_fastq(srafn, outdir, single=False):
"convert sra to fastq"
cmd = "fastq-dump --split-files --gzip {srafn}"
cmd = "%s %s" % (utils.local_path_export(), cmd)
sraid = os.path.basename(utils.splitext_plus(srafn)[0])
if not srafn:
return None
if not single:
out_file = [os.path.join(outdir, "%s_1.fastq.gz" % sraid),
os.path.join(outdir, "%s_2.fastq.gz" % sraid)]
if not utils.file_exists(out_file[0]):
with utils.chdir(outdir):
                do.run(cmd.format(**locals()), "Convert to fastq %s" % sraid)
if not utils.file_exists(out_file[0]):
raise IOError("SRA %s didn't convert, something happened." % srafn)
return [out for out in out_file if utils.file_exists(out)]
else:
        raise ValueError("Single-end SRA samples are not supported yet.")
def user(netease, name, id):
"""Download a user\'s playlists by id."""
if name:
netease.download_user_playlists_by_search(name)
if id:
netease.download_user_playlists_by_id(id) | 0.004878 |
def sample(self, k=None, with_replacement=True, weights=None):
"""Return a new table where k rows are randomly sampled from the
original table.
Args:
``k`` -- specifies the number of rows (``int``) to be sampled from
the table. Default is k equal to number of rows in the table.
``with_replacement`` -- (``bool``) By default True;
Samples ``k`` rows with replacement from table, else samples
``k`` rows without replacement.
``weights`` -- Array specifying probability the ith row of the
table is sampled. Defaults to None, which samples each row
with equal probability. ``weights`` must be a valid probability
distribution -- i.e. an array the length of the number of rows,
summing to 1.
Raises:
            ValueError -- if ``weights`` does not have length equal to the number
                of rows in the table, or if ``weights`` does not sum to 1.
Returns:
A new instance of ``Table`` with ``k`` rows resampled.
>>> jobs = Table().with_columns(
... 'job', make_array('a', 'b', 'c', 'd'),
... 'wage', make_array(10, 20, 15, 8))
>>> jobs
job | wage
a | 10
b | 20
c | 15
d | 8
>>> jobs.sample() # doctest: +SKIP
job | wage
b | 20
b | 20
a | 10
d | 8
>>> jobs.sample(with_replacement=True) # doctest: +SKIP
job | wage
d | 8
b | 20
c | 15
a | 10
>>> jobs.sample(k = 2) # doctest: +SKIP
job | wage
b | 20
c | 15
>>> ws = make_array(0.5, 0.5, 0, 0)
>>> jobs.sample(k=2, with_replacement=True, weights=ws) # doctest: +SKIP
job | wage
a | 10
a | 10
>>> jobs.sample(k=2, weights=make_array(1, 0, 1, 0))
Traceback (most recent call last):
...
ValueError: probabilities do not sum to 1
# Weights must be length of table.
>>> jobs.sample(k=2, weights=make_array(1, 0, 0))
Traceback (most recent call last):
...
ValueError: a and p must have same size
"""
n = self.num_rows
if k is None:
k = n
index = np.random.choice(n, k, replace=with_replacement, p=weights)
columns = [[c[i] for i in index] for c in self.columns]
sample = self._with_columns(columns)
return sample | 0.001151 |
def reject(self):
"""Override Qt method"""
if not self.is_fade_running():
key = Qt.Key_Escape
self.key_pressed = key
self.sig_key_pressed.emit() | 0.00995 |
def fetch_resources(uri, rel):
"""
Retrieves embeddable resource from given ``uri``.
For now only local resources (images, fonts) are supported.
:param str uri: path or url to image or font resource
:returns: path to local resource file.
:rtype: str
:raises: :exc:`~easy_pdf.exceptions.UnsupportedMediaPathException`
"""
if settings.STATIC_URL and uri.startswith(settings.STATIC_URL):
path = os.path.join(settings.STATIC_ROOT, uri.replace(settings.STATIC_URL, ""))
elif settings.MEDIA_URL and uri.startswith(settings.MEDIA_URL):
path = os.path.join(settings.MEDIA_ROOT, uri.replace(settings.MEDIA_URL, ""))
else:
path = os.path.join(settings.STATIC_ROOT, uri)
if not os.path.isfile(path):
raise UnsupportedMediaPathException(
"media urls must start with {} or {}".format(
settings.MEDIA_ROOT, settings.STATIC_ROOT
)
)
return path.replace("\\", "/") | 0.003049 |
def generate(self, path, label):
"""Creates default data from the corpus at `path`, marking all
works with `label`.
:param path: path to a corpus directory
:type path: `str`
:param label: label to categorise each work as
:type label: `str`
"""
for filename in os.listdir(path):
self[filename] = label | 0.005291 |
def add_contents(self, dest, contents):
"""Add file contents to the archive under ``dest``.
If ``dest`` is a path, it will be added compressed and world-readable
(user-writeable). You may also pass a :py:class:`~zipfile.ZipInfo` for
custom behavior.
"""
assert not self._closed, "Archive closed"
if not isinstance(dest, zipfile.ZipInfo):
dest = zinfo(dest) # see for some caveats
# Ensure we apply the compression
dest.compress_type = self.zip_compression
# Mark host OS as Linux for all archives
dest.create_system = 3
self._zip_file.writestr(dest, contents) | 0.002985 |
def normalize(self, expr, operation):
"""
Return a normalized expression transformed to its normal form in the
given AND or OR operation.
The new expression arguments will satisfy these conditions:
- operation(*args) == expr (here mathematical equality is meant)
        - the operation does not occur in any of its args.
- NOT is only appearing in literals (aka. Negation normal form).
The operation must be an AND or OR operation or a subclass.
"""
# ensure that the operation is not NOT
assert operation in (self.AND, self.OR,)
# Move NOT inwards.
expr = expr.literalize()
# Simplify first otherwise _rdistributive() may take forever.
expr = expr.simplify()
operation_example = operation(self.TRUE, self.FALSE)
expr = self._rdistributive(expr, operation_example)
# Canonicalize
expr = expr.simplify()
return expr | 0.002066 |
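
A hedged usage sketch assuming this method belongs to boolean.py's BooleanAlgebra class, which the surrounding names (self.AND, self.OR, self.TRUE) suggest.

import boolean

algebra = boolean.BooleanAlgebra()
expr = algebra.parse('a & (b | ~a)')

# Normalize into an AND of OR-clauses over literals (conjunctive normal form).
cnf = algebra.normalize(expr, algebra.AND)
print(cnf)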
def art(artname, number=1, text=""):
"""
Return 1-line art.
:param artname: artname
:type artname : str
:return: ascii art as str
"""
if isinstance(artname, str) is False:
raise artError(ART_TYPE_ERROR)
artname = artname.lower()
arts = sorted(art_dic.keys())
if artname == "random" or artname == "rand" or artname == "rnd":
filtered_arts = list(set(arts) - set(RANDOM_FILTERED_ARTS))
artname = random.choice(filtered_arts)
elif artname not in art_dic.keys():
distance_list = list(map(lambda x: distance_calc(artname, x),
arts))
min_distance = min(distance_list)
selected_art = arts[distance_list.index(min_distance)]
threshold = max(len(artname), len(selected_art)) / 2
if min_distance < threshold:
artname = selected_art
else:
raise artError(ART_NAME_ERROR)
art_value = art_dic[artname]
if isinstance(number, int) is False:
raise artError(NUMBER_TYPE_ERROR)
if isinstance(art_value, str):
return (art_value + " ") * number
if isinstance(text, str) is False:
raise artError(TEXT_TYPE_ERROR)
return (art_value[0] + text + art_value[1] + " ") * number | 0.000789 |
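
A short usage sketch, grounded in the code above: "random" is handled explicitly, and number simply repeats the chosen art on one line.

from art import art  # assumes the art package that ships this helper

print(art("random"))            # one randomly chosen 1-line art
print(art("random", number=3))  # the chosen art repeated three times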
def get_instance(self, payload):
"""
Build an instance of NewKeyInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.new_key.NewKeyInstance
:rtype: twilio.rest.api.v2010.account.new_key.NewKeyInstance
"""
return NewKeyInstance(self._version, payload, account_sid=self._solution['account_sid'], ) | 0.0075 |
def report(self, min_confidence=0, sort_by_size=False,
make_whitelist=False):
"""
Print ordered list of Item objects to stdout.
"""
for item in self.get_unused_code(
min_confidence=min_confidence, sort_by_size=sort_by_size):
print(item.get_whitelist_string() if make_whitelist
else item.get_report(add_size=sort_by_size))
self.found_dead_code_or_error = True
return self.found_dead_code_or_error | 0.005906 |
def get_plugin_icon(self):
"""Return widget icon"""
path = osp.join(self.PLUGIN_PATH, self.IMG_PATH)
return ima.icon('pylint', icon_path=path) | 0.011834 |
def get(self):
"""Copies file from local filesystem to self.save_dir.
Returns:
Full path of the copied file.
Raises:
EnvironmentError if the file can't be found or the save_dir
is not writable.
"""
if self.local_file.endswith('.whl'):
self.temp_dir = tempfile.mkdtemp()
save_dir = self.temp_dir
else:
save_dir = self.save_dir
save_file = '{0}/{1}'.format(save_dir, os.path.basename(
self.local_file))
if not os.path.exists(save_file) or not os.path.samefile(
self.local_file, save_file):
shutil.copy2(self.local_file, save_file)
            logger.info('Local file: {0} copied to {1}.'.format(
self.local_file, save_file))
return save_file | 0.002401 |
def resnet_imagenet_34_td_unit_05_05():
"""Set of hyperparameters."""
hp = resnet_imagenet_34()
hp.use_td = "unit"
hp.targeting_rate = 0.5
hp.keep_prob = 0.5
return hp | 0.038889 |