def chi_square(h1, h2): # 23 us @array, 49 us @list w/ 100
r"""
Chi-square distance.
Measure how unlikely it is that one distribution (histogram) was drawn from the
other. The Chi-square distance between two histograms :math:`H` and :math:`H'` of size
:math:`M` is defined as:
.. math::
d_{\chi^2}(H, H') = \sum_{m=1}^{M} \frac{(H_m - H'_m)^2}{H_m + H'_m}
*Attributes:*
- semimetric
*Attributes for normalized histograms:*
- :math:`d(H, H')\in[0, 2]`
- :math:`d(H, H) = 0`
- :math:`d(H, H') = d(H', H)`
*Attributes for not-normalized histograms:*
- :math:`d(H, H')\in[0, \infty)`
- :math:`d(H, H) = 0`
- :math:`d(H, H') = d(H', H)`
*Attributes for not-equal histograms:*
- not applicable
Parameters
----------
h1 : sequence
The first histogram.
h2 : sequence
The second histogram.
Returns
-------
chi_square : float
Chi-square distance.
"""
h1, h2 = __prepare_histogram(h1, h2)
old_err_state = scipy.seterr(invalid='ignore') # division by zero occurs only when a bin is zero in both histograms, in which case it is 0/0 and should (and does) yield 0
result = scipy.square(h1 - h2) / (h1 + h2)
scipy.seterr(**old_err_state)
result[scipy.isnan(result)] = 0 # faster than scipy.nan_to_num, which checks for +inf and -inf also
return scipy.sum(result)
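For intuition, here is a minimal NumPy sketch of the same computation; it stands in for the module's scipy namespace and its __prepare_histogram helper, which are assumed to exist elsewhere:

import numpy as np

h1 = np.array([0.1, 0.4, 0.5, 0.0])  # two normalized histograms over the same bins
h2 = np.array([0.2, 0.3, 0.5, 0.0])
with np.errstate(invalid='ignore'):  # 0/0 arises only where both bins are zero
    result = np.square(h1 - h2) / (h1 + h2)
result[np.isnan(result)] = 0  # such bins contribute 0 to the distance
print(result.sum())  # ~0.0476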
def solve(self):
"""Perform the solve.
"""
with log_duration(self._print, "memcache get (resolve) took %s"):
solver_dict = self._get_cached_solve()
if solver_dict:
self.from_cache = True
self._set_result(solver_dict)
else:
self.from_cache = False
solver = self._solve()
solver_dict = self._solver_to_dict(solver)
self._set_result(solver_dict)
with log_duration(self._print, "memcache set (resolve) took %s"):
self._set_cached_solve(solver_dict)
def substances_to_frame(substances, properties=None):
"""Construct a pandas :class:`~pandas.DataFrame` from a list of :class:`~pubchempy.Substance` objects.
Optionally specify a list of the desired :class:`~pubchempy.Substance` properties.
"""
import pandas as pd
if isinstance(substances, Substance):
substances = [substances]
properties = set(properties) | set(['sid']) if properties else None
return pd.DataFrame.from_records([s.to_dict(properties) for s in substances], index='sid')
def _py24_25_compat(self):
"""
Python 2.4/2.5 have grave difficulties with threads/fork. We
mandatorily quiesce all running threads during fork using a
monkey-patch there.
"""
if sys.version_info < (2, 6):
# import_module() is used to avoid dep scanner.
os_fork = import_module('mitogen.os_fork')
os_fork._notice_broker_or_pool(self)
def user_can_delete_attachments(self):
"""Checks if the current logged in user is allowed to delete attachments
"""
context = self.context
user = api.get_current_user()
if not self.is_ar_editable():
return False
return (self.user_can_add_attachments() and
not user.allowed(context, ["Client"])) or \
self.user_can_update_attachments()
def _matchOther(self, obj, **kwargs):
"""Perform _match but on another object, not self."""
if obj is not None:
# Need to check that the returned UI element wasn't destroyed first:
if self._findFirstR(**kwargs):
return obj._match(**kwargs)
return False
def post(self, path='', retry=0, **data):
"""
Post an item to the Graph API.
:param path: A string describing the path to the item.
:param retry: An integer describing how many times the request may be retried.
:param data: Graph API parameters such as 'message' or 'source'.
See `Facebook's Graph API documentation <http://developers.facebook.com/docs/reference/api/>`_
for an exhaustive list of options.
"""
response = self._query(
method='POST',
path=path,
data=data,
retry=retry
)
if response is False:
raise FacebookError('Could not post to "%s"' % path)
return response
def target_create(self, data, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/targets#create-target"
api_path = "/api/v2/targets.json"
return self.call(api_path, method="POST", data=data, **kwargs)
def image_url(self) -> Optional[str]:
r"""(:class:`~typing.Optional`\ [:class:`str`]) The image url.
It may be :const:`None` if it's not an image.
"""
images = self.attributes.get('imageinfo', [])
if images and isinstance(images, collections.abc.Sequence):
return images[0]['url']
return None
def http_post(self, url, data=None):
"""POST to URL and get result as a response object.
:param url: URL to POST.
:type url: str
:param data: Data to send in the form body.
:type data: str
:rtype: requests.Response
"""
if not url.startswith('https://'):
raise ValueError('Protocol must be HTTPS, invalid URL: %s' % url)
return requests.post(url, data, verify=True)
def to_snake_case(name):
""" Given a name in camelCase return in snake_case """
s1 = FIRST_CAP_REGEX.sub(r'\1_\2', name)
return ALL_CAP_REGEX.sub(r'\1_\2', s1).lower()
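A hedged sketch of the two module-level patterns this helper assumes (the common two-pass camelCase-to-snake_case idiom; the exact definitions in the source module may differ):

import re

FIRST_CAP_REGEX = re.compile('(.)([A-Z][a-z]+)')   # hypothetical definition
ALL_CAP_REGEX = re.compile('([a-z0-9])([A-Z])')    # hypothetical definition

print(to_snake_case('camelCase'))     # camel_case
print(to_snake_case('HTTPResponse'))  # http_response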
def add(self, key, value):
"""Adds a (name, value) pair, doesn't overwrite the value if it already
exists.
>>> headers = HTTPHeaderDict(foo='bar')
>>> headers.add('Foo', 'baz')
>>> headers['foo']
'bar, baz'
"""
self._data.setdefault(key.lower(), []).append((key, value))
def properties_list(self, io_handler):
"""
Lists the properties of the framework
"""
# Get the framework
framework = self._context.get_framework()
# Head of the table
headers = ("Property Name", "Value")
# Lines
lines = [item for item in framework.get_properties().items()]
# Sort lines
lines.sort()
# Print the table
io_handler.write(self._utils.make_table(headers, lines))
def cgetter(self, fcget: typing.Optional[typing.Callable[[typing.Any], typing.Any]]) -> "AdvancedProperty":
"""Descriptor to change the class wide getter on a property.
:param fcget: new class-wide getter.
:type fcget: typing.Optional[typing.Callable[[typing.Any, ], typing.Any]]
:return: AdvancedProperty
:rtype: AdvancedProperty
"""
self.__fcget = fcget
return self
def get_sprite_at_position(self, x, y):
"""Returns the topmost visible interactive sprite for given coordinates"""
over = None
for sprite in self.all_mouse_sprites():
if sprite.interactive and sprite.check_hit(x, y):
over = sprite
return over
def unset_role(username, role, **kwargs):
'''
Remove role from username.
username
Username for role removal
role
Role to remove
no_save_config
If True, don't save configuration commands to startup configuration.
If False, save configuration to startup configuration.
Default: False
.. code-block:: bash
salt '*' nxos.cmd unset_role username=daniel role=vdc-admin
'''
role_line = 'no username {0} role {1}'.format(username, role)
return config(role_line, **kwargs)
def generate_form_data(self, **kwargs):
"""Create a form dictionary with the key being the element name
and the value being a list of form element objects.
"""
# Add elements that are missing from the form.
self.children = add_missing_children(
self.contained_children,
self.children
)
# Add children to the keyword arguments.
kwargs['children'] = self.children
# Create the form object.
return FormGenerator(**kwargs)
def parse_stdout(self, filelike):
"""Parse the content written by the script to standard out.
The standard output will contain a list of relative filepaths where the generated CIF files have been written.
:param filelike: filelike object of stdout
:returns: an exit code in case of an error, None otherwise
"""
from aiida.orm import CifData
content = filelike.read().strip()
if not content:
return self.exit_codes.ERROR_EMPTY_OUTPUT_FILE
try:
cifs = {}
for line in content.split('\n'):
filename = line.strip()
output_name = os.path.splitext(os.path.basename(filename))[0]
with self.retrieved.open(filename) as handle:
cifs[output_name] = CifData(file=handle)
except Exception: # pylint: disable=broad-except
self.logger.exception('Failed to open a file listed in the stdout\n%s', traceback.format_exc())
return self.exit_codes.ERROR_PARSING_OUTPUT_DATA
self.out('cifs', cifs)
return
def light_general_attention(key, context, hidden_size, projected_align=False):
""" It is a implementation of the Luong et al. attention mechanism with general score. Based on the paper:
https://arxiv.org/abs/1508.04025 "Effective Approaches to Attention-based Neural Machine Translation"
Args:
key: A tensorflow tensor with dimensionality [None, None, key_size]
context: A tensorflow tensor with dimensionality [None, None, max_num_tokens, token_size]
hidden_size: Number of units in hidden representation
projected_align: Using dense layer for hidden representation of context.
If true, between input and attention mechanism insert a dense layer with dimensionality [hidden_size].
If false, a dense layer is not used.
Returns:
output: Tensor at the output with dimensionality [None, None, hidden_size]
"""
batch_size = tf.shape(context)[0]
max_num_tokens, token_size = context.get_shape().as_list()[-2:]
r_context = tf.reshape(context, shape=[-1, max_num_tokens, token_size])
# projected_key: [None, None, hidden_size]
projected_key = tf.layers.dense(key, hidden_size, kernel_initializer=xav())
r_projected_key = tf.reshape(projected_key, shape=[-1, hidden_size, 1])
# projected context: [None, None, hidden_size]
projected_context = \
tf.layers.dense(r_context, hidden_size, kernel_initializer=xav())
attn = tf.nn.softmax(tf.matmul(projected_context, r_projected_key), dim=1)
if projected_align:
log.info("Using projected attention alignment")
t_context = tf.transpose(projected_context, [0, 2, 1])
output = tf.reshape(tf.matmul(t_context, attn),
shape=[batch_size, -1, hidden_size])
else:
log.info("Using without projected attention alignment")
t_context = tf.transpose(r_context, [0, 2, 1])
output = tf.reshape(tf.matmul(t_context, attn),
shape=[batch_size, -1, token_size])
return output
def _start_thread(self):
"""Start a new working thread unless the maximum number of threads
has been reached or the request queue is empty.
"""
with self.lock:
if self.threads and self.queue.empty():
return
if len(self.threads) >= self.max_threads:
return
thread_n = self.last_thread_n + 1
self.last_thread_n = thread_n
thread = threading.Thread(target = self._run,
name = "{0!r} #{1}".format(self, thread_n),
args = (thread_n,))
self.threads.append(thread)
thread.daemon = True
thread.start()
def nodes_with_tag(tag):
"""Sets a list of nodes that have the given tag assigned and calls node()"""
nodes = lib.get_nodes_with_tag(tag, env.chef_environment,
littlechef.include_guests)
nodes = [n['name'] for n in nodes]
if not len(nodes):
print("No nodes found with tag '{0}'".format(tag))
sys.exit(0)
return node(*nodes)
def register(self, *pclss):
"""
:param pclss: A list of :class:`Processor` or its children classes
"""
for pcls in pclss:
if pcls.cid() not in self._processors:
self._processors[pcls.cid()] = pcls
def get_all(self, start=0, count=-1, sort=''):
"""
Gets a list of logical interconnects based on optional sorting and filtering and is constrained by start
and count parameters.
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all items.
The actual number of items in the response might differ from the requested
count if the sum of start and count exceeds the total number of items.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time with the oldest entry first.
Returns:
list: A list of logical interconnects.
"""
return self._helper.get_all(start, count, sort=sort)
def execute(self, array_in, expression, **kwargs):
"""Creates and returns a masked view of the input array."""
context = self.get_context(array_in, expression, kwargs)
context.update(kwargs)
return ma.masked_where(self.evaluate_expression(expression, context), array_in)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_area_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
interface = ET.SubElement(threshold_monitor, "interface")
policy = ET.SubElement(interface, "policy")
policy_name_key = ET.SubElement(policy, "policy_name")
policy_name_key.text = kwargs.pop('policy_name')
area = ET.SubElement(policy, "area")
type_key = ET.SubElement(area, "type")
type_key.text = kwargs.pop('type')
area_value = ET.SubElement(area, "area_value")
area_value.text = kwargs.pop('area_value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def pre_disambiguate(self, docs):
""" Teostab pärisnimede eelühestamine. Üldiseks eesmärgiks on vähendada mitmesust
suurtähega algavate sonade morf analüüsil, nt eemaldada pärisnime analüüs, kui
suurtäht tähistab tõenäoliselt lausealgust.
"""
# 1) Build a frequency lexicon of proper-name lemmas
lexicon = self.__create_proper_names_lexicon(docs)
# 2) Initial pruning: if a word has several proper-name analyses
# with different corpus frequencies, keep only the most
# frequent analysis ...
self.__disambiguate_proper_names_1(docs, lexicon)
# 3) Remove redundant sentence-initial proper-name analyses;
# First find: the certain proper names, the proper names occurring
# at the start of a sentence, and those occurring mid-sentence
certainNames = self.__find_certain_proper_names(docs)
sentInitialNames = self.__find_sentence_initial_proper_names(docs)
sentCentralNames = self.__find_sentence_central_proper_names(docs)
# 3.1) Compare the sentence-initial and mid-sentence lemmas: find the
# lemmas that occurred only at the start of a sentence ...
onlySentenceInitial = sentInitialNames.difference(sentCentralNames)
# 3.2) Compare the sentence-initial-only lemmas with the certain
# proper-name lemmas: if a word occurred only at the start of a sentence
# and is not a certain proper-name lemma, it is probably not a proper name ...
notProperNames = onlySentenceInitial.difference(certainNames)
# 3.3) Remove the redundant proper-name analyses (if any were found)
if len(notProperNames) > 0:
self.__remove_redundant_proper_names(docs, notProperNames)
# 4) Build a new frequency lexicon of proper-name lemmas (the
# frequencies have probably changed in the meantime);
lexicon = self.__create_proper_names_lexicon(docs)
# 5) Delete redundant non-proper-name analyses from words that have
# other analyses in addition to the proper-name ones:
# at the start of a sentence, keep only the proper-name analyses
# if they also occur elsewhere in the corpus;
# mid-sentence, keep only the proper-name analyses in any case;
self.__disambiguate_proper_names_2(docs, lexicon)
return docs
def _get_session(self):
"""Start session with email server."""
if self.port in (465, "465"):
session = self._get_ssl()
elif self.port in (587, "587"):
session = self._get_tls()
else:
# Fail loudly on unsupported ports rather than hitting an unbound `session` below.
raise MessageSendError("Unsupported port: %s" % self.port)
try:
session.login(self.from_, self._auth)
except SMTPResponseException as e:
raise MessageSendError(e.smtp_error.decode("unicode_escape"))
return session
def list_json_files(directory, recursive=False):
"""Return a list of file paths for JSON files within `directory`.
Args:
directory: A path to a directory.
recursive: If ``True``, this function will descend into all
subdirectories.
Returns:
A list of JSON file paths under `directory` (top level only unless `recursive` is set).
"""
json_files = []
for top, dirs, files in os.walk(directory):
dirs.sort()
# Get paths to each file in `files`
paths = (os.path.join(top, f) for f in sorted(files))
# Add all the .json files to our return collection
json_files.extend(x for x in paths if is_json(x))
if not recursive:
break
return json_files
def get_context_data(self, **kwargs):
"""
Add the query to the context.
"""
context = super(BaseEntryChannel, self).get_context_data(**kwargs)
context.update({'query': self.query})
return context
def _migrate(data: Mapping[str, Any]) -> SettingsData:
"""
Check the version integer of the JSON file data and run any necessary
migrations to get us to the latest file format. Returns a tuple of the
migrated settings dictionary and the version migrated to.
"""
next = dict(data)
version = next.pop('_version', 0)
target_version = len(_MIGRATIONS)
migrations = _MIGRATIONS[version:]
if len(migrations) > 0:
log.info(
"Migrating advanced settings from version {} to {}"
.format(version, target_version))
for m in migrations:
next = m(next)
return next, target_version
def get_host_port(spec, default_port):
"parse 'hostname:22' into a host and port, with the port optional"
args = (spec.split(':', 1) + [default_port])[:2]
args[1] = int(args[1])
return args[0], args[1]
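Usage, assuming SSH's port 22 as the fallback default:

host, port = get_host_port('example.com:2222', 22)  # ('example.com', 2222)
host, port = get_host_port('example.com', 22)       # ('example.com', 22)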
def _logpdf(self, **kwargs):
"""Returns the log of the pdf at the given values. The keyword
arguments must contain all of the parameters in self's params. Unrecognized
arguments are ignored.
"""
if kwargs in self:
return sum([self._lognorm[p] +
self._expnorm[p]*(kwargs[p]-self._mean[p])**2.
for p in self._params])
else:
return -numpy.inf
def importSafeElementTree(module_names=None):
"""Find a working ElementTree implementation that is not vulnerable
to XXE, using `defusedxml`.
>>> XXESafeElementTree = importSafeElementTree()
@param module_names: The names of modules to try to use as
a safe ElementTree. Defaults to C{L{xxe_safe_elementtree_modules}}
@returns: An ElementTree module that is not vulnerable to XXE.
"""
if module_names is None:
module_names = xxe_safe_elementtree_modules
try:
return importElementTree(module_names)
except ImportError:
raise ImportError('Unable to find an ElementTree module '
'that is not vulnerable to XXE. '
'Tried importing %r' % (module_names, ))
def register_yaml():
"""Register a encoder/decoder for YAML serialization.
It is slower than JSON, but allows for more data types
to be serialized. Useful if you need to send data such as dates"""
try:
import yaml
registry.register('yaml', yaml.safe_dump, yaml.safe_load,
content_type='application/x-yaml',
content_encoding='utf-8')
except ImportError:
def not_available(*args, **kwargs):
"""In case a client receives a yaml message, but yaml
isn't installed."""
raise SerializerNotInstalled(
"No decoder installed for YAML. Install the PyYAML library")
registry.register('yaml', None, not_available, 'application/x-yaml')
def validate_account_alias(iam_client, account_alias):
"""Exit if list_account_aliases doesn't include account_alias."""
# Super overkill here using pagination when an account can only
# have a single alias, but at least this implementation should be
# future-proof
current_account_aliases = []
paginator = iam_client.get_paginator('list_account_aliases')
response_iterator = paginator.paginate()
for page in response_iterator:
current_account_aliases.extend(page.get('AccountAliases', []))
if account_alias in current_account_aliases:
LOGGER.info('Verified current AWS account alias matches required '
'alias %s.',
account_alias)
else:
LOGGER.error('Current AWS account aliases "%s" do not match '
'required account alias %s in Runway config.',
','.join(current_account_aliases),
account_alias)
sys.exit(1)
def convert_to_order_dict(map_list):
""" convert mapping in list to ordered dict
@param (list) map_list
[
{"a": 1},
{"b": 2}
]
@return (OrderedDict)
OrderedDict({
"a": 1,
"b": 2
})
"""
ordered_dict = OrderedDict()
for map_dict in map_list:
ordered_dict.update(map_dict)
return ordered_dict
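Usage; the function assumes OrderedDict has been imported from collections at module level:

from collections import OrderedDict

print(convert_to_order_dict([{"a": 1}, {"b": 2}]))
# OrderedDict([('a', 1), ('b', 2)])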
def installFS(rh, vaddr, mode, fileSystem, diskType):
"""
Install a filesystem on a virtual machine's dasd.
Input:
Request Handle:
userid - Userid that owns the disk
Virtual address as known to the owning system.
Access mode to use to get the disk.
Disk Type - 3390 or 9336
Output:
Dictionary containing the following:
overallRC - overall return code, 0: success, non-zero: failure
rc - RC returned from SMCLI if overallRC = 0.
rs - RS returned from SMCLI if overallRC = 0.
errno - Errno returned from SMCLI if overallRC = 0.
response - Output of the SMCLI command.
"""
rh.printSysLog("Enter vmUtils.installFS, userid: " + rh.userid +
", vaddr: " + str(vaddr) + ", mode: " + mode + ", file system: " +
fileSystem + ", disk type: " + diskType)
results = {
'overallRC': 0,
'rc': 0,
'rs': 0,
'errno': 0,
}
out = ''
diskAccessed = False
# Get access to the disk.
cmd = ["sudo",
"/opt/zthin/bin/linkdiskandbringonline",
rh.userid,
vaddr,
mode]
strCmd = ' '.join(cmd)
rh.printSysLog("Invoking: " + strCmd)
try:
out = subprocess.check_output(cmd, close_fds=True)
if isinstance(out, bytes):
out = bytes.decode(out)
diskAccessed = True
except CalledProcessError as e:
rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
e.returncode, e.output))
results = msgs.msg['0415'][0]
results['rs'] = e.returncode
rh.updateResults(results)
except Exception as e:
# All other exceptions.
results = msgs.msg['0421'][0]
rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
type(e).__name__, str(e)))
if results['overallRC'] == 0:
"""
sample output:
linkdiskandbringonline maint start time: 2017-03-03-16:20:48.011
Success: Userid maint vdev 193 linked at ad35 device name dasdh
linkdiskandbringonline exit time: 2017-03-03-16:20:52.150
"""
match = re.search('Success:(.+?)\n', out)
if match:
parts = match.group(1).split()
if len(parts) > 9:
device = "/dev/" + parts[9]
else:
strCmd = ' '.join(cmd)
rh.printLn("ES", msgs.msg['0416'][1] % (modId,
'Success:', 10, strCmd, out))
results = msgs.msg['0416'][0]
rh.updateResults(results)
else:
strCmd = ' '.join(cmd)
rh.printLn("ES", msgs.msg['0417'][1] % (modId,
'Success:', strCmd, out))
results = msgs.msg['0417'][0]
rh.updateResults(results)
if results['overallRC'] == 0 and diskType == "3390":
# dasdfmt the disk
cmd = ["sudo",
"/sbin/dasdfmt",
"-y",
"-b", "4096",
"-d", "cdl",
"-f", device]
strCmd = ' '.join(cmd)
rh.printSysLog("Invoking: " + strCmd)
try:
out = subprocess.check_output(cmd, close_fds=True)
if isinstance(out, bytes):
out = bytes.decode(out)
except CalledProcessError as e:
rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
e.returncode, e.output))
results = msgs.msg['0415'][0]
results['rs'] = e.returncode
rh.updateResults(results)
except Exception as e:
# All other exceptions.
strCmd = " ".join(cmd)
rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
type(e).__name__, str(e)))
results = msgs.msg['0421'][0]
rh.updateResults(results)
if results['overallRC'] == 0 and diskType == "3390":
# Settle the devices so we can do the partition.
strCmd = ("which udevadm &> /dev/null && " +
"udevadm settle || udevsettle")
rh.printSysLog("Invoking: " + strCmd)
try:
subprocess.check_output(
strCmd,
stderr=subprocess.STDOUT,
close_fds=True,
shell=True)
if isinstance(out, bytes):
out = bytes.decode(out)
except CalledProcessError as e:
rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
e.returncode, e.output))
results = msgs.msg['0415'][0]
results['rs'] = e.returncode
rh.updateResults(results)
except Exception as e:
# All other exceptions.
strCmd = " ".join(cmd)
rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
type(e).__name__, str(e)))
results = msgs.msg['0421'][0]
rh.updateResults(results)
if results['overallRC'] == 0 and diskType == "3390":
# Prepare the partition with fdasd
cmd = ["sudo", "/sbin/fdasd", "-a", device]
strCmd = ' '.join(cmd)
rh.printSysLog("Invoking: " + strCmd)
try:
out = subprocess.check_output(cmd,
stderr=subprocess.STDOUT, close_fds=True)
if isinstance(out, bytes):
out = bytes.decode(out)
except CalledProcessError as e:
rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
e.returncode, e.output))
results = msgs.msg['0415'][0]
results['rs'] = e.returncode
rh.updateResults(results)
except Exception as e:
# All other exceptions.
rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
type(e).__name__, str(e)))
results = msgs.msg['0421'][0]
rh.updateResults(results)
if results['overallRC'] == 0 and diskType == "9336":
# Delete the existing partition in case the disk already
# has a partition in it.
cmd = "sudo /sbin/fdisk " + device + " << EOF\nd\nw\nEOF"
rh.printSysLog("Invoking: /sbin/fdsik " + device +
" << EOF\\nd\\nw\\nEOF ")
try:
out = subprocess.check_output(cmd,
stderr=subprocess.STDOUT,
close_fds=True,
shell=True)
if isinstance(out, bytes):
out = bytes.decode(out)
except CalledProcessError as e:
rh.printLn("ES", msgs.msg['0415'][1] % (modId, cmd,
e.returncode, e.output))
results = msgs.msg['0415'][0]
results['rs'] = e.returncode
rh.updateResults(results)
except Exception as e:
# All other exceptions.
rh.printLn("ES", msgs.msg['0421'][1] % (modId, cmd,
type(e).__name__, str(e)))
results = msgs.msg['0421'][0]
rh.updateResults(results)
if results['overallRC'] == 0 and diskType == "9336":
# Prepare the partition with fdisk
cmd = "sudo /sbin/fdisk " + device + " << EOF\nn\np\n1\n\n\nw\nEOF"
rh.printSysLog("Invoking: sudo /sbin/fdisk " + device +
" << EOF\\nn\\np\\n1\\n\\n\\nw\\nEOF")
try:
out = subprocess.check_output(cmd,
stderr=subprocess.STDOUT,
close_fds=True,
shell=True)
if isinstance(out, bytes):
out = bytes.decode(out)
except CalledProcessError as e:
rh.printLn("ES", msgs.msg['0415'][1] % (modId, cmd,
e.returncode, e.output))
results = msgs.msg['0415'][0]
results['rs'] = e.returncode
rh.updateResults(results)
except Exception as e:
# All other exceptions.
rh.printLn("ES", msgs.msg['0421'][1] % (modId, cmd,
type(e).__name__, str(e)))
results = msgs.msg['0421'][0]
rh.updateResults(results)
if results['overallRC'] == 0:
# Settle the devices before installing the file system.
strCmd = ("which udevadm &> /dev/null && " +
"udevadm settle || udevsettle")
rh.printSysLog("Invoking: " + strCmd)
try:
subprocess.check_output(
strCmd,
stderr=subprocess.STDOUT,
close_fds=True,
shell=True)
if isinstance(out, bytes):
out = bytes.decode(out)
except CalledProcessError as e:
rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
e.returncode, e.output))
results = msgs.msg['0415'][0]
results['rs'] = e.returncode
rh.updateResults(results)
except Exception as e:
# All other exceptions.
strCmd = " ".join(cmd)
rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
type(e).__name__, str(e)))
results = msgs.msg['0421'][0]
rh.updateResults(results)
if results['overallRC'] == 0:
# Install the file system into the disk.
device = device + "1" # Point to first partition
if fileSystem != 'swap':
if fileSystem == 'xfs':
cmd = ["sudo", "mkfs.xfs", "-f", device]
else:
cmd = ["sudo", "mkfs", "-F", "-t", fileSystem, device]
strCmd = ' '.join(cmd)
rh.printSysLog("Invoking: " + strCmd)
try:
out = subprocess.check_output(cmd,
stderr=subprocess.STDOUT, close_fds=True)
if isinstance(out, bytes):
out = bytes.decode(out)
rh.printLn("N", "File system: " + fileSystem +
" is installed.")
except CalledProcessError as e:
rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
e.returncode, e.output))
results = msgs.msg['0415'][0]
results['rs'] = e.returncode
rh.updateResults(results)
except Exception as e:
# All other exceptions.
rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
type(e).__name__, str(e)))
results = msgs.msg['0421'][0]
rh.updateResults(results)
else:
rh.printLn("N", "File system type is swap. No need to install " +
"a filesystem.")
if diskAccessed:
# Give up the disk.
cmd = ["sudo", "/opt/zthin/bin/offlinediskanddetach",
rh.userid,
vaddr]
strCmd = ' '.join(cmd)
rh.printSysLog("Invoking: " + strCmd)
try:
out = subprocess.check_output(cmd, close_fds=True)
if isinstance(out, bytes):
out = bytes.decode(out)
except CalledProcessError as e:
rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd,
e.returncode, e.output))
results = msgs.msg['0415'][0]
results['rs'] = e.returncode
rh.updateResults(results)
except Exception as e:
# All other exceptions.
rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
type(e).__name__, str(e)))
results = msgs.msg['0421'][0]
rh.updateResults(results)
rh.printSysLog("Exit vmUtils.installFS, rc: " + str(results['rc']))
return results
def loadImgs(self, ids=[]):
"""
Load imgs with the specified ids.
:param ids (int array) : integer ids specifying img
:return: imgs (object array) : loaded img objects
"""
if type(ids) == list:
return [self.imgs[id] for id in ids]
elif type(ids) == int:
return [self.imgs[ids]]
def group_factory(bridge, number, name, led_type):
""" Make a group.
:param bridge: Member of this bridge.
:param number: Group number (1-4).
:param name: Name of group.
:param led_type: Either `RGBW`, `WRGB`, `RGBWW`, `WHITE`, `DIMMER` or `BRIDGE_LED`.
:returns: New group.
"""
if led_type in [RGBW, BRIDGE_LED]:
return RgbwGroup(bridge, number, name, led_type)
elif led_type == RGBWW:
return RgbwwGroup(bridge, number, name)
elif led_type == WHITE:
return WhiteGroup(bridge, number, name)
elif led_type == DIMMER:
return DimmerGroup(bridge, number, name)
elif led_type == WRGB:
return WrgbGroup(bridge, number, name)
else:
raise ValueError('Invalid LED type: %s' % led_type)
def set(self, key: str, value: object) -> None:
"""
Caches a value for the lifetime of the current turn.
:param key: The name of the value being cached.
:param value: The value to cache.
:return: None
"""
if not key or not isinstance(key, str):
raise KeyError('"key" must be a valid string.')
self._services[key] = value
def check_frequency(freq, res, aniso, epermH, epermV, mpermH, mpermV, verb):
r"""Calculate frequency-dependent parameters.
This check-function is called from one of the modelling routines in
:mod:`model`. Consult these modelling routines for a detailed description
of the input parameters.
Parameters
----------
freq : array_like
Frequencies f (Hz).
res : array_like
Horizontal resistivities rho_h (Ohm.m); #res = #depth + 1.
aniso : array_like
Anisotropies lambda = sqrt(rho_v/rho_h) (-); #aniso = #res.
epermH, epermV : array_like
Relative horizontal/vertical electric permittivities
epsilon_h/epsilon_v (-);
#epermH = #epermV = #res.
mpermH, mpermV : array_like
Relative horizontal/vertical magnetic permeabilities mu_h/mu_v (-);
#mpermH = #mpermV = #res.
verb : {0, 1, 2, 3, 4}
Level of verbosity.
Returns
-------
freq : float
Frequency, checked for size and assured min_freq.
etaH, etaV : array
Parameters etaH/etaV, same size as provided resistivity.
zetaH, zetaV : array
Parameters zetaH/zetaV, same size as provided resistivity.
"""
global _min_freq
# Check if the user provided a model for etaH/etaV/zetaH/zetaV
if isinstance(res, dict):
res = res['res']
# Check frequency
freq = _check_var(freq, float, 1, 'freq')
# Minimum frequency to avoid division by zero at freq = 0 Hz.
# => min_freq can be set with utils.set_min
freq = _check_min(freq, _min_freq, 'Frequencies', 'Hz', verb)
if verb > 2:
_prnt_min_max_val(freq, " frequency [Hz] : ", verb)
# Calculate eta and zeta (horizontal and vertical)
c = 299792458 # Speed of light m/s
mu_0 = 4e-7*np.pi # Magn. permeability of free space [H/m]
epsilon_0 = 1./(mu_0*c*c) # Elec. permittivity of free space [F/m]
etaH = 1/res + np.outer(2j*np.pi*freq, epermH*epsilon_0)
etaV = 1/(res*aniso*aniso) + np.outer(2j*np.pi*freq, epermV*epsilon_0)
zetaH = np.outer(2j*np.pi*freq, mpermH*mu_0)
zetaV = np.outer(2j*np.pi*freq, mpermV*mu_0)
return freq, etaH, etaV, zetaH, zetaV
def add(self, submission, archive, _):
""" Add a new submission to the repo (add the to queue, will be saved async)"""
self.queue.put((submission, submission["result"], submission["grade"], submission["problems"], submission["tests"], submission["custom"], archive))
def set_params(self, **params):
"""Set parameters on this object
Safe setter method - attributes should not be modified directly as some
changes are not valid.
Valid parameters:
- n_landmark
- n_svd
Parameters
----------
params : key-value pairs of parameter name and new values
Returns
-------
self
"""
# update parameters
reset_landmarks = False
if 'n_landmark' in params and params['n_landmark'] != self.n_landmark:
self.n_landmark = params['n_landmark']
reset_landmarks = True
if 'n_svd' in params and params['n_svd'] != self.n_svd:
self.n_svd = params['n_svd']
reset_landmarks = True
# update superclass parameters
super().set_params(**params)
# reset things that changed
if reset_landmarks:
self._reset_landmarks()
return self
def ip_hide_ext_community_list_holder_extcommunity_list_ext_community_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip = ET.SubElement(config, "ip", xmlns="urn:brocade.com:mgmt:brocade-common-def")
hide_ext_community_list_holder = ET.SubElement(ip, "hide-ext-community-list-holder", xmlns="urn:brocade.com:mgmt:brocade-ip-policy")
extcommunity_list = ET.SubElement(hide_ext_community_list_holder, "extcommunity-list")
extcommunity_list_num_key = ET.SubElement(extcommunity_list, "extcommunity-list-num")
extcommunity_list_num_key.text = kwargs.pop('extcommunity_list_num')
ext_community_action = ET.SubElement(extcommunity_list, "ext-community-action")
ext_community_action.text = kwargs.pop('ext_community_action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def getAttributeUri(self, index):
"""
Returns the numeric ID for the namespace URI of an attribute
"""
offset = self._get_attribute_offset(index)
uri = self.m_attributes[offset + ATTRIBUTE_IX_NAMESPACE_URI]
return uri
def Overlay_setShowScrollBottleneckRects(self, show):
"""
Function path: Overlay.setShowScrollBottleneckRects
Domain: Overlay
Method name: setShowScrollBottleneckRects
Parameters:
Required arguments:
'show' (type: boolean) -> True for showing scroll bottleneck rects
No return value.
Description: Requests that backend shows scroll bottleneck rects
"""
assert isinstance(show, (bool,)), (
"Argument 'show' must be of type '['bool']'. Received type: '%s'" % type(show))
subdom_funcs = self.synchronous_command(
'Overlay.setShowScrollBottleneckRects', show=show)
return subdom_funcs
def write_alias_config_hash(alias_config_hash='', empty_hash=False):
"""
Write alias_config_hash to the alias hash file.
Args:
alias_config_hash: The hash of the current alias configuration to persist.
empty_hash: True if we want to write an empty string into the file. Empty string in the alias hash file
means that we have to perform a full load of the command table in the next run.
"""
with open(GLOBAL_ALIAS_HASH_PATH, 'w') as alias_config_hash_file:
alias_config_hash_file.write('' if empty_hash else alias_config_hash)
def _store_inferential_results(self,
value_array,
index_names,
attribute_name,
series_name=None,
column_names=None):
"""
Store the estimation results that relate to statistical inference, such
as parameter estimates, standard errors, p-values, etc.
Parameters
----------
value_array : 1D or 2D ndarray.
Contains the values that are to be stored on the model instance.
index_names : list of strings.
Contains the names that are to be displayed on the 'rows' for each
value being stored. There should be one element for each value of
`value_array.`
series_name : string or None, optional.
The name of the pandas series being created for `value_array.` This
kwarg should be None when `value_array` is a 1D ndarray.
attribute_name : string.
The attribute name that will be exposed on the model instance and
related to the passed `value_array.`
column_names : list of strings, or None, optional.
Same as `index_names` except that it pertains to the columns of a
2D ndarray. When `value_array` is a 2D ndarray, There should be one
element for each column of `value_array.` This kwarg should be None
otherwise.
Returns
-------
None. Stores a pandas series or dataframe on the model instance.
"""
if len(value_array.shape) == 1:
assert series_name is not None
new_attribute_value = pd.Series(value_array,
index=index_names,
name=series_name)
elif len(value_array.shape) == 2:
assert column_names is not None
new_attribute_value = pd.DataFrame(value_array,
index=index_names,
columns=column_names)
setattr(self, attribute_name, new_attribute_value)
return None
def clear(self):
""" Removes all VisItems
"""
model = self.tree.model()
# Don't use model.clear(); it would also delete the column sizes
model.removeRows(0, 1)
model.setRowCount(1)
self._setColumnCountForContents()
def easeInBack(n, s=1.70158):
"""A tween function that backs up first at the start and then goes to the destination.
Args:
n (float): The time progress, starting at 0.0 and ending at 1.0.
s (float): The amount of overshoot in the initial "back up" phase. Default is 1.70158.
Returns:
(float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
"""
_checkRange(n)
return n * n * ((s + 1) * n - s)
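Sampling the tween shows the characteristic dip below zero before the curve heads to 1.0 (this assumes the module's _checkRange helper is in scope):

for t in (0.0, 0.25, 0.5, 0.75, 1.0):
    print(t, easeInBack(t))
# 0.25 and 0.5 yield negative values: the "backing up" phase.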
def get_cloud_masks(self, threshold=None, non_valid_value=False):
""" The binary cloud mask is computed on the fly. Be cautious. The pixels without valid data are assigned
non_valid_value.
:param threshold: A float from [0,1] specifying threshold
:type threshold: float
:param non_valid_value: Value which will be assigned to pixels without valid data
:type non_valid_value: int in range `[-254, 255]`
:return: Binary cloud masks of shape `(times, height, width)` and `dtype=numpy.int8`
:rtype: numpy.ndarray
"""
self.get_probability_masks()
cloud_masks = self.cloud_detector.get_mask_from_prob(self.probability_masks, threshold)
cloud_masks[~self.valid_data] = non_valid_value
return cloud_masks
def read(self):
"""Return a single byte from the output buffer
"""
if self._output_buffer:
b, self._output_buffer = (self._output_buffer[0:1],
self._output_buffer[1:])
return b
return b''
def off(self):
"""!
\~english
Close Audio output. set pin mode to output
@return a boolean value. if True means close audio output is OK otherwise failed to close.
\~chinese
关闭音频输出。 将引脚模式设置为输出
@return 布尔值。 如果为 True 关闭音频输出成功,否则关闭不成功。
"""
isOK = True
try:
if self.channelR!=None:
sub.call(["gpio","-g","mode", "{}".format(self.channelR), self.PIN_MODE_OUTPUT ])
except:
isOK = False
print("Close audio right channel failed.")
try:
if self.channelL!=None:
sub.call(["gpio","-g","mode", "{}".format(self.channelL), self.PIN_MODE_OUTPUT ])
except:
isOK = False
print("Close audio left channel failed.")
return isOK
def checkpat(self, pattern):
"""
check for errors in a regex pattern
"""
if pattern is None:
return
try:
re.match(pattern, "")
except re.error:
print3("\nBad user-defined singular pattern:\n\t%s\n" % pattern)
raise BadUserDefinedPatternError
def _get_context_id(self):
"""If this async is in a context set the context id."""
from furious.context import get_current_context
context_id = self._options.get('context_id')
if context_id:
return context_id
try:
context = get_current_context()
except errors.NotInContextError:
context = None
self.update_options(context_id=None)
if context:
context_id = context.id
self.update_options(context_id=context_id)
return context_id
def _wrapped_method_with_watch_fn(self, f, *args, **kwargs):
"""A wrapped method with a watch function.
When this method is called, it will call the underlying method with
the same arguments, *except* that if the ``watch`` argument isn't
:data:`None`, it will be replaced with a wrapper around that watch
function, so that the watch function will be called in the reactor
thread. This means that the watch function can safely use Twisted
APIs.
"""
bound_args = signature(f).bind(*args, **kwargs)
orig_watch = bound_args.arguments.get("watch")
if orig_watch is not None:
wrapped_watch = partial(self._call_in_reactor_thread, orig_watch)
wrapped_watch = wraps(orig_watch)(wrapped_watch)
bound_args.arguments["watch"] = wrapped_watch
return f(**bound_args.arguments)
def minify_urls(filepath, ext='asc', url_regex=None, output_ext='.urls_minified', access_token=None):
""" Use bitly or similar minifier to shrink all URLs in text files within a folder structure.
Used for the NLPIA manuscript directory for Manning Publishing
bitly API: https://dev.bitly.com/links.html
Args:
filepath (str): Directory or file path
ext (str): File name extension to filter text files by. default='asc'
output_ext (str): Extension to append to filenames of altered files. default='.urls_minified'
FIXME: NotImplementedError! Untested!
"""
access_token = access_token or secrets.bitly.access_token
output_ext = output_ext or ''
url_regex = regex.compile(url_regex) if isinstance(url_regex, str) else url_regex
filemetas = []
for filemeta in find_files(filepath, ext=ext):
filemetas += [filemeta]
altered_text = ''
with open(filemeta['path'], 'rt') as fin:
text = fin.read()
end = 0
for match in url_regex.finditer(text):
url = match.group()
start = match.start()
altered_text += text[end:start]  # append the unshortened text between matches
resp = requests.get('https://api-ssl.bitly.com/v3/shorten?access_token={}&longUrl={}'.format(
access_token, url), allow_redirects=True, timeout=5)
js = resp.json()
short_url = js['shortUrl']
altered_text += short_url
end = start + len(url)
altered_text += text[end:]
with open(filemeta['path'] + (output_ext or ''), 'wt') as fout:
fout.write(altered_text)
return altered_text
def utc_to_local(utc_dt):
"""
:param utc_dt: datetime in UTC
:return: datetime in the local timezone
"""
# get integer timestamp to avoid precision lost
timestamp = calendar.timegm(utc_dt.timetuple())
local_dt = datetime.datetime.fromtimestamp(timestamp)
assert utc_dt.resolution >= datetime.timedelta(microseconds=1)
return local_dt.replace(microsecond=utc_dt.microsecond)
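A usage sketch; the printed result depends on the machine's local timezone (the function itself assumes the calendar and datetime modules are imported):

import datetime

utc_dt = datetime.datetime(2020, 1, 1, 12, 0, 0)
print(utc_to_local(utc_dt))  # the same instant in local wall-clock time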
def get_assessment_taken_admin_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment taken admin service for the given bank.
arg: bank_id (osid.id.Id): the ``Id`` of the bank
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.assessment.AssessmentTakenAdminSession) - an
``AssessmentTakenSearchSession``
raise: NotFound - ``bank_id`` not found
raise: NullArgument - ``bank_id`` or ``proxy`` is ``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_assessment_taken_admin()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_admin()`` and
``supports_visible_federation()`` are ``true``.*
"""
if not self.supports_assessment_taken_admin():
raise errors.Unimplemented()
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
# pylint: disable=no-member
return sessions.AssessmentTakenAdminSession(bank_id, proxy, self._runtime)
def fig_intro(params, ana_params, T=[800, 1000], fraction=0.05, rasterized=False):
'''set up plot for introduction'''
ana_params.set_PLOS_2column_fig_style(ratio=0.5)
#load spike as database
networkSim = CachedNetwork(**params.networkSimParams)
if analysis_params.bw:
networkSim.colors = phlp.get_colors(len(networkSim.X))
#set up figure and subplots
fig = plt.figure()
gs = gridspec.GridSpec(3, 4)
fig.subplots_adjust(left=0.05, right=0.95, wspace=0.5, hspace=0.)
#network diagram
ax0_1 = fig.add_subplot(gs[:, 0], frameon=False)
ax0_1.set_title('point-neuron network', va='bottom')
network_sketch(ax0_1, yscaling=1.3)
ax0_1.xaxis.set_ticks([])
ax0_1.yaxis.set_ticks([])
phlp.annotate_subplot(ax0_1, ncols=4, nrows=1, letter='A', linear_offset=0.065)
#network raster
ax1 = fig.add_subplot(gs[:, 1], frameon=True)
phlp.remove_axis_junk(ax1)
phlp.annotate_subplot(ax1, ncols=4, nrows=1, letter='B', linear_offset=0.065)
x, y = networkSim.get_xy(T, fraction=fraction)
# networkSim.plot_raster(ax1, T, x, y, markersize=0.1, alpha=1.,legend=False, pop_names=True)
networkSim.plot_raster(ax1, T, x, y, markersize=0.2, marker='_', alpha=1.,legend=False, pop_names=True, rasterized=rasterized)
ax1.set_ylabel('')
ax1.xaxis.set_major_locator(plt.MaxNLocator(4))
ax1.set_title('spiking activity', va='bottom')
a = ax1.axis()
ax1.vlines(x['TC'][0], a[2], a[3], 'k', lw=0.25)
#population
ax2 = fig.add_subplot(gs[:, 2], frameon=False)
ax2.xaxis.set_ticks([])
ax2.yaxis.set_ticks([])
plot_population(ax2, params, isometricangle=np.pi/24, plot_somas=False,
plot_morphos=True, num_unitsE=1, num_unitsI=1,
clip_dendrites=True, main_pops=True, title='',
rasterized=rasterized)
ax2.set_title('multicompartment\nneurons', va='bottom', fontweight='normal')
phlp.annotate_subplot(ax2, ncols=4, nrows=1, letter='C', linear_offset=0.065)
#LFP traces in all channels
ax3 = fig.add_subplot(gs[:, 3], frameon=True)
phlp.remove_axis_junk(ax3)
plot_signal_sum(ax3, params, fname=os.path.join(params.savefolder, 'LFPsum.h5'),
unit='mV', vlimround=0.8,
T=T, ylim=[ax2.axis()[2], ax2.axis()[3]],
rasterized=False)
ax3.set_title('LFP', va='bottom')
ax3.xaxis.set_major_locator(plt.MaxNLocator(4))
phlp.annotate_subplot(ax3, ncols=4, nrows=1, letter='D', linear_offset=0.065)
a = ax3.axis()
ax3.vlines(x['TC'][0], a[2], a[3], 'k', lw=0.25)
#draw some arrows:
ax = plt.gca()
ax.annotate("", xy=(0.27, 0.5), xytext=(.24, 0.5),
xycoords="figure fraction",
arrowprops=dict(facecolor='black', arrowstyle='simple'),
)
ax.annotate("", xy=(0.52, 0.5), xytext=(.49, 0.5),
xycoords="figure fraction",
arrowprops=dict(facecolor='black', arrowstyle='simple'),
)
ax.annotate("", xy=(0.78, 0.5), xytext=(.75, 0.5),
xycoords="figure fraction",
arrowprops=dict(facecolor='black', arrowstyle='simple'),
)
return fig
def guess_decode_from_terminal(text, term):
"""Decode *text* coming from terminal *term*.
First try the terminal encoding, if given.
Then try UTF-8. Then try the preferred locale encoding.
Fall back to latin-1, which always works.
"""
if getattr(term, 'encoding', None):
try:
text = text.decode(term.encoding)
except UnicodeDecodeError:
pass
else:
return text, term.encoding
return guess_decode(text)
def end_position(variant_obj):
"""Calculate end position for a variant."""
alt_bases = len(variant_obj['alternative'])
num_bases = max(len(variant_obj['reference']), alt_bases)
return variant_obj['position'] + (num_bases - 1)
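For example, a deletion spanning three reference bases:

variant = {'position': 100, 'reference': 'ATG', 'alternative': 'A'}
print(end_position(variant))  # 102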
def is_connected(self, attempts=3):
"""Try to reconnect if neccessary.
:param attempts: The amount of tries to reconnect if neccessary.
:type attempts: ``int``
"""
if self.gce is None:
while attempts > 0:
self.logger.info("Attempting to connect ...")
try:
self.connect()
except ComputeEngineManagerException:
attempts -= 1
continue
self.logger.info("Connection established.")
return True
self.logger.error("Unable to connect to Google Compute Engine.")
return False
return True
def _merge_beam_dim(tensor):
"""Reshapes first two dimensions in to single dimension.
Args:
tensor: Tensor to reshape of shape [A, B, ...]
Returns:
Reshaped tensor of shape [A*B, ...]
"""
shape = common_layers.shape_list(tensor)
shape[0] *= shape[1] # batch -> batch * beam_size
shape.pop(1) # Remove beam dim
return tf.reshape(tensor, shape)
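A NumPy analogue of the same reshape, for intuition (the real code operates on TensorFlow tensors via common_layers.shape_list):

import numpy as np

t = np.zeros((2, 4, 10))         # [batch, beam, ...]
merged = t.reshape((2 * 4, 10))  # [batch * beam, ...]
print(merged.shape)              # (8, 10)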
def json_data(self):
"""Return json description of a question."""
return {
"number": self.number,
"type": self.type,
"participant_id": self.participant_id,
"question": self.question,
"response": self.response,
}
def get_distutils_display_options():
""" Returns a set of all the distutils display options in their long and
short forms. These are the setup.py arguments such as --name or --version
which print the project's metadata and then exit.
Returns
-------
opts : set
The long and short form display option arguments, including the - or --
"""
short_display_opts = set('-' + o[1] for o in Distribution.display_options
if o[1])
long_display_opts = set('--' + o[0] for o in Distribution.display_options)
# Include -h and --help which are not explicitly listed in
# Distribution.display_options (as they are handled by optparse)
short_display_opts.add('-h')
long_display_opts.add('--help')
# This isn't the greatest approach to hardcode these commands.
# However, there doesn't seem to be a good way to determine
# whether build *will be* run as part of the command at this
# phase.
display_commands = set([
'clean', 'register', 'setopt', 'saveopts', 'egg_info',
'alias'])
return short_display_opts.union(long_display_opts.union(display_commands))
def find_project_config_file(project_root: str) -> str:
"""Return absolute path to project-specific config file, if it exists.
:param project_root: Absolute path to project root directory.
A project config file is a file named `YCONFIG_FILE` found at the top
level of the project root dir.
Return `None` if project root dir is not specified,
or if no such file is found.
"""
if project_root:
project_config_file = os.path.join(project_root, YCONFIG_FILE)
if os.path.isfile(project_config_file):
return project_config_file
def correct_bounding_box_list_for_nonzero_origin(bbox_list, full_box_list):
"""The bounding box calculated from an image has coordinates relative to the
lower-left point in the PDF being at zero. Similarly, Ghostscript reports a
bounding box relative to a zero lower-left point. If the MediaBox (or full
page box) has been shifted, like when cropping a previously cropped
document, then we need to correct the bounding box by an additive
translation on all the points."""
corrected_box_list = []
for bbox, full_box in zip(bbox_list, full_box_list):
left_x = full_box[0]
lower_y = full_box[1]
corrected_box_list.append([bbox[0]+left_x, bbox[1]+lower_y,
bbox[2]+left_x, bbox[3]+lower_y])
return corrected_box_list
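For example, a single page whose full box has its lower-left corner at (10, 20); the bounding box is shifted by that offset:

bbox_list = [[0, 0, 100, 50]]
full_box_list = [[10, 20, 210, 120]]
print(correct_bounding_box_list_for_nonzero_origin(bbox_list, full_box_list))
# [[10, 20, 110, 70]]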
def create_response(self, status=201):
"""Generate a Response object for a POST request. By default, the newly created
object will be passed to the specified ResponseHandler and will be serialized
as the response body.
"""
self.response = self.get_response_handler()
self.response.process(self.obj)
return self._response(self.response.get_response_data(), status=status)
def format_uuid(
uuid,
max_length=10):
"""
Format a UUID string
:param str uuid: UUID to format
:param int max_length: Maximum length of result string (> 3)
:return: Formatted UUID
:rtype: str
:raises ValueError: If *max_length* is not larger than 3
This function formats a UUID so it is not longer than *max_length*
characters. The resulting string is returned. It does so by replacing
characters at the end of the *uuid* with three dots, if necessary.
The idea is that the start of the *uuid* is the most important part
to be able to identify the related entity.
The default *max_length* is 10, which will result in a string
containing the first 7 characters of the *uuid* passed in. Most of
the time, such a string is still unique within a collection of UUIDs.
"""
if max_length <= 3:
raise ValueError("max length must be larger than 3")
if len(uuid) > max_length:
uuid = "{}...".format(uuid[0:max_length-3])
return uuid
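Usage:

print(format_uuid('0123456789abcdef'))                 # '0123456...' (10 characters)
print(format_uuid('0123456789abcdef', max_length=20))  # unchanged: already short enough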
def SH_raxml(reftree, querytree, phy_file, shout="SH_out.txt"):
"""
SH test using RAxML
querytree can be a single tree or a bunch of trees (eg. from bootstrapping)
"""
assert op.isfile(reftree)
shout = must_open(shout, "a")
raxml_work = op.abspath(op.join(op.dirname(phy_file), "raxml_work"))
mkdir(raxml_work)
raxml_cl = RaxmlCommandline(cmd=RAXML_BIN("raxmlHPC"), \
sequences=phy_file, algorithm="h", model="GTRGAMMA", \
name="SH", starting_tree=reftree, bipartition_filename=querytree, \
working_dir=raxml_work)
logging.debug("Running SH test in RAxML: %s" % raxml_cl)
o, stderr = raxml_cl()
# hard coded
try:
pval = re.search('(Significantly.*:.*)', o).group(0)
except:
print("SH test failed.", file=sys.stderr)
else:
pval = pval.strip().replace("\t"," ").replace("%","\%")
print("{0}\t{1}".format(op.basename(querytree), pval), file=shout)
logging.debug("SH p-value appended to %s" % shout.name)
shout.close()
return shout.name
def val(self, name):
"""
retrieves a value, substituting actual
values for ConfigValue templates.
"""
v = getattr(self, name)
if hasattr(v, 'retrieve_value'):
v = v.retrieve_value(self.__dict__)
return v
def get_domain_and_name(self, domain_or_name):
"""
Given a ``str`` or :class:`boto.sdb.domain.Domain`, return a
``tuple`` with the following members (in order):
* An instance of :class:`boto.sdb.domain.Domain` for the requested
domain
* The domain's name as a ``str``
:type domain_or_name: ``str`` or :class:`boto.sdb.domain.Domain`
:param domain_or_name: The domain or domain name to get the domain
and name for.
:raises: :class:`boto.exception.SDBResponseError` when an invalid
domain name is specified.
:rtype: tuple
:return: A ``tuple`` with contents outlined as per above.
"""
if (isinstance(domain_or_name, Domain)):
return (domain_or_name, domain_or_name.name)
else:
return (self.get_domain(domain_or_name), domain_or_name)
def detail(self, detail=None, ret_r=False):
'''code's detail'''
if detail or ret_r:
self._detail = detail
return self
return self._detail
def _AddForemanRule(self):
"""Adds a foreman rule for this hunt."""
if data_store.RelationalDBEnabled():
# Relational DB uses ForemanCondition objects.
foreman_condition = foreman_rules.ForemanCondition(
creation_time=rdfvalue.RDFDatetime.Now(),
expiration_time=self.context.init_start_time + self.context.duration,
description="Hunt %s %s" %
(self.session_id, self.runner_args.hunt_name),
client_rule_set=self.runner_args.client_rule_set,
hunt_id=self.session_id.Basename(),
hunt_name=self.runner_args.hunt_name)
# Make sure the rule makes sense.
foreman_condition.Validate()
data_store.REL_DB.WriteForemanRule(foreman_condition)
else:
foreman_rule = foreman_rules.ForemanRule(
created=rdfvalue.RDFDatetime.Now(),
# TODO: Hunt context should differentiate between initial
# and last start time (similarly to normal hunt objects).
expires=self.context.create_time + self.context.duration,
description="Hunt %s %s" %
(self.session_id, self.runner_args.hunt_name),
client_rule_set=self.runner_args.client_rule_set)
foreman_rule.actions.Append(
hunt_id=self.session_id,
hunt_name=self.runner_args.hunt_name,
client_limit=self.runner_args.client_limit)
# Make sure the rule makes sense.
foreman_rule.Validate()
with aff4.FACTORY.Open(
"aff4:/foreman",
mode="rw",
token=self.token,
aff4_type=aff4_grr.GRRForeman) as foreman:
rules = foreman.Get(
foreman.Schema.RULES, default=foreman.Schema.RULES())
rules.Append(foreman_rule)
foreman.Set(rules)
def gdbgui():
"""Render the main gdbgui interface"""
interpreter = "lldb" if app.config["LLDB"] else "gdb"
gdbpid = request.args.get("gdbpid", 0)
initial_gdb_user_command = request.args.get("initial_gdb_user_command", "")
add_csrf_token_to_session()
THEMES = ["monokai", "light"]
# fmt: off
initial_data = {
"csrf_token": session["csrf_token"],
"gdbgui_version": __version__,
"gdbpid": gdbpid,
"initial_gdb_user_command": initial_gdb_user_command,
"interpreter": interpreter,
"initial_binary_and_args": app.config["initial_binary_and_args"],
"p": pbkdf2_hex(str(app.config.get("l")), "Feo8CJol")
if app.config.get("l")
else "",
"project_home": app.config["project_home"],
"remap_sources": app.config["remap_sources"],
"rr": app.config["rr"],
"show_gdbgui_upgrades": app.config["show_gdbgui_upgrades"],
"themes": THEMES,
"signals": SIGNAL_NAME_TO_OBJ,
"using_windows": USING_WINDOWS,
}
# fmt: on
return render_template(
"gdbgui.html",
version=__version__,
debug=app.debug,
interpreter=interpreter,
initial_data=initial_data,
themes=THEMES,
)
def get_std_xy_dataset_statistics(x_values, y_values, expect_negative_correlation = False, STDev_cutoff = 1.0):
'''Calls parse_csv and returns the analysis in a format similar to get_xy_dataset_statistics in klab.stats.misc.'''
assert(len(x_values) == len(y_values))
csv_lines = ['ID,X,Y'] + [','.join(map(str, [c + 1, x_values[c], y_values[c]])) for c in xrange(len(x_values))]
data = parse_csv(csv_lines, expect_negative_correlation = expect_negative_correlation, STDev_cutoff = STDev_cutoff)
assert(len(data['predictions']) == 1)
assert(1 in data['predictions'])
assert(data['predictions'][1]['name'] == 'Y')
summary_data = data['predictions'][1]
stats = {}
for spair in field_name_mapper:
stats[spair[1]] = summary_data[spair[0]]
if stats['std_warnings']:
stats['std_warnings'] = '\n'.join(stats['std_warnings'])
else:
stats['std_warnings'] = None
return stats
def describe(inlist):
"""
Returns some descriptive statistics of the passed list (assumed to be 1D).
Usage: ldescribe(inlist)
Returns: n, (min, max), mean, standard deviation, skew, kurtosis
"""
n = len(inlist)
mm = (min(inlist), max(inlist))
m = mean(inlist)
sd = stdev(inlist)
sk = skew(inlist)
kurt = kurtosis(inlist)
return n, mm, m, sd, sk, kurt
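A usage sketch, assuming the module's own mean/stdev/skew/kurtosis helpers are defined alongside:

data = [2, 4, 4, 4, 5, 5, 7, 9]
n, mm, m, sd, sk, kurt = describe(data)
print(n, mm, m)  # 8 (2, 9) 5.0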
def xpm(Pdb=Pdb):
"""
To be used inside an except clause, enter a post-mortem pdb
related to the just catched exception.
"""
info = sys.exc_info()
print(traceback.format_exc())
post_mortem(info[2], Pdb)
def item_gewest_adapter(obj, request):
"""
Adapter for rendering an object of
:class:`crabpy.gateway.crab.Gewest` to json.
"""
return {
'id': obj.id,
'namen': obj._namen,
'centroid': obj.centroid,
'bounding_box': obj.bounding_box
}
def consume(self, istream, ostream, batch=False):
"""Read points from istream and output to ostream."""
datapoints = [] # List of 2-tuples
if batch:
sleep = max(0.01, self.option.sleep)
fd = istream.fileno()
while True:
try:
                    # select() returns a 3-tuple; test the readable list
                    if select.select([fd], [], [], sleep)[0]:
try:
line = istream.readline()
if line == '':
break
datapoints.append(self.consume_line(line))
except ValueError:
continue
if self.option.sort_by_column:
datapoints = sorted(datapoints, key=itemgetter(self.option.sort_by_column - 1))
if len(datapoints) > 1:
datapoints = datapoints[-self.maximum_points:]
self.update([dp[0] for dp in datapoints], [dp[1] for dp in datapoints])
self.render(ostream)
time.sleep(sleep)
except KeyboardInterrupt:
break
else:
for line in istream:
try:
datapoints.append(self.consume_line(line))
except ValueError:
pass
if self.option.sort_by_column:
datapoints = sorted(datapoints, key=itemgetter(self.option.sort_by_column - 1))
self.update([dp[0] for dp in datapoints], [dp[1] for dp in datapoints])
self.render(ostream) | 0.003582 |
def save_data(self, filename):
"""
Save the assimilated data to a file.
Args:
filename (str): filename to save the assimilated data to. Note
that if the filename ends with gz or bz2, the relevant gzip
or bz2 compression will be applied.
"""
with zopen(filename, "wt") as f:
json.dump(list(self._data), f, cls=MontyEncoder) | 0.004773 |
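A short usage sketch; `assimilator` stands in for whatever object exposes `save_data` (an assumption), and the filename suffix selects the compression applied by `zopen`:

assimilator.save_data('entries.json')      # plain JSON
assimilator.save_data('entries.json.gz')   # gzip-compressed via zopen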
async def sonar_config(self, trigger_pin, echo_pin, cb=None,
ping_interval=50, max_distance=200, cb_type=None):
"""
Configure the pins,ping interval and maximum distance for an HC-SR04
type device.
Single pin configuration may be used. To do so, set both the trigger
and echo pins to the same value.
        Up to a maximum of 6 SONAR devices are supported.
        If the maximum is exceeded, a message is sent to the console and the
        request is ignored.
NOTE: data is measured in centimeters
:param trigger_pin: The pin number of for the trigger (transmitter).
:param echo_pin: The pin number for the received echo.
:param cb: optional callback function to report sonar data changes
        :param ping_interval: Minimum interval between pings. The lowest
                              value to use is 33 ms; the maximum is 127 ms.
:param max_distance: Maximum distance in cm. Max is 200.
:param cb_type: Constants.CB_TYPE_DIRECT = direct call or
Constants.CB_TYPE_ASYNCIO = asyncio coroutine
:returns: No return value.
"""
# if there is an entry for the trigger pin in existence, just exit
if trigger_pin in self.active_sonar_map:
return
if max_distance > 200:
max_distance = 200
max_distance_lsb = max_distance & 0x7f
max_distance_msb = (max_distance >> 7) & 0x7f
data = [trigger_pin, echo_pin, ping_interval, max_distance_lsb,
max_distance_msb]
await self.set_pin_mode(trigger_pin, Constants.SONAR, Constants.INPUT)
await self.set_pin_mode(echo_pin, Constants.SONAR, Constants.INPUT)
        # update the ping data map for this pin,
        # enforcing the documented cap of 6 sonar devices
        if len(self.active_sonar_map) >= 6:
if self.log_output:
logging.exception('sonar_config: maximum number of '
'devices assigned - ignoring request')
else:
print('sonar_config: maximum number of devices assigned'
' - ignoring request')
else:
self.active_sonar_map[trigger_pin] = [cb, cb_type, 0]
await self._send_sysex(PrivateConstants.SONAR_CONFIG, data) | 0.001307 |
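A hedged asyncio sketch of wiring up one sensor; how the board object is constructed and the exact layout of the callback's data argument are assumptions, not the library's documented contract:

import asyncio

async def on_sonar(data):
    # Assumed layout: the last element is the distance in centimeters.
    print('distance: {} cm'.format(data[-1]))

async def main(board):
    await board.sonar_config(trigger_pin=12, echo_pin=13, cb=on_sonar,
                             ping_interval=50, max_distance=200,
                             cb_type=Constants.CB_TYPE_ASYNCIO)
    await asyncio.sleep(10)   # let readings stream in for a while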
def email_user(
self, subject, message, from_email=settings.DEFAULT_FROM_EMAIL, **kwargs
):
"""
Sends an email to this User.
If settings.EMAIL_OVERRIDE_ADDRESS is set, this mail will be redirected to the alternate mail address.
"""
receiver = self.email
if settings.EMAIL_OVERRIDE_ADDRESS:
receiver = settings.EMAIL_OVERRIDE_ADDRESS
send_mail(subject, message, from_email, [receiver], **kwargs) | 0.01046 |
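Usage is one line; with `settings.EMAIL_OVERRIDE_ADDRESS` set (say, in a staging settings file), the same call is silently rerouted:

user.email_user('Welcome', 'Thanks for signing up.')        # goes to user.email
# settings.EMAIL_OVERRIDE_ADDRESS = 'qa-inbox@example.com'  # staging: rerouted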
def value(self):
'''The final value, if it has arrived
:raises: AttributeError, if not yet complete
:raises: an exception if the Future was :meth:`abort`\ed
'''
if not self._done.is_set():
raise AttributeError("value")
if self._failure:
raise self._failure[0], self._failure[1], self._failure[2]
return self._value | 0.007595 |
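A hedged sketch of the intended call pattern; `fut` is assumed to be an instance of the surrounding Future-like class (note the snippet above uses Python 2's three-argument raise):

try:
    result = fut.value            # raises AttributeError until the result lands
except AttributeError:
    result = None                 # not ready yet; poll or block elsewhere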
def format_bar(self):
""" Builds the progress bar """
pct = floor(round(self.progress/self.size, 2)*100)
pr = floor(pct*.33)
bar = "".join(
["‒" for x in range(pr)] + ["↦"] +
[" " for o in range(self._barsize-pr-1)])
subprogress = self.format_parent_bar() if self.parent_bar else ""
message = "Loading{} ={}{} ({}%)".format(subprogress, bar, "☉", pct)
return message.ljust(len(message)+5) | 0.004274 |
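The bar arithmetic in isolation, as a self-contained sketch; ASCII stand-ins replace the original's Unicode track, arrow, and head characters:

from math import floor

def render_bar(progress, size, barsize=33):
    pct = floor(round(float(progress) / size, 2) * 100)
    pr = floor(pct * .33)
    track = ['-'] * pr + ['>'] + [' '] * (barsize - pr - 1)
    return 'Loading ={}{} ({}%)'.format(''.join(track), 'o', pct)

print(render_bar(45, 100))   # 14 filled columns, then the head, then '(45%)'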
def dumps(reset=False):
"""Return a printable string of aggregate profile stats.
Parameters
----------
reset: boolean
        Indicates whether to clear aggregate statistical data collected up to this point
"""
debug_str = ctypes.c_char_p()
do_reset = 1 if reset is True else 0
check_call(_LIB.MXAggregateProfileStatsPrint(ctypes.byref(debug_str), int(do_reset)))
return py_str(debug_str.value) | 0.006944 |
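A usage sketch against MXNet 1.x's `mxnet.profiler` module; the `set_config`/`set_state` calls shown are the standard API there, but exact keyword availability varies across versions:

import mxnet as mx

mx.profiler.set_config(profile_all=True, aggregate_stats=True)
mx.profiler.set_state('run')
a = mx.nd.random.uniform(shape=(512, 512))
mx.nd.dot(a, a).wait_to_read()
mx.profiler.set_state('stop')
print(mx.profiler.dumps(reset=True))   # print aggregates, then clear them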
def _morph(self):
"""Turn a file system Node (either a freshly initialized directory
object or a separate Entry object) into a proper directory object.
Set up this directory's entries and hook it into the file
system tree. Specify that directories (this Node) don't use
signatures for calculating whether they're current.
"""
self.repositories = []
self.srcdir = None
self.entries = {}
self.entries['.'] = self
self.entries['..'] = self.dir
self.cwd = self
self.searched = 0
self._sconsign = None
self.variant_dirs = []
self.root = self.dir.root
self.changed_since_last_build = 3
self._func_sconsign = 1
self._func_exists = 2
self._func_get_contents = 2
self._abspath = SCons.Util.silent_intern(self.dir.entry_abspath(self.name))
self._labspath = SCons.Util.silent_intern(self.dir.entry_labspath(self.name))
if self.dir._path == '.':
self._path = SCons.Util.silent_intern(self.name)
else:
self._path = SCons.Util.silent_intern(self.dir.entry_path(self.name))
if self.dir._tpath == '.':
self._tpath = SCons.Util.silent_intern(self.name)
else:
self._tpath = SCons.Util.silent_intern(self.dir.entry_tpath(self.name))
self._path_elements = self.dir._path_elements + [self]
# For directories, we make a difference between the directory
# 'name' and the directory 'dirname'. The 'name' attribute is
# used when we need to print the 'name' of the directory or
        # when it is used as the last part of a path. The 'dirname'
        # is used when the directory is not the last element of the
        # path. The main reason for making that distinction is that
        # for RootDirs the dirname cannot be easily inferred from
# the name. For example, we have to add a '/' after a drive
# letter but not after a UNC path prefix ('//').
self.dirname = self.name + OS_SEP
# Don't just reset the executor, replace its action list,
# because it might have some pre-or post-actions that need to
# be preserved.
#
# But don't reset the executor if there is a non-null executor
# attached already. The existing executor might have other
# targets, in which case replacing the action list with a
# Mkdir action is a big mistake.
if not hasattr(self, 'executor'):
self.builder = get_MkdirBuilder()
self.get_executor().set_action_list(self.builder.action)
else:
# Prepend MkdirBuilder action to existing action list
l = self.get_executor().action_list
a = get_MkdirBuilder().action
l.insert(0, a)
self.get_executor().set_action_list(l) | 0.002406 |
def interruptible_sleep(t, event=None):
"""
    Sleeps for a specified duration, optionally stopping early when the
    event is set. Returns True if the full duration elapsed uninterrupted.
"""
log.info("sleeping %s", t)
if event is None:
time.sleep(t)
return False
else:
return not event.wait(t) | 0.003311 |
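A typical pairing with `threading.Event`: the worker keeps polling for as long as each sleep runs to completion, and exits the moment the event fires (`poll_once` is a hypothetical unit of work):

import threading

stop = threading.Event()

def worker():
    while interruptible_sleep(60, stop):   # True means it slept the full minute
        poll_once()                        # hypothetical periodic task

t = threading.Thread(target=worker)
t.start()
# later, from the main thread:
stop.set()                                 # wakes the sleeper immediately
t.join()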
def expired(self):
"""Boolean property if this timeout has expired
"""
if self._expired_latch:
return True
self._check_time_backwards()
if time.time() > self.end:
self._expired_latch = True
return True
return False | 0.006645 |
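A hedged sketch of how such a timeout is typically consumed; only the property is visible above, so the constructor shown is an assumption:

t = Timeout(5.0)                 # hypothetical: deadline 5 seconds from now
while not t.expired:
    do_unit_of_work()            # hypothetical work; the latch ends the loop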
def _get_layer_converter_fn(layer):
"""Get the right converter function for Keras
"""
layer_type = type(layer)
if layer_type in _KERAS_LAYER_REGISTRY:
return _KERAS_LAYER_REGISTRY[layer_type]
else:
raise TypeError("Keras layer of type %s is not supported." % type(layer)) | 0.006515 |
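The registry it reads is the common type-to-function dispatch table; a self-contained sketch of that pattern with illustrative names, not the converter's real table:

class Dense(object):
    def __init__(self, units):
        self.units = units

def _convert_dense(layer):
    return 'dense({})'.format(layer.units)

_REGISTRY = {Dense: _convert_dense}

def get_converter(layer):
    try:
        return _REGISTRY[type(layer)]
    except KeyError:
        raise TypeError('layer of type %s is not supported' % type(layer))

layer = Dense(64)
print(get_converter(layer)(layer))   # dense(64)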
def parse_address(formatted_address):
"""
    :param formatted_address: A string like "someone@example.com" or "My Email <someone@example.com>"
:return: Tuple: (address, name)
"""
if email_regex.match(formatted_address):
# Just a raw address
return (formatted_address, None)
match = formatted_address_regex.match(formatted_address)
if match:
(name, email) = match.group(1, 2)
return email.strip(), name.strip()
raise ValueError('"{}" is not a valid formatted address'.format(formatted_address)) | 0.008897 |
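`email_regex` and `formatted_address_regex` are module-level patterns not shown above; plausible stand-ins (both patterns are assumptions) make the function runnable on its own:

import re

email_regex = re.compile(r'^[^@<>\s]+@[^@<>\s]+\.[^@<>\s]+$')               # assumed
formatted_address_regex = re.compile(r'^\s*(.+?)\s*<\s*([^<>]+?)\s*>\s*$')  # assumed

print(parse_address('someone@example.com'))             # ('someone@example.com', None)
print(parse_address('My Email <someone@example.com>'))  # ('someone@example.com', 'My Email')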
def _scan_level(self, level, prev, img):
"""Scan a level.
Parameters
----------
level : `int`
Level number.
prev : `PIL.Image` or None
Previous level image or None if level == 0.
img : `PIL.Image`
Current level image.
Returns
-------
`generator` of (`str` or `markovchain.scanner.Scanner.END` or (`markovchain.scanner.Scanner.START`, `str`))
Token generator.
"""
if level == 0:
width, height = img.size
else:
width, height = prev.size
tr = self.traversal[0](width, height, ends=(level == 0))
if level == 0:
for xy in tr:
if xy is None:
yield self.END
else:
yield pixel_to_state(img.getpixel(xy))
yield self.END
else:
scale = self.level_scale[level - 1]
for xy in tr:
x0 = xy[0] * scale
y0 = xy[1] * scale
start = (
self.START,
pixel_to_state(prev.getpixel(xy))
)
yield start
for dxy in self.traversal[level](scale, scale, True):
if dxy is None:
yield start
yield pixel_to_state(
img.getpixel((x0 + dxy[0], y0 + dxy[1]))
)
yield self.END | 0.001985 |
def download_workflow_description_file(self, filename):
'''Downloads the workflow description and writes it to a *YAML* file.
Parameters
----------
filename: str
path to the file to which description should be written
See also
--------
:meth:`tmclient.api.TmClient.download_workflow_description`
'''
description = self.download_workflow_description()
logger.info('write workflow description to file: %s', filename)
with open(filename, 'w') as f:
content = yaml.safe_dump(
description, default_flow_style=False, explicit_start=True
)
f.write(content) | 0.002845 |
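A hedged usage sketch; the TmClient constructor arguments shown are assumptions about the client's entry point:

client = TmClient(host='localhost', port=80, experiment_name='exp1',
                  username='devuser', password='secret')   # args are assumptions
client.download_workflow_description_file('workflow_description.yaml')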
    def create_tag(self, name, description=None, servers=None):
        """
        Create a new Tag. Only name is mandatory.
        Returns the created Tag object.
        """
        servers = [str(server) for server in (servers or [])]
body = {'tag': Tag(name, description, servers).to_dict()}
res = self.request('POST', '/tag', body)
return Tag(cloud_manager=self, **res['tag']) | 0.005063 |
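One-line usage; `manager` is whatever CloudManager instance exposes this API, and the server ID is a placeholder:

tag = manager.create_tag('web', description='front-end hosts',
                         servers=['00af0f73-7082-4283-b925-811d1585774b'])
print(tag.name)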
def Nicklin_Wilkes_Davidson(x, rhol, rhog, m, D, g=g):
r'''Calculates void fraction in two-phase flow according to the model of
[1]_ as given in [2]_ and [3]_.
.. math::
\alpha = \frac{x}{\rho_g}\left[C_0\left(\frac{x}{\rho_g} + \frac{1-x}
{\rho_l}\right) +\frac{v_{gm}}{G} \right]^{-1}
.. math::
v_{gm} = 0.35\sqrt{gD}
.. math::
C_0 = 1.2
Parameters
----------
x : float
Quality at the specific tube interval []
rhol : float
Density of the liquid [kg/m^3]
rhog : float
Density of the gas [kg/m^3]
m : float
Mass flow rate of both phases, [kg/s]
D : float
Diameter of the channel, [m]
g : float, optional
Acceleration due to gravity, [m/s^2]
Returns
-------
alpha : float
Void fraction (area of gas / total area of channel), [-]
Examples
--------
>>> Nicklin_Wilkes_Davidson(0.4, 800., 2.5, m=1, D=0.3)
0.6798826626721431
References
----------
.. [1] D. Nicklin, J. Wilkes, J. Davidson, "Two-phase flow in vertical
tubes", Trans. Inst. Chem. Eng. 40 (1962) 61-68.
.. [2] Xu, Yu, and Xiande Fang. "Correlations of Void Fraction for Two-
Phase Refrigerant Flow in Pipes." Applied Thermal Engineering 64, no.
1-2 (March 2014): 242–51. doi:10.1016/j.applthermaleng.2013.12.032.
.. [3] Woldesemayat, Melkamu A., and Afshin J. Ghajar. "Comparison of Void
Fraction Correlations for Different Flow Patterns in Horizontal and
Upward Inclined Pipes." International Journal of Multiphase Flow 33,
no. 4 (April 2007): 347-370. doi:10.1016/j.ijmultiphaseflow.2006.09.004.
'''
G = m/(pi/4*D**2)
C0 = 1.2
vgm = 0.35*(g*D)**0.5
return x/rhog*(C0*(x/rhog + (1-x)/rhol) + vgm/G)**-1 | 0.006393 |
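Working the docstring example through by hand confirms the returned value, assuming the module's default g is standard gravity (SI units throughout):

from math import pi, sqrt

x, rhol, rhog, m, D, g = 0.4, 800., 2.5, 1., 0.3, 9.80665
G = m / (pi / 4 * D**2)             # mass flux, ~14.15 kg/(m^2*s)
vgm = 0.35 * sqrt(g * D)            # drift velocity, ~0.600 m/s
alpha = (x / rhog) / (1.2 * (x / rhog + (1 - x) / rhol) + vgm / G)
print(alpha)                        # 0.6798826626721431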
def _fchown(self, real, fileno, uid, gid):
"""Run fake fchown code if fileno points to a sub-path of our tree.
The ownership set with this fake fchown can be inspected by looking
at the self.uid/self.gid dictionaries.
"""
path = self._fake_path(self._path_from_fd(fileno))
self._chown_common(path, uid, gid) | 0.005618 |
def _request(self, method, endpoint, id=None, **kwargs):
"Handles retrying failed requests and error handling."
request = getattr(requests, method, None)
if not callable(request):
raise RequestError('Invalid method %s' % method)
# Find files, separate them out to correct kwarg for requests.
data = kwargs.get('data')
if data:
files = {}
for name, value in list(data.items()):
# Value might be a file-like object (with a read method), or it
# might be a (filename, file-like) tuple.
if hasattr(value, 'read') or isinstance(value, tuple):
files[name] = data.pop(name)
if files:
kwargs.setdefault('files', {}).update(files)
path = ['api', self.version, endpoint]
# If we received an ID, append it to the path.
if id:
path.append(str(id))
# Join fragments into a URL
path = '/'.join(path)
if not path.endswith('/'):
path += '/'
while '//' in path:
path = path.replace('//', '/')
url = self.url + path
# Add our user agent.
kwargs.setdefault('headers', {}).setdefault('User-Agent',
HTTP_USER_AGENT)
# Now try the request, if we get throttled, sleep and try again.
trys, retrys = 0, 3
while True:
if trys == retrys:
                raise RequestError('Could not complete request after %s tries.'
                                   % trys)
trys += 1
try:
return self._do_request(request, url, **kwargs)
except ResponseError as e:
if self.throttle_wait and e.status_code == 503:
m = THROTTLE_PATTERN.match(
e.response.headers.get('x-throttle', ''))
if m:
time.sleep(float(m.group(1)))
continue
# Failed for a reason other than throttling.
raise | 0.000938 |
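The throttle branch boils down to honoring a server-provided backoff; a self-contained sketch of that core loop, where the `x-throttle` header format matched here is an assumption mirroring `THROTTLE_PATTERN`:

import re
import time
import requests

THROTTLE_PATTERN = re.compile(r'.*?(\d+(?:\.\d+)?)')   # assumed header format

def get_with_throttle(url, retries=3):
    for attempt in range(retries):
        resp = requests.get(url)
        if resp.status_code == 503:
            m = THROTTLE_PATTERN.match(resp.headers.get('x-throttle', ''))
            if m:
                time.sleep(float(m.group(1)))   # wait as long as the server asks
                continue
        resp.raise_for_status()
        return resp
    raise RuntimeError('Could not complete request after %s tries.' % retries)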
def get_image_output(self):
"""
Create the output for the image
This is the Koji Content Generator metadata, along with the
'docker save' output to upload.
:return: tuple, (metadata dict, Output instance)
"""
saved_image = self.workflow.exported_image_sequence[-1].get('path')
image_name = get_image_upload_filename(self.workflow.exported_image_sequence[-1],
self.workflow.builder.image_id,
self.platform)
metadata = self.get_output_metadata(saved_image, image_name)
output = Output(file=open(saved_image), metadata=metadata)
return metadata, output | 0.004076 |
def start(self):
"""If we have a set of plugins that provide our expected listeners and
messengers, tell our dispatcher to start up. Otherwise, raise
InvalidApplication
"""
if not self.valid:
err = ("\nMessengers and listeners that still need set:\n\n"
"messengers : %s\n\n"
"listeners : %s\n")
raise InvalidApplication(err % (self.needed_messengers,
self.needed_listeners))
self.dispatcher.start() | 0.005425 |