text (string, lengths 78–104k) | score (float64, range 0–0.18)
---|---
def subscribe(self, peer_jid):
"""
Request presence subscription with the given `peer_jid`.
This is deliberately not a coroutine; we usually don't know whether the
peer is online, and they may defer the confirmation for a very long time,
if they confirm at all. Use :meth:`on_subscribed` to get notified when a
peer accepts a subscription request.
"""
self.client.enqueue(
stanza.Presence(type_=structs.PresenceType.SUBSCRIBE,
to=peer_jid)
) | 0.00367 |
def listdir(self, folder_id='0', offset=None, limit=None, fields=None):
'Get Box object, representing list of objects in a folder.'
if fields is not None\
and not isinstance(fields, types.StringTypes): fields = ','.join(fields)
return self(
join('folders', folder_id, 'items'),
dict(offset=offset, limit=limit, fields=fields) ) | 0.02924 |
def b58decode_int(v):
'''Decode a Base58 encoded string as an integer'''
v = v.rstrip()
v = scrub_input(v)
decimal = 0
for char in v:
decimal = decimal * 58 + alphabet.index(char)
return decimal | 0.004405 |
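# --- Illustration (not part of the source): a self-contained sketch of the
# same Base58-to-integer loop. The snippet above relies on the module-level
# `alphabet` and `scrub_input` helpers; the common Bitcoin alphabet is
# assumed here instead.
BITCOIN_ALPHABET = b'123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

def b58_to_int(v, alphabet=BITCOIN_ALPHABET):
    decimal = 0
    for char in v:                       # iterating bytes yields int values
        decimal = decimal * 58 + alphabet.index(char)
    return decimal

assert b58_to_int(b'2g') == 1 * 58 + 39  # '2' -> index 1, 'g' -> index 39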
def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs):
"""
`Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Check out `phi.builder.Builder.ThenAt` for more information.
"""
args = (arg1, arg2, arg3, arg4) + args
return self.ThenAt(5, f, *args, **kwargs) | 0.010169 |
def _to_bz2file(self, file_generator):
"""Convert file to bz2-compressed file.
:return: None
:rtype: :py:obj:`None`
"""
with bz2.BZ2File(file_generator.to_path, mode="wb") as outfile:
for f in file_generator:
outfile.write(f.writestr(file_generator.to_format).encode()) | 0.005935 |
def insert(queue, items):
'''
Add an item or items to a queue
'''
handle_queue_creation(queue)
with _conn(commit=True) as cur:
if isinstance(items, dict):
items = salt.utils.json.dumps(items)
cmd = str('''INSERT INTO {0}(data) VALUES('{1}')''').format(queue, items) # future lint: disable=blacklisted-function
log.debug('SQL Query: %s', cmd)
try:
cur.execute(cmd)
except psycopg2.IntegrityError as esc:
return ('Item already exists in this queue. '
'postgres error: {0}'.format(esc))
if isinstance(items, list):
items = [(salt.utils.json.dumps(el),) for el in items]
cmd = str("INSERT INTO {0}(data) VALUES (%s)").format(queue) # future lint: disable=blacklisted-function
log.debug('SQL Query: %s', cmd)
try:
cur.executemany(cmd, items)
except psycopg2.IntegrityError as esc:
return ('One or more items already exist in this queue. '
'postgres error: {0}'.format(esc))
return True | 0.002595 |
def get_maintainer(self):
# type: () -> hdx.data.user.User
"""Get the dataset's maintainer.
Returns:
User: Dataset's maintainer
"""
return hdx.data.user.User.read_from_hdx(self.data['maintainer'], configuration=self.configuration) | 0.014035 |
def _add_nodes(self, features):
""" Adds a node to the graph for each item in 'features' using
the GraphNodes from the editor factory.
"""
graph = self._graph
if graph is not None:
for feature in features:
for graph_node in self.factory.nodes:
if feature.__class__ in graph_node.node_for:
graph.add_node( id(feature), **graph_node.dot_attr )
break
graph.arrange_all() | 0.007737 |
def is_shaped(self):
"""Return description containing array shape if exists, else None."""
for description in (self.description, self.description1):
if not description:
return None
if description[:1] == '{' and '"shape":' in description:
return description
if description[:6] == 'shape=':
return description
return None | 0.004684 |
def export_hmaps_csv(key, dest, sitemesh, array, comment):
"""
Export the hazard maps of the given realization into CSV.
:param key: output_type and export_type
:param dest: name of the exported file
:param sitemesh: site collection
:param array: a composite array of dtype hmap_dt
:param comment: comment to use as header of the exported CSV file
"""
curves = util.compose_arrays(sitemesh, array)
writers.write_csv(dest, curves, comment=comment)
return [dest] | 0.00198 |
def fuzz(p, _inplace=0):
"""Transform a layer into a fuzzy layer by replacing some default values by random objects""" # noqa: E501
if not _inplace:
p = p.copy()
q = p
while not isinstance(q, NoPayload):
for f in q.fields_desc:
if isinstance(f, PacketListField):
for r in getattr(q, f.name):
print("fuzzing", repr(r))
fuzz(r, _inplace=1)
elif f.default is not None:
if not isinstance(f, ConditionalField) or f._evalcond(q):
rnd = f.randval()
if rnd is not None:
q.default_fields[f.name] = rnd
q = q.payload
return p | 0.001389 |
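# --- Illustration (not part of the source): typical use of Scapy's public
# fuzz() helper, assuming Scapy is installed. Explicitly set fields (dst)
# keep their values; only default fields are replaced by random value
# generators, and each send() re-draws fresh random values.
from scapy.all import IP, TCP, fuzz, send

pkt = fuzz(IP(dst="192.0.2.1") / TCP())
send(pkt, count=3)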
def get(key, default=-1):
"""Backport support for original codes."""
if isinstance(key, int):
return Checksum(key)
if key not in Checksum._member_map_:
extend_enum(Checksum, key, default)
return Checksum[key] | 0.007576 |
def _ReadPresetsFromFileObject(self, file_object):
"""Reads parser and parser plugin presets from a file-like object.
Args:
file_object (file): file-like object containing the parser and parser
plugin presets definitions.
Yields:
ParserPreset: a parser preset.
Raises:
MalformedPresetError: if one or more plugin preset definitions are
malformed.
"""
yaml_generator = yaml.safe_load_all(file_object)
last_preset_definition = None
for yaml_definition in yaml_generator:
try:
preset_definition = self._ReadParserPresetValues(yaml_definition)
except errors.MalformedPresetError as exception:
error_location = 'At start'
if last_preset_definition:
error_location = 'After: {0:s}'.format(last_preset_definition.name)
raise errors.MalformedPresetError(
'{0:s} {1!s}'.format(error_location, exception))
yield preset_definition
last_preset_definition = preset_definition | 0.005906 |
def onMessage(self, payload, isBinary):
"""
Send the payload onto the ``slack.{payload['type']}`` channel.
The message is translated from IDs to human-readable identifiers.
Note: the Slack API only sends JSON, so isBinary will always be false.
"""
msg = self.translate(unpack(payload))
if 'type' in msg:
channel_name = 'slack.{}'.format(msg['type'])
print('Sending on {}'.format(channel_name))
channels.Channel(channel_name).send({'text': pack(msg)}) | 0.003717 |
def amen_mv(A, x, tol, y=None, z=None, nswp=20, kickrank=4,
kickrank2=0, verb=True, init_qr=True, renorm='direct', fkick=False):
'''
Approximate the matrix-by-vector product via the AMEn iteration
[y, z] = amen_mv(A, x, tol, varargin)
Attempts to approximate y = A*x
with accuracy TOL using the AMEn+ALS iteration.
Matrix A has to be given in the TT-format, right-hand side x should be
given in the TT-format also.
Options are provided in form
'PropertyName1',PropertyValue1,'PropertyName2',PropertyValue2 and so
on. The parameters are set to default (in brackets in the following)
The list of option names and default values are:
o y0 - initial approximation to Ax [rand rank-2]
o nswp - maximal number of sweeps [20]
o verb - verbosity level, 0-silent, 1-sweep info, 2-block info [1]
o kickrank - compression rank of the error,
i.e. enrichment size [3]
o init_qr - perform QR of the input (save some time in ts, etc) [true]
o renorm - Orthog. and truncation methods: direct (svd,qr) or gram
(apply svd to the gram matrix, faster for m>>n) [direct]
o fkick - Perform solution enrichment during forward sweeps [false]
(rather questionable yet; false makes error higher, but "better
structured": it does not explode in e.g. subsequent matvecs)
o z0 - initial approximation to the error Ax-y [rand rank-kickrank]
********
For description of adaptive ALS please see
Sergey V. Dolgov, Dmitry V. Savostyanov,
Alternating minimal energy methods for linear systems in higher dimensions.
Part I: SPD systems, http://arxiv.org/abs/1301.6068,
Part II: Faster algorithm and application to nonsymmetric systems, http://arxiv.org/abs/1304.1222
Use {sergey.v.dolgov, dmitry.savostyanov}@gmail.com for feedback
********
'''
if renorm == 'gram':
print("Not implemented yet. Renorm is switched to 'direct'")
renorm = 'direct'
if isinstance(x, _tt.vector):
d = x.d
m = x.n
rx = x.r
x = _tt.vector.to_list(x)
vectype = 1 # tt_tensor
elif isinstance(x, list):
d = len(x)
m = _np.zeros(d)
rx = _np.ones(d + 1, dtype=_np.int32)
for i in xrange(d):
[_, m[i], rx[i + 1]] = x[i].shape
vectype = 0 # cell
else:
raise Exception('x: use tt.tensor or list of cores as numpy.arrays')
if isinstance(A, _tt.matrix):
n = A.n
ra = A.tt.r
A = _tt.matrix.to_list(A)
# prepare A for fast ALS-mv
for i in xrange(d):
A[i] = _reshape(A[i], (ra[i] * n[i], m[i] * ra[i + 1]))
atype = 1 # tt_matrix
# Alternative: A is a cell of cell: sparse canonical format
elif isinstance(A, list):
n = _np.zeros(d)
for i in xrange(d):
n[i] = A[i][0].shape[0]
ra = len(A[0])
atype = 0 # cell
else:
raise Exception('A: use tt.matrix or list of cores as numpy.arrays')
if y is None:
y = _tt.rand(n, d, 2)
y = _tt.vector.to_list(y)
else:
if isinstance(y, _tt.vector):
y = _tt.vector.to_list(y)
ry = _np.ones(d + 1, dtype=_np.int32)
for i in range(d):
ry[i + 1] = y[i].shape[2]
if (kickrank + kickrank2 > 0):
if z is None:
z = _tt.rand(n, d, kickrank + kickrank2)
rz = z.r
z = _tt.vector.to_list(z)
else:
if isinstance(z, _tt.vector):
z = _tt.vector.to_list(z)
rz = _np.ones(d + 1, dtype=_np.int32)
for i in range(d):
rz[i + 1] = z[i].shape[2]
phizax = [None] * (d + 1) # cell(d+1,1);
if (atype == 1):
phizax[0] = _np.ones((1, 1, 1)) # 1
phizax[d] = _np.ones((1, 1, 1)) # 1
else:
phizax[0] = _np.ones((1, ra)) # 33
phizax[d] = _np.ones((1, ra))
phizy = [None] * (d + 1)
phizy[0] = _np.ones((1)) # , 1))
phizy[d] = _np.ones((1)) # , 1))
phiyax = [None] * (d + 1)
if (atype == 1):
phiyax[0] = _np.ones((1, 1, 1)) # 1
phiyax[d] = _np.ones((1, 1, 1)) # 1
else:
phiyax[0] = _np.ones((1, ra)) # 3
phiyax[d] = _np.ones((1, ra))
nrms = _np.ones(d)
# Initial ort
for i in range(d - 1):
if init_qr:
cr = _reshape(y[i], (ry[i] * n[i], ry[i + 1]))
if (renorm == 'gram') and (ry[i] * n[i] > 5 * ry[i + 1]):
[cr, s, R] = _svdgram(cr)
else:
[cr, R] = _np.linalg.qr(cr)
nrmr = _np.linalg.norm(R) # , 'fro')
if (nrmr > 0):
R = R / nrmr
cr2 = _reshape(y[i + 1], (ry[i + 1], n[i + 1] * ry[i + 2]))
cr2 = _np.dot(R, cr2)
ry[i + 1] = cr.shape[1]
y[i] = _reshape(cr, (ry[i], n[i], ry[i + 1]))
y[i + 1] = _reshape(cr2, (ry[i + 1], n[i + 1], ry[i + 2]))
[phiyax[i + 1], nrms[i]] = _compute_next_Phi(phiyax[i], y[i], x[i], 'lr', A[i])
if (kickrank + kickrank2 > 0):
cr = _reshape(z[i], (rz[i] * n[i], rz[i + 1]))
if (renorm == 'gram') and (rz[i] * n[i] > 5 * rz[i + 1]):
[cr, s, R] = _svdgram(cr)
else:
[cr, R] = _np.linalg.qr(cr)
nrmr = _np.linalg.norm(R) # , 'fro')
if (nrmr > 0):
R = R / nrmr
cr2 = _reshape(z[i + 1], (rz[i + 1], n[i + 1] * rz[i + 2]))
cr2 = _np.dot(R, cr2)
rz[i + 1] = cr.shape[1]
z[i] = _reshape(cr, (rz[i], n[i], rz[i + 1]))
z[i + 1] = _reshape(cr2, (rz[i + 1], n[i + 1], rz[i + 2]))
phizax[i + 1] = _compute_next_Phi(phizax[i], z[i], x[i], 'lr', A[i], nrms[i], return_norm=False)
phizy[i + 1] = _compute_next_Phi(phizy[i], z[i], y[i], 'lr', return_norm=False)
i = d - 1
direct = -1
swp = 1
max_dx = 0
while swp <= nswp:
# Project the MatVec generating vector
crx = _reshape(x[i], (rx[i] * m[i] * rx[i + 1], 1))
cry = _bfun3(phiyax[i], A[i], phiyax[i + 1], crx)
nrms[i] = _np.linalg.norm(cry) # , 'fro')
# The main goal is to keep y[i] of norm 1
if (nrms[i] > 0):
cry = cry / nrms[i]
else:
nrms[i] = 1
y[i] = _reshape(y[i], (ry[i] * n[i] * ry[i + 1], 1))
dx = _np.linalg.norm(cry - y[i])
max_dx = max(max_dx, dx)
# Truncation and enrichment
if ((direct > 0) and (i < d - 1)): # ?? i<d
cry = _reshape(cry, (ry[i] * n[i], ry[i + 1]))
if (renorm == 'gram'):
[u, s, v] = _svdgram(cry, tol / d**0.5)
v = v.T
r = u.shape[1]
else:
[u, s, vt] = _np.linalg.svd(cry, full_matrices=False)
#s = diag(s)
r = _my_chop2(s, tol * _np.linalg.norm(s) / d**0.5)
u = u[:, :r]
# ????? s - matrix or vector
v = _np.dot(_tconj(vt[:r, :]), _np.diag(s[:r]))
# Prepare enrichment, if needed
if (kickrank + kickrank2 > 0):
cry = _np.dot(u, v.T)
cry = _reshape(cry, (ry[i] * n[i], ry[i + 1]))
# For updating z
crz = _bfun3(phizax[i], A[i], phizax[i + 1], crx)
crz = _reshape(crz, (rz[i] * n[i], rz[i + 1]))
ys = _np.dot(cry, phizy[i + 1])
yz = _reshape(ys, (ry[i], n[i] * rz[i + 1]))
yz = _np.dot(phizy[i], yz)
yz = _reshape(yz, (rz[i] * n[i], rz[i + 1]))
crz = crz / nrms[i] - yz
nrmz = _np.linalg.norm(crz) # , 'fro')
if (kickrank2 > 0):
[crz, _, _] = _np.linalg.svd(crz, full_matrices=False)
crz = crz[:, : min(crz.shape[1], kickrank)]
crz = _np.hstack(
(crz, _np.random.randn(
rz[i] * n[i], kickrank2)))
# For adding into solution
if fkick:
crs = _bfun3(phiyax[i], A[i], phizax[i + 1], crx)
crs = _reshape(crs, (ry[i] * n[i], rz[i + 1]))
crs = crs / nrms[i] - ys
u = _np.hstack((u, crs))
if (renorm == 'gram') and (
ry[i] * n[i] > 5 * (ry[i + 1] + rz[i + 1])):
[u, s, R] = _svdgram(u)
else:
[u, R] = _np.linalg.qr(u)
v = _np.hstack((v, _np.zeros((ry[i + 1], rz[i + 1]))))
v = _np.dot(v, R.T)
r = u.shape[1]
y[i] = _reshape(u, (ry[i], n[i], r))
cr2 = _reshape(y[i + 1], (ry[i + 1], n[i + 1] * ry[i + 2]))
v = _reshape(v, (ry[i + 1], r))
cr2 = _np.dot(v.T, cr2)
y[i + 1] = _reshape(cr2, (r, n[i + 1], ry[i + 2]))
ry[i + 1] = r
[phiyax[i + 1], nrms[i]] = _compute_next_Phi(phiyax[i], y[i], x[i], 'lr', A[i])
if (kickrank + kickrank2 > 0):
if (renorm == 'gram') and (rz[i] * n[i] > 5 * rz[i + 1]):
[crz, s, R] = _svdgram(crz)
else:
[crz, R] = _np.linalg.qr(crz)
rz[i + 1] = crz.shape[1]
z[i] = _reshape(crz, (rz[i], n[i], rz[i + 1]))
# z[i+1] will be recomputed from scratch in the next step
phizax[i + 1] = _compute_next_Phi(phizax[i], z[i], x[i], 'lr', A[i], nrms[i], return_norm=False)
phizy[i + 1] = _compute_next_Phi(phizy[i], z[i], y[i], 'lr', return_norm=False)
elif ((direct < 0) and (i > 0)):
cry = _reshape(cry, (ry[i], n[i] * ry[i + 1]))
if (renorm == 'gram'):
[v, s, u] = _svdgram(cry.T, tol / d**0.5)
u = u.T
r = v.shape[1]
else:
#[v, s, u] = _np.linalg.svd(cry.T, full_matrices=False)
[u, s, vt] = _np.linalg.svd(cry, full_matrices=False)
#s = diag(s);
r = _my_chop2(s, tol * _np.linalg.norm(s) / d**0.5)
v = _tconj(vt[:r, :])
#v = vt[:r, :]
#v = _np.dot(v[:, :r], _np.diag(s[:r]))
u = _np.dot(u[:, :r], _np.diag(s[:r])) # ??????????????????
# Prepare enrichment, if needed
if (kickrank + kickrank2 > 0):
cry = _np.dot(u, v.T) # .T)
cry = _reshape(cry, (ry[i], n[i] * ry[i + 1]))
# For updating z
crz = _bfun3(phizax[i], A[i], phizax[i + 1], crx)
crz = _reshape(crz, (rz[i], n[i] * rz[i + 1]))
ys = _np.dot(phizy[i], cry)
yz = _reshape(ys, (rz[i] * n[i], ry[i + 1]))
yz = _np.dot(yz, phizy[i + 1])
yz = _reshape(yz, (rz[i], n[i] * rz[i + 1]))
crz = crz / nrms[i] - yz
nrmz = _np.linalg.norm(crz) # , 'fro')
if (kickrank2 > 0):
[_, _, crz] = _np.linalg.svd(crz, full_matrices=False)
crz = crz[:, : min(crz.shape[1], kickrank)]
crz = _tconj(crz)
crz = _np.vstack(
(crz, _np.random.randn(kickrank2, n[i] * rz[i + 1])))
# For adding into solution
crs = _bfun3(phizax[i], A[i], phiyax[i + 1], crx)
crs = _reshape(crs, (rz[i], n[i] * ry[i + 1]))
crs = crs / nrms[i] - ys
v = _np.hstack((v, crs.T)) # .T
#v = v.T
if (renorm == 'gram') and (
n[i] * ry[i + 1] > 5 * (ry[i] + rz[i])):
[v, s, R] = _svdgram(v)
else:
[v, R] = _np.linalg.qr(v)
u = _np.hstack((u, _np.zeros((ry[i], rz[i]))))
u = _np.dot(u, R.T)
r = v.shape[1]
cr2 = _reshape(y[i - 1], (ry[i - 1] * n[i - 1], ry[i]))
cr2 = _np.dot(cr2, u)
y[i - 1] = _reshape(cr2, (ry[i - 1], n[i - 1], r))
y[i] = _reshape(v.T, (r, n[i], ry[i + 1]))
ry[i] = r
[phiyax[i], nrms[i]] = _compute_next_Phi(
phiyax[i + 1], y[i], x[i], 'rl', A[i])
if (kickrank + kickrank2 > 0):
if (renorm == 'gram') and (n[i] * rz[i + 1] > 5 * rz[i]):
[crz, s, R] = _svdgram(crz.T)
else:
[crz, R] = _np.linalg.qr(crz.T)
rz[i] = crz.shape[1]
z[i] = _reshape(crz.T, (rz[i], n[i], rz[i + 1]))
# don't update z[i-1], it will be recomputed from scratch
phizax[i] = _compute_next_Phi(phizax[i + 1], z[i], x[i], 'rl', A[i], nrms[i], return_norm=False)
phizy[i] = _compute_next_Phi(
phizy[i + 1], z[i], y[i], 'rl', return_norm=False)
if (verb > 1):
print('amen-mv: swp=[%d,%d], dx=%.3e, r=%d, |y|=%.3e, |z|=%.3e' % (swp, i, dx, r, _np.linalg.norm(cry), nrmz))
# Stopping or reversing
if ((direct > 0) and (i == d - 1)) or ((direct < 0) and (i == 0)):
if (verb > 0):
print('amen-mv: swp=%d{%d}, max_dx=%.3e, max_r=%d' % (swp, (1 - direct) // 2, max_dx, max(ry)))
if ((max_dx < tol) or (swp == nswp)) and (direct > 0):
break
else:
# We are at the terminal block
y[i] = _reshape(cry, (ry[i], n[i], ry[i + 1]))
if (direct > 0):
swp = swp + 1
max_dx = 0
direct = -direct
else:
i = i + direct
# if (direct>0)
y[d - 1] = _reshape(cry, (ry[d - 1], n[d - 1], ry[d]))
# else
# y{1} = reshape(cry, ry(1), n(1), ry(2));
# end;
# Distribute norms equally...
nrms = _np.exp(sum(_np.log(nrms)) / d)
# ... and plug them into y
for i in xrange(d):
y[i] = _np.dot(y[i], nrms)
if (vectype == 1):
y = _tt.vector.from_list(y)
if kickrank == 0:
z = None
else:
z = _tt.vector.from_list(z)
return y, z | 0.000847 |
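# --- Illustration (not part of the source): a hedged usage sketch of the
# amen_mv() routine above, assuming the ttpy package is available. With the
# identity operator the result should reproduce x up to the tolerance.
import tt

d, n = 8, 2
A = tt.eye(n, d)                  # identity operator in TT-matrix format
x = tt.rand(n, d, 3)              # random rank-3 TT-vector
y, z = amen_mv(A, x, 1e-8, verb=False)
print((y - x).norm() / x.norm())  # expected to be on the order of 1e-8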
def update_frame(self, key, ranges=None, plot=None):
"""
Updates an existing plot with data corresponding
to the key.
"""
element = self._get_frame(key)
text, _, _ = self.get_data(element, ranges, {})
self.handles['plot'].text = text | 0.00692 |
def _set_mpls_reopt_lsp(self, v, load=False):
"""
Setter method for mpls_reopt_lsp, mapped from YANG variable /brocade_mpls_rpc/mpls_reopt_lsp (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_mpls_reopt_lsp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mpls_reopt_lsp() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=mpls_reopt_lsp.mpls_reopt_lsp, is_leaf=True, yang_name="mpls-reopt-lsp", rest_name="mpls-reopt-lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'mplsReoptimize'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """mpls_reopt_lsp must be of a type compatible with rpc""",
'defined-type': "rpc",
'generated-type': """YANGDynClass(base=mpls_reopt_lsp.mpls_reopt_lsp, is_leaf=True, yang_name="mpls-reopt-lsp", rest_name="mpls-reopt-lsp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'mplsReoptimize'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)""",
})
self.__mpls_reopt_lsp = t
if hasattr(self, '_set'):
self._set() | 0.00615 |
def process_results(self, results=None, **value):
"""take results list of all events and return first dict"""
for res in results:
if 'mask' in res:
res['mask'] = utils.IrcString(res['mask'])
value['success'] = res.pop('retcode', None) != '486'
value.update(res)
return value | 0.005714 |
def find_funcs_called_with_kwargs(sourcecode, target_kwargs_name='kwargs'):
r"""
Finds functions that are called with the keyword `kwargs` variable
CommandLine:
python3 -m utool.util_inspect find_funcs_called_with_kwargs
Example:
>>> # ENABLE_DOCTEST
>>> import utool as ut
>>> sourcecode = ut.codeblock(
'''
x, y = list(zip(*ut.ichunks(data, 2)))
somecall(arg1, arg2, arg3=4, **kwargs)
import sys
sys.badcall(**kwargs)
def foo():
bar(**kwargs)
ut.holymoly(**kwargs)
baz()
def biz(**kwargs):
foo2(**kwargs)
''')
>>> child_funcnamess = ut.find_funcs_called_with_kwargs(sourcecode)
>>> print('child_funcnamess = %r' % (child_funcnamess,))
>>> assert 'foo2' not in child_funcnamess, 'foo2 should not be found'
>>> assert 'bar' in child_funcnamess, 'bar should be found'
"""
import ast
sourcecode = 'from __future__ import print_function\n' + sourcecode
pt = ast.parse(sourcecode)
child_funcnamess = []
debug = False or VERYVERB_INSPECT
if debug:
print('\nInput:')
print('target_kwargs_name = %r' % (target_kwargs_name,))
print('\nSource:')
print(sourcecode)
import astor
print('\nParse:')
print(astor.dump(pt))
class KwargParseVisitor(ast.NodeVisitor):
"""
TODO: understand ut.update_existing and dict update
ie, know when kwargs is passed to these functions and
then look assume the object that was updated is a dictionary
and check wherever that is passed to kwargs as well.
"""
def visit_FunctionDef(self, node):
if debug:
print('\nVISIT FunctionDef node = %r' % (node,))
print('node.args.kwarg = %r' % (node.args.kwarg,))
if six.PY2:
kwarg_name = node.args.kwarg
else:
if node.args.kwarg is None:
kwarg_name = None
else:
kwarg_name = node.args.kwarg.arg
#import utool as ut
#ut.embed()
if kwarg_name != target_kwargs_name:
# target kwargs is still in scope
ast.NodeVisitor.generic_visit(self, node)
def visit_Call(self, node):
if debug:
print('\nVISIT Call node = %r' % (node,))
#print(ut.repr4(node.__dict__,))
if isinstance(node.func, ast.Attribute):
try:
funcname = node.func.value.id + '.' + node.func.attr
except AttributeError:
funcname = None
elif isinstance(node.func, ast.Name):
funcname = node.func.id
else:
raise NotImplementedError(
'do not know how to parse: node.func = %r' % (node.func,))
if six.PY2:
kwargs = node.kwargs
kwargs_name = None if kwargs is None else kwargs.id
if funcname is not None and kwargs_name == target_kwargs_name:
child_funcnamess.append(funcname)
if debug:
print('funcname = %r' % (funcname,))
print('kwargs_name = %r' % (kwargs_name,))
else:
if node.keywords:
for kwargs in node.keywords:
if kwargs.arg is None:
if hasattr(kwargs.value, 'id'):
kwargs_name = kwargs.value.id
if funcname is not None and kwargs_name == target_kwargs_name:
child_funcnamess.append(funcname)
if debug:
print('funcname = %r' % (funcname,))
print('kwargs_name = %r' % (kwargs_name,))
ast.NodeVisitor.generic_visit(self, node)
try:
KwargParseVisitor().visit(pt)
except Exception:
raise
pass
#import utool as ut
#if ut.SUPER_STRICT:
# raise
return child_funcnamess | 0.001597 |
def start_update(self, layer_id):
"""
A shortcut to create a new version and start importing it.
Effectively the same as :py:meth:`koordinates.layers.LayerManager.create_draft` followed by :py:meth:`koordinates.layers.LayerManager.start_import`.
"""
target_url = self.client.get_url('LAYER', 'POST', 'update', {'layer_id': layer_id})
r = self.client.request('POST', target_url, json={})
return self.parent.create_from_result(r.json()) | 0.008163 |
def register():
"""Uses the new style of registration based on GitHub Pelican issue #314."""
signals.initialized.connect(initialized)
try:
signals.content_object_init.connect(detect_content)
signals.all_generators_finalized.connect(detect_images_and_galleries)
signals.article_writer_finalized.connect(resize_photos)
except Exception as e:
logger.exception('Plugin failed to execute: {}'.format(pprint.pformat(e))) | 0.006494 |
def destripe_plus(inputfile, suffix='strp', stat='pmode1', maxiter=15,
sigrej=2.0, lower=None, upper=None, binwidth=0.3,
scimask1=None, scimask2=None,
dqbits=None, rpt_clean=0, atol=0.01,
cte_correct=True, clobber=False, verbose=True):
r"""Calibrate post-SM4 ACS/WFC exposure(s) and use
standalone :ref:`acsdestripe`.
This takes a RAW image and generates a FLT file containing
its calibrated and destriped counterpart.
If CTE correction is performed, FLC will also be present.
Parameters
----------
inputfile : str or list of str
Input filenames in one of these formats:
* a Python list of filenames
* a partial filename with wildcards ('\*raw.fits')
* filename of an ASN table ('j12345670_asn.fits')
* an at-file (``@input``)
suffix : str
The string to use to add to each input file name to
indicate an output product of ``acs_destripe``.
This only affects the intermediate output file that will
be automatically renamed to ``*blv_tmp.fits`` during the processing.
stat : { 'pmode1', 'pmode2', 'mean', 'mode', 'median', 'midpt' } (Default = 'pmode1')
Specifies the statistics to be used for computation of the
background in image rows:
* 'pmode1' - SEXTRACTOR-like mode estimate based on a
modified `Pearson's rule <http://en.wikipedia.org/wiki/Nonparametric_skew#Pearson.27s_rule>`_:
``2.5*median-1.5*mean``;
* 'pmode2' - mode estimate based on
`Pearson's rule <http://en.wikipedia.org/wiki/Nonparametric_skew#Pearson.27s_rule>`_:
``3*median-2*mean``;
* 'mean' - the mean of the distribution of the "good" pixels (after
clipping, masking, etc.);
* 'mode' - the mode of the distribution of the "good" pixels;
* 'median' - the median of the distribution of the "good" pixels;
* 'midpt' - estimate of the median of the distribution of the "good"
pixels based on an algorithm similar to IRAF's `imagestats` task
(``CDF(midpt)=1/2``).
.. note::
The midpoint and mode are computed in two passes through the
image. In the first pass the standard deviation of the pixels
is calculated and used with the *binwidth* parameter to compute
the resolution of the data histogram. The midpoint is estimated
by integrating the histogram and computing by interpolation
the data value at which exactly half the pixels are below that
data value and half are above it. The mode is computed by
locating the maximum of the data histogram and fitting the peak
by parabolic interpolation.
maxiter : int
This parameter controls the maximum number of iterations
to perform when computing the statistics used to compute the
row-by-row corrections.
sigrej : float
This parameters sets the sigma level for the rejection applied
during each iteration of statistics computations for the
row-by-row corrections.
lower : float, None (Default = None)
Lower limit of usable pixel values for computing the background.
This value should be specified in the units of the input image(s).
upper : float, None (Default = None)
Upper limit of usable pixel values for computing the background.
This value should be specified in the units of the input image(s).
binwidth : float (Default = 0.3)
Histogram's bin width, in sigma units, used to sample the
distribution of pixel brightness values in order to compute the
background statistics. This parameter is applicable *only* to *stat*
parameter values of `'mode'` or `'midpt'`.
clobber : bool
Specify whether or not to 'clobber' (delete then replace)
previously generated products with the same names.
scimask1 : str or list of str
Mask images for *calibrated* ``SCI,1``, one for each input file.
Pixels with zero values will be masked out, in addition to clipping.
scimask2 : str or list of str
Mask images for *calibrated* ``SCI,2``, one for each input file.
Pixels with zero values will be masked out, in addition to clipping.
This is not used for subarrays.
dqbits : int, str, None (Default = None)
Integer sum of all the DQ bit values from the input image's DQ array
that should be considered "good" when building masks for de-striping
computations. For example, if pixels in the DQ array can be
combinations of 1, 2, 4, and 8 flags and one wants to consider
DQ "defects" having flags 2 and 4 as being acceptable for de-striping
computations, then `dqbits` should be set to 2+4=6. Then a DQ pixel
having values 2,4, or 6 will be considered a good pixel, while a DQ
pixel with a value, e.g., 1+2=3, 4+8=12, etc. will be flagged
as a "bad" pixel.
Alternatively, one can enter a comma- or '+'-separated list of
integer bit flags that should be added to obtain the final
"good" bits. For example, both ``4,8`` and ``4+8`` are equivalent to
setting `dqbits` to 12.
| Set `dqbits` to 0 to make *all* non-zero pixels in the DQ
mask to be considered "bad" pixels, and the corresponding image
pixels not to be used for de-striping computations.
| Default value (`None`) will turn off the use of image's DQ array
for de-striping computations.
| In order to reverse the meaning of the `dqbits`
parameter from indicating values of the "good" DQ flags
to indicating the "bad" DQ flags, prepend '~' to the string
value. For example, in order not to use pixels with
DQ flags 4 and 8 for sky computations and to consider
as "good" all other pixels (regardless of their DQ flag),
set `dqbits` to ``~4+8``, or ``~4,8``. To obtain the
same effect with an `int` input value (except for 0),
enter -(4+8+1)=-9. Following this convention,
a `dqbits` string value of ``'~0'`` would be equivalent to
setting ``dqbits=None``.
.. note::
DQ masks (if used), *will be* combined with user masks specified
in the `scimask1` and `scimask2` parameters (if any).
rpt_clean : int
An integer indicating how many *additional* times stripe cleaning
should be performed on the input image. Default = 0.
atol : float, None
The threshold for maximum absolute value of bias stripe correction
below which repeated cleanings can stop. When `atol` is `None`
cleaning will be repeated `rpt_clean` number of times.
Default = 0.01 [e].
cte_correct : bool
Perform CTE correction.
verbose : bool
Print informational messages. Default = True.
Raises
------
ImportError
``stsci.tools`` not found.
IOError
Input file does not exist.
ValueError
Invalid header values or CALACS version.
"""
# Optional package dependencies
from stsci.tools import parseinput
try:
from stsci.tools.bitmask import interpret_bit_flags
except ImportError:
from stsci.tools.bitmask import (
interpret_bits_value as interpret_bit_flags
)
# process input file(s) and if we have multiple input files - recursively
# call acs_destripe_plus for each input image:
flist = parseinput.parseinput(inputfile)[0]
if isinstance(scimask1, str):
mlist1 = parseinput.parseinput(scimask1)[0]
elif isinstance(scimask1, np.ndarray):
mlist1 = [scimask1.copy()]
elif scimask1 is None:
mlist1 = []
elif isinstance(scimask1, list):
mlist1 = []
for m in scimask1:
if isinstance(m, np.ndarray):
mlist1.append(m.copy())
elif isinstance(m, str):
mlist1 += parseinput.parseinput(m)[0]
else:
raise TypeError("'scimask1' must be a list of str or "
"numpy.ndarray values.")
else:
raise TypeError("'scimask1' must be either a str, or a "
"numpy.ndarray, or a list of the two type of "
"values.")
if isinstance(scimask2, str):
mlist2 = parseinput.parseinput(scimask2)[0]
elif isinstance(scimask2, np.ndarray):
mlist2 = [scimask2.copy()]
elif scimask2 is None:
mlist2 = []
elif isinstance(scimask2, list):
mlist2 = []
for m in scimask2:
if isinstance(m, np.ndarray):
mlist2.append(m.copy())
elif isinstance(m, str):
mlist2 += parseinput.parseinput(m)[0]
else:
raise TypeError("'scimask2' must be a list of str or "
"numpy.ndarray values.")
else:
raise TypeError("'scimask2' must be either a str, or a "
"numpy.ndarray, or a list of the two type of "
"values.")
n_input = len(flist)
n_mask1 = len(mlist1)
n_mask2 = len(mlist2)
if n_input == 0:
raise ValueError(
'No input file(s) provided or the file(s) do not exist')
if n_mask1 == 0:
mlist1 = [None] * n_input
elif n_mask1 != n_input:
raise ValueError('Insufficient masks for [SCI,1]')
if n_mask2 == 0:
mlist2 = [None] * n_input
elif n_mask2 != n_input:
raise ValueError('Insufficient masks for [SCI,2]')
if n_input > 1:
for img, mf1, mf2 in zip(flist, mlist1, mlist2):
destripe_plus(
inputfile=img, suffix=suffix, stat=stat,
lower=lower, upper=upper, binwidth=binwidth,
maxiter=maxiter, sigrej=sigrej,
scimask1=scimask1, scimask2=scimask2, dqbits=dqbits,
cte_correct=cte_correct, clobber=clobber, verbose=verbose
)
return
inputfile = flist[0]
scimask1 = mlist1[0]
scimask2 = mlist2[0]
# verify that the RAW image exists in cwd
cwddir = os.getcwd()
if not os.path.exists(os.path.join(cwddir, inputfile)):
raise IOError("{0} does not exist.".format(inputfile))
# get image's primary header:
header = fits.getheader(inputfile)
# verify masks defined (or not) simultaneously:
if (header['CCDAMP'] == 'ABCD' and
((scimask1 is not None and scimask2 is None) or
(scimask1 is None and scimask2 is not None))):
raise ValueError("Both 'scimask1' and 'scimask2' must be specified "
"or not specified together.")
calacs_str = subprocess.check_output(['calacs.e', '--version']).split()[0]
calacs_ver = [int(x) for x in calacs_str.decode().split('.')]
if calacs_ver < [8, 3, 1]:
raise ValueError('CALACS {0} is incompatible. '
'Must be 8.3.1 or later.'.format(calacs_str))
# check date for post-SM4 and if supported subarray or full frame
is_subarray = False
ctecorr = header['PCTECORR']
aperture = header['APERTURE']
detector = header['DETECTOR']
date_obs = Time(header['DATE-OBS'])
# intermediate filenames
blvtmp_name = inputfile.replace('raw', 'blv_tmp')
blctmp_name = inputfile.replace('raw', 'blc_tmp')
# output filenames
tra_name = inputfile.replace('_raw.fits', '.tra')
flt_name = inputfile.replace('raw', 'flt')
flc_name = inputfile.replace('raw', 'flc')
if detector != 'WFC':
raise ValueError("{0} is not a WFC image, please check the 'DETECTOR'"
" keyword.".format(inputfile))
if date_obs < SM4_DATE:
raise ValueError(
"{0} is a pre-SM4 image.".format(inputfile))
if header['SUBARRAY'] and cte_correct:
if aperture in SUBARRAY_LIST:
is_subarray = True
else:
LOG.warning('Using non-supported subarray, '
'turning CTE correction off')
cte_correct = False
# delete files from previous CALACS runs
if clobber:
for tmpfilename in [blvtmp_name, blctmp_name, flt_name, flc_name,
tra_name]:
if os.path.exists(tmpfilename):
os.remove(tmpfilename)
# run ACSCCD on RAW
acsccd.acsccd(inputfile)
# modify user mask with DQ masks if requested
dqbits = interpret_bit_flags(dqbits)
if dqbits is not None:
# save 'tra' file in memory to trick the log file
# not to save first acs2d log as this is done only
# for the purpose of obtaining DQ masks.
# WISH: it would have been nice if there were an easy way of obtaining
# just the DQ masks as if data were calibrated but without
# having to recalibrate them with acs2d.
if os.path.isfile(tra_name):
with open(tra_name) as fh:
tra_lines = fh.readlines()
else:
tra_lines = None
# apply flats, etc.
acs2d.acs2d(blvtmp_name, verbose=False, quiet=True)
# extract DQ arrays from the FLT image:
dq1, dq2 = _read_DQ_arrays(flt_name)
mask1 = _get_mask(scimask1, 1)
scimask1 = acs_destripe._mergeUserMaskAndDQ(dq1, mask1, dqbits)
mask2 = _get_mask(scimask2, 2)
if dq2 is not None:
scimask2 = acs_destripe._mergeUserMaskAndDQ(dq2, mask2, dqbits)
elif mask2 is None:
scimask2 = None
# reconstruct trailer file:
if tra_lines is not None:
with open(tra_name, mode='w') as fh:
fh.writelines(tra_lines)
# delete temporary FLT image:
if os.path.isfile(flt_name):
os.remove(flt_name)
# execute destriping (post-SM4 data only)
acs_destripe.clean(
blvtmp_name, suffix, stat=stat, maxiter=maxiter, sigrej=sigrej,
lower=lower, upper=upper, binwidth=binwidth,
mask1=scimask1, mask2=scimask2, dqbits=dqbits,
rpt_clean=rpt_clean, atol=atol, clobber=clobber, verbose=verbose)
blvtmpsfx = 'blv_tmp_{0}'.format(suffix)
os.rename(inputfile.replace('raw', blvtmpsfx), blvtmp_name)
# update subarray header
if is_subarray and cte_correct:
fits.setval(blvtmp_name, 'PCTECORR', value='PERFORM')
ctecorr = 'PERFORM'
# perform CTE correction on destriped image
if cte_correct:
if ctecorr == 'PERFORM':
acscte.acscte(blvtmp_name)
else:
LOG.warning(
"PCTECORR={0}, cannot run CTE correction".format(ctecorr))
cte_correct = False
# run ACS2D to get FLT and FLC images
acs2d.acs2d(blvtmp_name)
if cte_correct:
acs2d.acs2d(blctmp_name)
# delete intermediate files
os.remove(blvtmp_name)
if cte_correct and os.path.isfile(blctmp_name):
os.remove(blctmp_name)
info_str = 'Done.\nFLT: {0}\n'.format(flt_name)
if cte_correct:
info_str += 'FLC: {0}\n'.format(flc_name)
LOG.info(info_str) | 0.000261 |
def delete(self, request, bot_id, id, format=None):
"""
Delete existing Messenger Bot
---
responseMessages:
- code: 401
message: Not authenticated
"""
return super(MessengerBotDetail, self).delete(request, bot_id, id, format) | 0.010033 |
def isexception(obj):
"""Given an object, return a boolean indicating whether it is an instance
or subclass of :py:class:`Exception`.
"""
if isinstance(obj, Exception):
return True
if isclass(obj) and issubclass(obj, Exception):
return True
return False | 0.003413 |
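# --- Illustration (not part of the source): the helper accepts both
# exception instances and exception classes, and rejects everything else.
assert isexception(ValueError("boom"))
assert isexception(KeyError)
assert not isexception("not an error")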
def get_bit_series(self, bits=None):
"""Get the `StateTimeSeries` for each bit of this `StateVector`.
Parameters
----------
bits : `list`, optional
a list of bit indices or bit names, defaults to all bits
Returns
-------
bitseries : `StateTimeSeriesDict`
a `dict` of `StateTimeSeries`, one for each given bit
"""
if bits is None:
bits = [b for b in self.bits if b not in {None, ''}]
bindex = []
for bit in bits:
try:
bindex.append((self.bits.index(bit), bit))
except (IndexError, ValueError) as exc:
exc.args = ('Bit %r not found in StateVector' % bit,)
raise
self._bitseries = StateTimeSeriesDict()
for i, bit in bindex:
self._bitseries[bit] = StateTimeSeries(
self.value >> i & 1, name=bit, epoch=self.x0.value,
channel=self.channel, sample_rate=self.sample_rate)
return self._bitseries | 0.001898 |
def img_box(img, box):
"""
Selects the sub-image inside the given box
:param img: Image to crop from
:param box: Box to crop from. Box can be either Box object or array of [x, y, width, height]
:return: Cropped sub-image from the main image
"""
if isinstance(box, tuple):
box = Box.from_tup(box)
if len(img.shape) == 3:
return img[box.y:box.y + box.height, box.x:box.x + box.width, :]
else:
return img[box.y:box.y + box.height, box.x:box.x + box.width] | 0.003891 |
def simxUnpackFloats(floatsPackedInString):
'''
Please have a look at the function description/documentation in the V-REP user manual
'''
b=[]
for i in range(int(len(floatsPackedInString)/4)):
b.append(struct.unpack('<f',floatsPackedInString[4*i:4*(i+1)])[0])
return b | 0.013333 |
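# --- Illustration (not part of the source): a round trip with the standard
# struct module. A matching packer (little-endian 32-bit floats) is assumed
# here; the V-REP bindings ship their own simxPackFloats counterpart.
import struct

def pack_floats(floats):
    return b''.join(struct.pack('<f', f) for f in floats)

packed = pack_floats([1.0, 2.5, -3.0])
unpacked = [struct.unpack('<f', packed[4 * i:4 * (i + 1)])[0]
            for i in range(len(packed) // 4)]
assert unpacked == [1.0, 2.5, -3.0]   # all three values are exact in float32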
def read_units(self, fid):
"""Read units from an acclaim skeleton file stream."""
lin = self.read_line(fid)
while lin[0] != ':':
parts = lin.split()
if parts[0]=='mass':
self.mass = float(parts[1])
elif parts[0]=='length':
self.length = float(parts[1])
elif parts[0]=='angle':
self.angle = parts[1]
lin = self.read_line(fid)
return lin | 0.012146 |
def diffstore(self, dest, *others):
"""
Store the set difference of the current set and one or more
others in a new key.
:param dest: the name of the key to store set difference
:param others: One or more :py:class:`Set` instances
:returns: A :py:class:`Set` referencing ``dest``.
"""
keys = [self.key]
keys.extend([other.key for other in others])
self.database.sdiffstore(dest, keys)
return self.database.Set(dest) | 0.003968 |
def lookup(self, pathogenName, sampleName):
"""
Look up a pathogen name, sample name combination and get its
FASTA/FASTQ file name and unique read count.
This method should be used instead of C{add} in situations where
you want an exception to be raised if a pathogen/sample combination has
not already been passed to C{add}.
@param pathogenName: A C{str} pathogen name.
@param sampleName: A C{str} sample name.
@raise KeyError: If the pathogen name or sample name have not been
seen, either individually or in combination.
@return: A (C{str}, C{int}) tuple retrieved from self._readsFilenames
"""
pathogenIndex = self._pathogens[pathogenName]
sampleIndex = self._samples[sampleName]
return self._readsFilenames[(pathogenIndex, sampleIndex)] | 0.002307 |
def timestamp_insert(sender, frames):
"""
Timestamp the created and modified fields for all documents. This method
should be bound to a frame class like so:
```
MyFrameClass.listen('insert', MyFrameClass.timestamp_insert)
```
"""
for frame in frames:
timestamp = datetime.now(timezone.utc)
frame.created = timestamp
frame.modified = timestamp | 0.006772 |
def is_disconnected(self, node_id):
"""Check whether the node connection has been disconnected or failed.
A disconnected node has either been closed or has failed. Connection
failures are usually transient and can be resumed in the next ready()
call, but there are cases where transient failures need to be caught
and re-acted upon.
Arguments:
node_id (int): the id of the node to check
Returns:
bool: True iff the node exists and is disconnected
"""
conn = self._conns.get(node_id)
if conn is None:
return False
return conn.disconnected() | 0.003003 |
def decktape():
'''Install DeckTape.
DeckTape is a "high-quality PDF exporter for HTML5 presentation
frameworks". It can be used to create PDFs from reveal.js presentations.
More info:
https://github.com/astefanutti/decktape
https://github.com/hakimel/reveal.js/issues/1252#issuecomment-198270915
'''
run('mkdir -p ~/bin/decktape')
if not exists('~/bin/decktape/decktape-1.0.0'):
print_msg('\n## download decktape 1.0.0\n')
run('cd ~/bin/decktape && '
'curl -L https://github.com/astefanutti/decktape/archive/'
'v1.0.0.tar.gz | tar -xz --exclude phantomjs')
run('cd ~/bin/decktape/decktape-1.0.0 && '
'curl -L https://github.com/astefanutti/decktape/releases/'
'download/v1.0.0/phantomjs-linux-x86-64 -o phantomjs')
run('cd ~/bin/decktape/decktape-1.0.0 && '
'chmod +x phantomjs')
run('ln -snf ~/bin/decktape/decktape-1.0.0 ~/bin/decktape/active',
msg='\n## link installed decktape version as active')
print_msg('\nCreate PDF from reveal.js presentation:\n\n '
'# serve presentation:\n '
'cd ~/repos/my_presi/reveal.js/ && npm start\n\n '
'# create pdf in another shell:\n '
'cd ~/bin/decktape/active && \\\n '
'./phantomjs decktape.js --size 1280x800 localhost:8000 '
'~/repos/my_presi/my_presi.pdf') | 0.000688 |
def generate(env):
"""Add Builders and construction variables for swig to an Environment."""
c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
c_file.suffix['.i'] = swigSuffixEmitter
cxx_file.suffix['.i'] = swigSuffixEmitter
c_file.add_action('.i', SwigAction)
c_file.add_emitter('.i', _swigEmitter)
cxx_file.add_action('.i', SwigAction)
cxx_file.add_emitter('.i', _swigEmitter)
java_file = SCons.Tool.CreateJavaFileBuilder(env)
java_file.suffix['.i'] = swigSuffixEmitter
java_file.add_action('.i', SwigAction)
java_file.add_emitter('.i', _swigEmitter)
if 'SWIG' not in env:
env['SWIG'] = env.Detect(swigs) or swigs[0]
env['SWIGVERSION'] = _get_swig_version(env, env['SWIG'])
env['SWIGFLAGS'] = SCons.Util.CLVar('')
env['SWIGDIRECTORSUFFIX'] = '_wrap.h'
env['SWIGCFILESUFFIX'] = '_wrap$CFILESUFFIX'
env['SWIGCXXFILESUFFIX'] = '_wrap$CXXFILESUFFIX'
env['_SWIGOUTDIR'] = r'${"-outdir \"%s\"" % SWIGOUTDIR}'
env['SWIGPATH'] = []
env['SWIGINCPREFIX'] = '-I'
env['SWIGINCSUFFIX'] = ''
env['_SWIGINCFLAGS'] = '$( ${_concat(SWIGINCPREFIX, SWIGPATH, SWIGINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
env['SWIGCOM'] = '$SWIG -o $TARGET ${_SWIGOUTDIR} ${_SWIGINCFLAGS} $SWIGFLAGS $SOURCES' | 0.008895 |
def show(dataset_uri, overlay_name):
"""
Show the content of a specific overlay.
"""
dataset = dtoolcore.DataSet.from_uri(dataset_uri)
try:
overlay = dataset.get_overlay(overlay_name)
except: # NOQA
click.secho(
"No such overlay: {}".format(overlay_name),
fg="red",
err=True
)
sys.exit(11)
formatted_json = json.dumps(overlay, indent=2)
colorful_json = pygments.highlight(
formatted_json,
pygments.lexers.JsonLexer(),
pygments.formatters.TerminalFormatter())
click.secho(colorful_json, nl=False) | 0.0016 |
def multipublish(self, topic, messages):
"""Publish an iterable of messages to the given topic over http.
:param topic: the topic to publish to
:param messages: iterable of bytestrings to publish
"""
self.send(nsq.multipublish(topic, messages)) | 0.006993 |
def distance_to_point(self, p):
'''Returns the distance from the point to the interval. Zero if the point lies inside the interval.'''
if self.start <= p <= self.end:
return 0
else:
return min(abs(self.start - p), abs(self.end - p)) | 0.010714 |
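# --- Illustration (not part of the source): the same logic as a standalone
# function; the original is a method on an interval object exposing
# `start` and `end` attributes.
def interval_distance(start, end, p):
    if start <= p <= end:
        return 0
    return min(abs(start - p), abs(end - p))

assert interval_distance(2, 5, 3) == 0    # inside the interval
assert interval_distance(2, 5, 7) == 2    # right of the interval
assert interval_distance(2, 5, -1) == 3   # left of the interval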
def get_strain_label(entry, viral=False):
"""Try to extract a strain from an assemly summary entry.
First this checks 'infraspecific_name', then 'isolate', then
it tries to get it from 'organism_name'. If all fails, it
falls back to just returning the assembly accession number.
"""
def get_strain(entry):
strain = entry['infraspecific_name']
if strain != '':
strain = strain.split('=')[-1]
return strain
strain = entry['isolate']
if strain != '':
return strain
if len(entry['organism_name'].split(' ')) > 2 and not viral:
strain = ' '.join(entry['organism_name'].split(' ')[2:])
return strain
return entry['assembly_accession']
def cleanup(strain):
strain = strain.strip()
strain = strain.replace(' ', '_')
strain = strain.replace(';', '_')
strain = strain.replace('/', '_')
strain = strain.replace('\\', '_')
return strain
return cleanup(get_strain(entry)) | 0.000951 |
def file_envs(self, load=None):
'''
Return environments for all backends for requests from fileclient
'''
if load is None:
load = {}
load.pop('cmd', None)
return self.envs(**load) | 0.008368 |
def __draw_constant_line(self, value_label_style):
"Draw a constant line on the y-axis with the label"
value, label, style = value_label_style
start = self.transform_output_coordinates((0, value))[1]
stop = self.graph_width
path = etree.SubElement(self.graph, 'path', {
'd': 'M 0 %(start)s h%(stop)s' % locals(),
'class': 'constantLine'})
if style:
path.set('style', style)
text = etree.SubElement(self.graph, 'text', {
'x': str(2),
'y': str(start - 2),
'class': 'constantLine'})
text.text = label | 0.030132 |
def squash_dates(obj):
"""squash datetime objects into ISO8601 strings"""
if isinstance(obj, dict):
obj = dict(obj) # don't clobber
for k,v in obj.iteritems():
obj[k] = squash_dates(v)
elif isinstance(obj, (list, tuple)):
obj = [ squash_dates(o) for o in obj ]
elif isinstance(obj, datetime):
obj = obj.strftime(ISO8601)
return obj | 0.012658 |
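# --- Illustration (not part of the source): a Python 3 sketch of the same
# recursive squashing, assuming an ISO 8601 format string (the original
# relies on a module-level ISO8601 constant and Python 2 iteritems()).
from datetime import datetime

ISO8601 = "%Y-%m-%dT%H:%M:%S.%f"

def squash(obj):
    if isinstance(obj, dict):
        return {k: squash(v) for k, v in obj.items()}
    if isinstance(obj, (list, tuple)):
        return [squash(o) for o in obj]
    if isinstance(obj, datetime):
        return obj.strftime(ISO8601)
    return obj

print(squash({"ran_at": datetime(2020, 1, 2, 3, 4, 5), "tags": ["a", "b"]}))
# {'ran_at': '2020-01-02T03:04:05.000000', 'tags': ['a', 'b']}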
def _dict_seq_locus(list_c, loci_obj, seq_obj):
"""
return dict with sequences = [ cluster1, cluster2 ...]
"""
seqs = defaultdict(set)
# n = len(list_c.keys())
for c in list_c.values():
for l in c.loci2seq:
[seqs[s].add(c.id) for s in c.loci2seq[l]]
common = [s for s in seqs if len(seqs[s]) > 1]
seqs_in_c = defaultdict(float)
for c in list_c.values():
for l in c.loci2seq:
# total = sum([v for v in loci_obj[l].coverage.values()])
for s in c.loci2seq[l]:
if s in common:
pos = seq_obj[s].pos[l]
# cov = 1.0 * loci_obj[l].coverage[pos] / total
cov = 1.0 * loci_obj[l].coverage[pos]
if seqs_in_c[(s, c.id)] < cov:
seqs_in_c[(s, c.id)] = cov
seqs_in_c = _transform(seqs_in_c)
return seqs_in_c | 0.003304 |
def read_namespaced_pod_preset(self, name, namespace, **kwargs): # noqa: E501
"""read_namespaced_pod_preset # noqa: E501
read the specified PodPreset # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_namespaced_pod_preset(name, namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the PodPreset (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
:param bool export: Should this value be exported. Export strips fields that a user can not specify.
:return: V1alpha1PodPreset
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_namespaced_pod_preset_with_http_info(name, namespace, **kwargs) # noqa: E501
else:
(data) = self.read_namespaced_pod_preset_with_http_info(name, namespace, **kwargs) # noqa: E501
return data | 0.001409 |
def begin(self):
"""Load variables from checkpoint.
New model variables have the following name format:
new_model_scope/old_model_scope/xxx/xxx:0 To find the map of
name to variable, need to strip the new_model_scope and then
match the old_model_scope and remove the suffix :0.
"""
variables_to_restore = tf.contrib.framework.get_variables_to_restore(
include=self._include, exclude=self._exclude)
# remove new_model_scope from variable name prefix
assignment_map = {variable.name[len(self._new_model_scope):]: variable
for variable in variables_to_restore
if variable.name.startswith(self._new_model_scope)}
# remove :0 from variable name suffix
assignment_map = {name.split(":")[0]: variable
for name, variable in six.iteritems(assignment_map)
if name.startswith(self._old_model_scope)}
self._assignment_map = assignment_map
tf.logging.info("restoring %d variables from checkpoint %s"%(
len(assignment_map), self._checkpoint_path))
tf.train.init_from_checkpoint(self._checkpoint_path, self._assignment_map) | 0.001708 |
def isPIDValid(self, pid):
""" Checks if a PID is associated with a running process """
## Slightly copied wholesale from http://stackoverflow.com/questions/568271/how-to-check-if-there-exists-a-process-with-a-given-pid
## Thanks to http://stackoverflow.com/users/1777162/ntrrgc and http://stackoverflow.com/users/234270/speedplane
class ExitCodeProcess(ctypes.Structure):
_fields_ = [('hProcess', ctypes.c_void_p),
('lpExitCode', ctypes.POINTER(ctypes.c_ulong))]
SYNCHRONIZE = 0x100000
PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
process = self._kernel32.OpenProcess(SYNCHRONIZE|PROCESS_QUERY_LIMITED_INFORMATION, 0, pid)
if not process:
return False
ec = ExitCodeProcess()
out = self._kernel32.GetExitCodeProcess(process, ctypes.byref(ec))
if not out:
err = self._kernel32.GetLastError()
if self._kernel32.GetLastError() == 5:
# Access is denied.
logging.warning("Access is denied to get pid info.")
self._kernel32.CloseHandle(process)
return False
elif bool(ec.lpExitCode):
# There is an exit code, it quit
self._kernel32.CloseHandle(process)
return False
# No exit code, it's running.
self._kernel32.CloseHandle(process)
return True | 0.005638 |
def loop_through_editors(self, backward=False):
"""
Loops through the editor tabs.
:param backward: Looping backward.
:type backward: bool
:return: Method success.
:rtype: bool
"""
step = not backward and 1 or -1
idx = self.Script_Editor_tabWidget.currentIndex() + step
if idx < 0:
idx = self.Script_Editor_tabWidget.count() - 1
elif idx > self.Script_Editor_tabWidget.count() - 1:
idx = 0
self.Script_Editor_tabWidget.setCurrentIndex(idx)
return True | 0.003442 |
def get_contents(self, el, no_iframe=False):
"""Get contents or contents in reverse."""
if not no_iframe or not self.is_iframe(el):
for content in el.contents:
yield content | 0.009217 |
def start(self):
"""
Starts this QEMU VM.
"""
with (yield from self._execute_lock):
if self.is_running():
# resume the VM if it is paused
yield from self.resume()
return
if self._manager.config.get_section_config("Qemu").getboolean("monitor", True):
try:
info = socket.getaddrinfo(self._monitor_host, 0, socket.AF_UNSPEC, socket.SOCK_STREAM, 0, socket.AI_PASSIVE)
if not info:
raise QemuError("getaddrinfo returns an empty list on {}".format(self._monitor_host))
for res in info:
af, socktype, proto, _, sa = res
# let the OS find an unused port for the Qemu monitor
with socket.socket(af, socktype, proto) as sock:
sock.bind(sa)
self._monitor = sock.getsockname()[1]
except OSError as e:
raise QemuError("Could not find free port for the Qemu monitor: {}".format(e))
# check if there is enough RAM to run
self.check_available_ram(self.ram)
command = yield from self._build_command()
command_string = " ".join(shlex.quote(s) for s in command)
try:
log.info("Starting QEMU with: {}".format(command_string))
self._stdout_file = os.path.join(self.working_dir, "qemu.log")
log.info("logging to {}".format(self._stdout_file))
with open(self._stdout_file, "w", encoding="utf-8") as fd:
fd.write("Start QEMU with {}\n\nExecution log:\n".format(command_string))
self.command_line = ' '.join(command)
self._process = yield from asyncio.create_subprocess_exec(*command,
stdout=fd,
stderr=subprocess.STDOUT,
cwd=self.working_dir)
yield from self._start_ubridge()
for adapter_number, adapter in enumerate(self._ethernet_adapters):
nio = adapter.get_nio(0)
if nio:
yield from self.add_ubridge_udp_connection("QEMU-{}-{}".format(self._id, adapter_number),
self._local_udp_tunnels[adapter_number][1],
nio)
log.info('QEMU VM "{}" started PID={}'.format(self._name, self._process.pid))
self.status = "started"
monitor_process(self._process, self._termination_callback)
except (OSError, subprocess.SubprocessError, UnicodeEncodeError) as e:
stdout = self.read_stdout()
log.error("Could not start QEMU {}: {}\n{}".format(self.qemu_path, e, stdout))
raise QemuError("Could not start QEMU {}: {}\n{}".format(self.qemu_path, e, stdout))
yield from self._set_process_priority()
if self._cpu_throttling:
self._set_cpu_throttling()
if "-enable-kvm" in command_string:
self._hw_virtualization = True
try:
yield from self.start_wrap_console()
except OSError as e:
raise QemuError("Could not start QEMU console {}\n".format(e)) | 0.004946 |
def border(self):
"""Region formed by taking border elements.
:returns: :class:`jicimagelib.region.Region`
"""
border_array = self.bitmap - self.inner.bitmap
return Region(border_array) | 0.008811 |
def build(self, input_path, output_paths):
"""Should be extended by subclasses to actually do stuff. By default
this will copy `input` over every file in the `outputs` list."""
for output in output_paths:
shutil.copy(input_path, output)
def clean(self):
"""Deallocates the fortran-managed memory that this ctype references.
"""
if not self.deallocated:
#Release/deallocate the pointer in fortran.
method = self._deallocator()
if method is not None:
dealloc = static_symbol("ftypes_dealloc", method, self.libpath, True)
if dealloc is None:
return
arrtype = ndpointer(dtype=int, ndim=1, shape=(len(self.indices),), flags="F")
dealloc.argtypes = [c_void_p, c_int_p, arrtype]
nindices = require(array([i.value for i in self.indices]), int, "F")
dealloc(byref(self.pointer), c_int(len(self.indices)), nindices)
self.deallocated = True | 0.009009 |
def interp_like(self, other, method='linear', assume_sorted=False,
kwargs={}):
"""Interpolate this object onto the coordinates of another object,
filling out of range values with NaN.
Parameters
----------
other : Dataset or DataArray
Object with an 'indexes' attribute giving a mapping from dimension
names to an 1d array-like, which provides coordinates upon
which to index the variables in this dataset.
method: string, optional.
{'linear', 'nearest'} for multidimensional array,
{'linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic'}
for 1-dimensional array. 'linear' is used by default.
assume_sorted: boolean, optional
If False, values of coordinates that are interpolated over can be
in any order and they are sorted first. If True, interpolated
coordinates are assumed to be an array of monotonically increasing
values.
kwargs: dictionary, optional
Additional keyword passed to scipy's interpolator.
Returns
-------
interpolated: xr.DataArray
Another dataarray by interpolating this dataarray's data along the
coordinates of the other object.
Notes
-----
scipy is required.
If the dataarray has object-type coordinates, reindex is used for these
coordinates instead of the interpolation.
See Also
--------
DataArray.interp
DataArray.reindex_like
"""
if self.dtype.kind not in 'uifc':
raise TypeError('interp only works for a numeric type array. '
'Given {}.'.format(self.dtype))
ds = self._to_temp_dataset().interp_like(
other, method=method, kwargs=kwargs, assume_sorted=assume_sorted)
return self._from_temp_dataset(ds) | 0.001529 |
def cpu():
'''
Tests for the CPU performance of minions.
CLI Examples:
.. code-block:: bash
salt '*' sysbench.cpu
'''
# Test data
max_primes = [500, 1000, 2500, 5000]
# Initializing the test variables
test_command = 'sysbench --test=cpu --cpu-max-prime={0} run'
result = None
ret_val = {}
# Test beings!
for primes in max_primes:
key = 'Prime numbers limit: {0}'.format(primes)
run_command = test_command.format(primes)
result = __salt__['cmd.run'](run_command)
ret_val[key] = _parser(result)
return ret_val | 0.001639 |
def remote_ssh(self, host):
""" Execute a command on SSH. Takes a paramiko host dict """
logger.info('Starting remote execution of task {0} on host {1}'.format(self.name, host['hostname']))
try:
self.remote_client = paramiko.SSHClient()
self.remote_client.load_system_host_keys()
self.remote_client.set_missing_host_key_policy(
paramiko.AutoAddPolicy())
self.remote_client.connect(host['hostname'], username=host['user'],
key_filename=host['identityfile'][0],
timeout=82800)
transport = self.remote_client.get_transport()
transport.set_keepalive(10)
self.remote_channel = transport.open_session()
self.remote_channel.get_pty()
self.remote_channel.exec_command(self.command)
except Exception as e:
logger.warn('Exception encountered in remote task execution')
self.remote_failure = True
self.stderr += 'Exception when trying to SSH related to: '
self.stderr += '{0}: {1}\n"'.format(type(e).__name__, str(e))
self.stderr += 'Was looking for host "{0}"\n'.format(str(host))
self.stderr += 'Found in config:\n'
self.stderr += 'host: "{0}"\n'.format(str(host))
self.stderr += 'hostname: "{0}"\n'.format(str(host.get('hostname')))
self.stderr += 'user: "{0}"\n'.format(str(host.get('user')))
self.stderr += 'identityfile: "{0}"\n'.format(str(host.get('identityfile')))
self.remote_client.close() | 0.003019 |
def create_aws_clients(region, profile, *clients):
"""
Create boto3 clients for one or more AWS services. These are the services used within the libs:
cloudformation, cloudfront, ec2, iam, lambda, route53, waf
Args:
region: the region in which to create clients that are region-specific (all but IAM)
profile: Name of profile (in .aws/credentials). Pass the value None if using instance credentials on EC2 or Lambda
clients: names of the clients to create (lowercase, must match what boto3 expects)
Returns:
A dictionary of <key>,<value> pairs for several AWS services, using the labels above as keys, e.g.:
{ "cloudfront": <cloudfront_client>, ... }
Dictionary contains an extra record, "SESSION" - pointing to the session that created the clients
"""
if not profile:
profile = None
client_key = (region, profile)
aws_clients = client_cache.get(client_key, {})
requested_clients = set(clients)
new_clients = requested_clients.difference(aws_clients)
if not new_clients:
return aws_clients
session = aws_clients.get("SESSION")
try:
if not session:
session = boto3.Session(region_name=region, profile_name=profile)
aws_clients["SESSION"] = session
# build clients
client_dict = {c: session.client(c) for c in new_clients}
# append the session itself in case it's needed by the client code - can't get it from the clients themselves
aws_clients.update(client_dict)
# add the created clients to the cache
client_cache[client_key] = aws_clients
return aws_clients
except ClientError as error:
raise RuntimeError("Exception logging in with Session() and creating clients", error) | 0.012419 |
def on_patch(resc, req, resp, rid):
""" Deserialize the payload & update the single item """
signals.pre_req.send(resc.model)
signals.pre_req_update.send(resc.model)
props = req.deserialize()
model = find(resc.model, rid)
from_rest(model, props)
goldman.sess.store.update(model)
props = to_rest_model(model, includes=req.includes)
resp.last_modified = model.updated
resp.serialize(props)
signals.post_req.send(resc.model)
signals.post_req_update.send(resc.model) | 0.001942 |
def log_player_ends_turn(self, player):
"""
:param player: catan.game.Player
"""
seconds_delta = (datetime.datetime.now() - self._latest_timestamp).total_seconds()
self._logln('{0} ends turn after {1}s'.format(player.color, round(seconds_delta)))
self._latest_timestamp = datetime.datetime.now() | 0.011662 |
def copy(self):
"""Copy text to clipboard"""
clipboard = QApplication.clipboard()
clipl = []
for idx in self.selectedIndexes():
if not idx.isValid():
continue
obj = self.delegate.get_value(idx)
# Check if we are trying to copy a numpy array, and if so make sure
# to copy the whole thing in a tab separated format
if isinstance(obj, (ndarray, MaskedArray)) \
and ndarray is not FakeObject:
if PY3:
output = io.BytesIO()
else:
output = io.StringIO()
try:
np_savetxt(output, obj, delimiter='\t')
                except Exception:
QMessageBox.warning(self, _("Warning"),
_("It was not possible to copy "
"this array"))
return
obj = output.getvalue().decode('utf-8')
output.close()
elif isinstance(obj, (DataFrame, Series)) \
and DataFrame is not FakeObject:
output = io.StringIO()
try:
obj.to_csv(output, sep='\t', index=True, header=True)
except Exception:
QMessageBox.warning(self, _("Warning"),
_("It was not possible to copy "
"this dataframe"))
return
if PY3:
obj = output.getvalue()
else:
obj = output.getvalue().decode('utf-8')
output.close()
elif is_binary_string(obj):
obj = to_text_string(obj, 'utf8')
else:
obj = to_text_string(obj)
clipl.append(obj)
clipboard.setText('\n'.join(clipl)) | 0.002509 |
def compute_distance(m, l):
'''Compute distance between two trajectories
Returns
-------
numpy.ndarray
'''
if np.shape(m) != np.shape(l):
raise ValueError("Input matrices are different sizes")
if np.array_equal(m, l):
# print("Trajectory %s and %s are equal" % (m, l))
distance = 0
else:
distance = np.array(np.sum(cdist(m, l)), dtype=np.float32)
return distance | 0.006186 |
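A small worked example of the quantity compute_distance returns; it assumes numpy and scipy are installed (cdist comes from scipy.spatial.distance in the original module).

import numpy as np
from scipy.spatial.distance import cdist

m = np.array([[0.0, 0.0], [1.0, 0.0]])
l = np.array([[0.0, 1.0], [1.0, 1.0]])
# Sum of all pairwise Euclidean distances between the two trajectories:
# 1 + sqrt(2) + sqrt(2) + 1
d = np.sum(cdist(m, l))
assert np.isclose(d, 2 + 2 * np.sqrt(2))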
def FetchDiscoveryDoc(discovery_url, retries=5):
"""Fetch the discovery document at the given url."""
discovery_urls = _NormalizeDiscoveryUrls(discovery_url)
discovery_doc = None
last_exception = None
for url in discovery_urls:
for _ in range(retries):
try:
content = _GetURLContent(url)
if isinstance(content, bytes):
content = content.decode('utf8')
discovery_doc = json.loads(content)
break
except (urllib_error.HTTPError, urllib_error.URLError) as e:
logging.info(
'Attempting to fetch discovery doc again after "%s"', e)
last_exception = e
if discovery_doc is None:
raise CommunicationError(
'Could not find discovery doc at any of %s: %s' % (
discovery_urls, last_exception))
return discovery_doc | 0.001071 |
def filter(self, *args, **kwargs):
"""
Apply filters to the existing nodes in the set.
:param kwargs: filter parameters
Filters mimic Django's syntax with the double '__' to separate field and operators.
e.g `.filter(salary__gt=20000)` results in `salary > 20000`.
The following operators are available:
* 'lt': less than
* 'gt': greater than
* 'lte': less than or equal to
* 'gte': greater than or equal to
* 'ne': not equal to
* 'in': matches one of list (or tuple)
* 'isnull': is null
* 'regex': matches supplied regex (neo4j regex format)
* 'exact': exactly match string (just '=')
* 'iexact': case insensitive match string
* 'contains': contains string
* 'icontains': case insensitive contains
* 'startswith': string starts with
* 'istartswith': case insensitive string starts with
* 'endswith': string ends with
* 'iendswith': case insensitive string ends with
:return: self
"""
if args or kwargs:
self.q_filters = Q(self.q_filters & Q(*args, **kwargs))
return self | 0.002327 |
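A standalone sketch of the field__operator keyword convention described in the docstring above; the operator table shown is an illustrative subset, not the library's own implementation.

OPERATORS = {'lt': '<', 'gt': '>', 'lte': '<=', 'gte': '>=', 'ne': '<>', 'exact': '='}

def split_lookup(key):
    """Split 'salary__gt' into ('salary', '>'); bare keys default to '='."""
    if '__' in key:
        field, op = key.rsplit('__', 1)
        if op in OPERATORS:
            return field, OPERATORS[op]
    return key, '='

assert split_lookup('salary__gt') == ('salary', '>')
assert split_lookup('name') == ('name', '=')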
def onRightDown(self, event=None):
""" right button down: show pop-up"""
if event is None:
return
# note that the matplotlib event location have to be converted
if event.inaxes is not None and self.popup_menu is not None:
pos = event.guiEvent.GetPosition()
wx.CallAfter(self.PopupMenu, self.popup_menu, pos)
self.cursor_mode_action('rightdown', event=event)
self.ForwardEvent(event=event.guiEvent) | 0.004158 |
def get_third_order_displacements(cell,
symmetry,
is_plusminus='auto',
is_diagonal=False):
"""Create dispalcement dataset
Note
----
Atoms 1, 2, and 3 are defined as follows:
Atom 1: The first displaced atom. Third order force constant
between Atoms 1, 2, and 3 is calculated.
Atom 2: The second displaced atom. Second order force constant
between Atoms 2 and 3 is calculated.
    Atom 3: Force is measured on this atom.
Parameters
----------
cell : PhonopyAtoms
Supercell
symmetry : Symmetry
Symmetry of supercell
is_plusminus : str or bool, optional
Type of displacements, plus only (False), always plus and minus (True),
and plus and minus depending on site symmetry ('auto').
is_diagonal : bool, optional
Whether allow diagonal displacements of Atom 2 or not
Returns
-------
dict
Data structure is like:
{'natom': 64,
'cutoff_distance': 4.000000,
'first_atoms':
[{'number': atom1,
'displacement': [0.03, 0., 0.],
'second_atoms': [ {'number': atom2,
'displacement': [0., -0.03, 0.],
'distance': 2.353},
{'number': ... }, ... ] },
{'number': atom1, ... } ]}
"""
positions = cell.get_scaled_positions()
lattice = cell.get_cell().T
# Least displacements of first atoms (Atom 1) are searched by
# using respective site symmetries of the original crystal.
# 'is_diagonal=False' below is made intentionally to expect
# better accuracy.
disps_first = get_least_displacements(symmetry,
is_plusminus=is_plusminus,
is_diagonal=False)
symprec = symmetry.get_symmetry_tolerance()
dds = []
for disp in disps_first:
atom1 = disp[0]
disp1 = disp[1:4]
site_sym = symmetry.get_site_symmetry(atom1)
dds_atom1 = {'number': atom1,
'direction': disp1,
'second_atoms': []}
# Reduced site symmetry at the first atom with respect to
# the displacement of the first atoms.
reduced_site_sym = get_reduced_site_symmetry(site_sym, disp1, symprec)
# Searching orbits (second atoms) with respect to
# the first atom and its reduced site symmetry.
second_atoms = get_least_orbits(atom1,
cell,
reduced_site_sym,
symprec)
for atom2 in second_atoms:
dds_atom2 = get_next_displacements(atom1,
atom2,
reduced_site_sym,
lattice,
positions,
symprec,
is_diagonal)
min_vec = get_equivalent_smallest_vectors(atom1,
atom2,
cell,
symprec)[0]
min_distance = np.linalg.norm(np.dot(lattice, min_vec))
dds_atom2['distance'] = min_distance
dds_atom1['second_atoms'].append(dds_atom2)
dds.append(dds_atom1)
return dds | 0.000272 |
def set_burnstages_upgrade_massive(self):
'''
        Outputs burning stages as done in burningstages_upgrade (nugridse)
'''
burn_info=[]
burn_mini=[]
for i in range(len(self.runs_H5_surf)):
sefiles=se(self.runs_H5_out[i])
burn_info.append(sefiles.burnstage_upgrade())
mini=sefiles.get('mini')
#zini=sefiles.get('zini')
burn_mini.append(mini)
for i in range(len(self.runs_H5_surf)):
            print('Following returned for each initial mass')
            print('[burn_cycles,burn_ages, burn_abun, burn_type,burn_lifetime]')
            print('----Mini: {0} ------'.format(burn_mini[i]))
            print(burn_info[i])
def shape(self) -> Tuple[int, int]:
"""Required shape of |NetCDFVariableAgg.array|.
For the default configuration, the first axis corresponds to the
number of devices, and the second one to the number of timesteps.
We show this for the 1-dimensional input sequence |lland_fluxes.NKor|:
>>> from hydpy.core.examples import prepare_io_example_1
>>> nodes, elements = prepare_io_example_1()
>>> from hydpy.core.netcdftools import NetCDFVariableAgg
>>> ncvar = NetCDFVariableAgg('flux_nkor', isolate=False, timeaxis=1)
>>> for element in elements:
... ncvar.log(element.model.sequences.fluxes.nkor, None)
>>> ncvar.shape
(3, 4)
When using the first axis as the "timeaxis", the order of |tuple|
entries turns:
>>> ncvar = NetCDFVariableAgg('flux_nkor', isolate=False, timeaxis=0)
>>> for element in elements:
... ncvar.log(element.model.sequences.fluxes.nkor, None)
>>> ncvar.shape
(4, 3)
"""
return self.sort_timeplaceentries(
len(hydpy.pub.timegrids.init), len(self.sequences)) | 0.001715 |
def Compile(self, filter_implementation):
"""Compile the expression."""
arguments = [self.attribute]
for argument in self.args:
arguments.append(argument.Compile(filter_implementation))
expander = filter_implementation.FILTERS['ValueExpander']
context_cls = filter_implementation.FILTERS['Context']
return context_cls(arguments=arguments,
value_expander=expander) | 0.004808 |
def find_stars(self, data, mask=None):
"""
Find stars in an astronomical image.
Parameters
----------
data : 2D array_like
The 2D image array.
mask : 2D bool array, optional
A boolean mask with the same shape as ``data``, where a
`True` value indicates the corresponding element of ``data``
is masked. Masked pixels are ignored when searching for
stars.
Returns
-------
table : `~astropy.table.Table` or `None`
A table of found stars with the following parameters:
* ``id``: unique object identification number.
* ``xcentroid, ycentroid``: object centroid.
* ``sharpness``: object sharpness.
* ``roundness1``: object roundness based on symmetry.
* ``roundness2``: object roundness based on marginal Gaussian
fits.
* ``npix``: the total number of pixels in the Gaussian kernel
array.
* ``sky``: the input ``sky`` parameter.
* ``peak``: the peak, sky-subtracted, pixel value of the object.
* ``flux``: the object flux calculated as the peak density in
the convolved image divided by the detection threshold. This
derivation matches that of `DAOFIND`_ if ``sky`` is 0.0.
* ``mag``: the object instrumental magnitude calculated as
``-2.5 * log10(flux)``. The derivation matches that of
`DAOFIND`_ if ``sky`` is 0.0.
`None` is returned if no stars are found.
"""
star_cutouts = _find_stars(data, self.kernel, self.threshold_eff,
mask=mask,
exclude_border=self.exclude_border)
if star_cutouts is None:
warnings.warn('No sources were found.', NoDetectionsWarning)
return None
self._star_cutouts = star_cutouts
star_props = []
for star_cutout in star_cutouts:
props = _DAOFind_Properties(star_cutout, self.kernel, self.sky)
if np.isnan(props.dx_hx).any() or np.isnan(props.dy_hy).any():
continue
if (props.sharpness <= self.sharplo or
props.sharpness >= self.sharphi):
continue
if (props.roundness1 <= self.roundlo or
props.roundness1 >= self.roundhi):
continue
if (props.roundness2 <= self.roundlo or
props.roundness2 >= self.roundhi):
continue
if self.peakmax is not None and props.peak >= self.peakmax:
continue
star_props.append(props)
nstars = len(star_props)
if nstars == 0:
warnings.warn('Sources were found, but none pass the sharpness '
'and roundness criteria.', NoDetectionsWarning)
return None
if self.brightest is not None:
fluxes = [props.flux for props in star_props]
idx = sorted(np.argsort(fluxes)[-self.brightest:].tolist())
star_props = [star_props[k] for k in idx]
nstars = len(star_props)
table = Table()
table['id'] = np.arange(nstars) + 1
columns = ('xcentroid', 'ycentroid', 'sharpness', 'roundness1',
'roundness2', 'npix', 'sky', 'peak', 'flux', 'mag')
for column in columns:
table[column] = [getattr(props, column) for props in star_props]
return table | 0.000554 |
def parse(self, path, args=None, unsaved_files=None, options = 0):
"""Load the translation unit from the given source code file by running
clang and generating the AST before loading. Additional command line
parameters can be passed to clang via the args parameter.
In-memory contents for files can be provided by passing a list of pairs
to as unsaved_files, the first item should be the filenames to be mapped
and the second should be the contents to be substituted for the
file. The contents may be passed as strings or file objects.
If an error was encountered during parsing, a TranslationUnitLoadError
will be raised.
"""
return TranslationUnit.from_source(path, args, unsaved_files, options,
self) | 0.005981 |
def GetStructByteOrderString(self):
"""Retrieves the Python struct format string.
Returns:
str: format string as used by Python struct or None if format string
cannot be determined.
"""
if not self._data_type_definition:
return None
return self._BYTE_ORDER_STRINGS.get(
self._data_type_definition.byte_order, None) | 0.005464 |
def _ends_in_doubled_cons(self, term):
"""Return Porter helper function _ends_in_doubled_cons value.
Parameters
----------
term : str
The word to check for a final doubled consonant
Returns
-------
bool
True iff the stem ends in a doubled consonant (as defined in the
Porter stemmer definition)
"""
return (
len(term) > 1
and term[-1] not in self._vowels
and term[-2] == term[-1]
) | 0.003731 |
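A quick standalone check of the doubled-consonant test described above, using a simplified vowel set as an assumption (the Porter stemmer treats 'y' contextually).

_vowels = set('aeiouy')

def ends_in_doubled_cons(term):
    return len(term) > 1 and term[-1] not in _vowels and term[-2] == term[-1]

assert ends_in_doubled_cons('hopp') is True   # ends in 'pp'
assert ends_in_doubled_cons('fall') is True   # ends in 'll'
assert ends_in_doubled_cons('tree') is False  # 'e' is a vowel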
def ellipse_from_second_moments_ijv(i,j, image, labels, indexes, wants_compactness = False):
"""Calculate measurements of ellipses equivalent to the second moments of labels
i,j - coordinates of each point
image - the intensity at each point
labels - for each labeled object, derive an ellipse
indexes - sequence of indexes to process
returns the following arrays:
coordinates of the center of the ellipse
eccentricity
major axis length
minor axis length
orientation
some definitions taken from "Image Moments-Based Structuring and Tracking
of Objects", LOURENA ROCHA, LUIZ VELHO, PAULO CEZAR P. CARVALHO,
http://sibgrapi.sid.inpe.br/col/sid.inpe.br/banon/2002/10.23.11.34/doc/35.pdf
particularly equation 5 (which has some errors in it).
These yield the rectangle with equivalent second moments. I translate
to the ellipse by multiplying by 1.154701 which is Matlab's calculation
of the major and minor axis length for a square of length X divided
by the actual length of the side of a square of that length.
eccentricity is the distance between foci divided by the major axis length
orientation is the angle of the major axis with respect to the X axis
"""
if len(indexes) == 0:
return [np.zeros((0,2))] + [np.zeros((0,))] * (5 if wants_compactness else 4)
if len(i) == 0:
return ([np.zeros((len(indexes), 2)), np.ones(len(indexes))] +
[np.zeros(len(indexes))] * (4 if wants_compactness else 3))
#
# Normalize to center of object for stability
#
nlabels = np.max(indexes)+1
m = np.array([[None, 0, None],
[0, None, None],
[None, None, None]], object)
if np.all(image == 1):
image = 1
m[0,0] = intensity = np.bincount(labels)
else:
m[0,0] = intensity = np.bincount(labels, image)
ic = np.bincount(labels, i * image) / intensity
jc = np.bincount(labels, j * image) / intensity
i = i - ic[labels]
j = j - jc[labels]
#
# Start by calculating the moments m[p][q] of the image
# sum(i**p j**q)
#
# m[1,0] = 0 via normalization
# m[0,1] = 0 via normalization
m[1,1] = np.bincount(labels, i*j*image)
m[2,0] = np.bincount(labels, i*i*image)
m[0,2] = np.bincount(labels, j*j*image)
a = m[2,0] / m[0,0]
b = 2*m[1,1]/m[0,0]
c = m[0,2] / m[0,0]
theta = np.arctan2(b,c-a) / 2
temp = np.sqrt(b**2+(a-c)**2)
#
# If you do a linear regression of the circles from 1 to 50 radius
# in Matlab, the resultant values fit a line with slope=.9975 and
# intercept .095. I'm adjusting the lengths accordingly.
#
mystery_constant = 0.095
mystery_multiplier = 0.9975
major_axis_len = (np.sqrt(8*(a+c+temp)) * mystery_multiplier +
mystery_constant)
minor_axis_len = (np.sqrt(8*(a+c-temp)) * mystery_multiplier +
mystery_constant)
eccentricity = np.sqrt(1-(minor_axis_len / major_axis_len)**2)
compactness = 2 * np.pi * (a + c) / m[0,0]
return ([np.column_stack((ic[indexes], jc[indexes])),
eccentricity[indexes],
major_axis_len[indexes],
minor_axis_len[indexes],
theta[indexes]] +
([compactness[indexes]] if wants_compactness else [])) | 0.007946 |
def parseerror(self, msg, line=None):
"""Emit parse error and abort assembly."""
if line is None:
line = self.sline
error('parse error: ' + msg + ' on line {}'.format(line))
sys.exit(-2) | 0.008696 |
def _batch_gvcfs(data, region, vrn_files, ref_file, out_file=None):
"""Perform batching of gVCF files if above recommended input count.
"""
if out_file is None:
out_file = vrn_files[0]
# group to get below the maximum batch size, using 200 as the baseline
max_batch = int(dd.get_joint_group_size(data))
if len(vrn_files) > max_batch:
out = []
num_batches = int(math.ceil(float(len(vrn_files)) / max_batch))
for i, batch_vrn_files in enumerate(tz.partition_all(num_batches, vrn_files)):
base, ext = utils.splitext_plus(out_file)
batch_out_file = "%s-b%s%s" % (base, i, ext)
out.append(run_combine_gvcfs(batch_vrn_files, region, ref_file, batch_out_file, data))
return _batch_gvcfs(data, region, out, ref_file)
else:
return vrn_files | 0.003559 |
def _ParseLeak(
self, parser_mediator, cache_directories, msiecf_item, recovered=False):
"""Extract data from a MSIE Cache Files (MSIECF) leak item.
Every item is stored as an event object, one for each timestamp.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
cache_directories (list[str]): cache directory names.
msiecf_item (pymsiecf.leak): MSIECF leak item.
recovered (Optional[bool]): True if the item was recovered.
"""
# TODO: add support for possible last cache synchronization date and time.
date_time = dfdatetime_semantic_time.SemanticTime('Not set')
event_data = MSIECFLeakEventData()
event_data.cached_filename = msiecf_item.filename
event_data.cached_file_size = msiecf_item.cached_file_size
event_data.cache_directory_index = msiecf_item.cache_directory_index
event_data.offset = msiecf_item.offset
event_data.recovered = recovered
if (event_data.cache_directory_index >= 0 and
event_data.cache_directory_index < len(cache_directories)):
event_data.cache_directory_name = (
cache_directories[event_data.cache_directory_index])
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
parser_mediator.ProduceEventWithEventData(event, event_data) | 0.002125 |
def merge_grad_list(all_grads, all_vars):
"""
Args:
all_grads (K x N): gradients
all_vars(K x N): variables
Return:
K x N x 2: list of list of (grad, var) pairs
"""
return [list(zip(gs, vs)) for gs, vs in zip(all_grads, all_vars)] | 0.003636 |
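A tiny worked example of the K x N -> K x N x 2 regrouping performed by merge_grad_list above, using strings as stand-ins for the gradient and variable tensors.

all_grads = [['g00', 'g01'], ['g10', 'g11']]  # K=2 towers, N=2 gradients each
all_vars = [['v00', 'v01'], ['v10', 'v11']]   # K=2 towers, N=2 variables each
merged = [list(zip(gs, vs)) for gs, vs in zip(all_grads, all_vars)]
assert merged == [[('g00', 'v00'), ('g01', 'v01')],
                  [('g10', 'v10'), ('g11', 'v11')]]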
def _apply_shadow_vars(avg_grads):
"""
Create shadow variables on PS, and replace variables in avg_grads
by these shadow variables.
Args:
avg_grads: list of (grad, var) tuples
"""
ps_var_grads = []
for grad, var in avg_grads:
assert var.name.startswith('tower'), var.name
my_name = '/'.join(var.name.split('/')[1:])
my_name = get_op_tensor_name(my_name)[0]
new_v = tf.get_variable(my_name, dtype=var.dtype.base_dtype,
initializer=var.initial_value,
trainable=True)
# (g, v) to be applied, where v is global (ps vars)
ps_var_grads.append((grad, new_v))
return ps_var_grads | 0.002525 |
def get_record(self):
"""Override the base get_record."""
self.update_system_numbers()
self.add_systemnumber("CDS")
self.fields_list = [
"024", "041", "035", "037", "088", "100",
"110", "111", "242", "245", "246", "260",
"269", "300", "502", "650", "653", "693",
"700", "710", "773", "856", "520", "500",
"980"
]
self.keep_only_fields()
self.determine_collections()
self.add_cms_link()
self.update_languages()
self.update_reportnumbers()
self.update_date()
self.update_pagenumber()
self.update_authors()
self.update_subject_categories("SzGeCERN", "INSPIRE", "categories_inspire")
self.update_keywords()
self.update_experiments()
self.update_collaboration()
self.update_journals()
self.update_links_and_ffts()
if 'THESIS' in self.collections:
self.update_thesis_supervisors()
self.update_thesis_information()
if 'NOTE' in self.collections:
self.add_notes()
for collection in self.collections:
record_add_field(self.record,
tag='980',
subfields=[('a', collection)])
self.remove_controlfields()
return self.record | 0.002187 |
def generate_sentence(self, chain):
"""
!DEMO!
        Demo function that shows how to generate a simple sentence starting with
        an uppercase letter, without a length limit.
Args:
chain: MarkovChain that will be used to generate sentence
"""
def weighted_choice(choices):
total_weight = sum(weight for val, weight in choices)
rand = random.uniform(0, total_weight)
upto = 0
for val, weight in choices:
if upto + weight >= rand:
return val
upto += weight
sentence = list(random.choice(chain.startwords))
while not sentence[-1][-1] in ['.', '?', '!']:
sentence.append(
weighted_choice(
chain.content[tuple(sentence[-2:])].items()
)
)
return ' '.join(sentence) | 0.003275 |
def maybeDeferred(f, *args, **kw):
"""
    Copied from twisted.internet.defer and adds a check to detect fibers.
"""
try:
result = f(*args, **kw)
except Exception:
return fail(failure.Failure())
if IFiber.providedBy(result):
import traceback
frames = traceback.extract_stack()
msg = "%s returned a fiber instead of a deferred" % (f, )
if len(frames) > 1:
msg += "; called from %s" % (frames[-2], )
raise RuntimeError(msg)
if isinstance(result, Deferred):
return result
elif isinstance(result, failure.Failure):
return fail(result)
else:
return succeed(result) | 0.001462 |
def send(self, conn):
''' Send the message on the given connection.
Args:
conn (WebSocketHandler) : a WebSocketHandler to send messages
Returns:
int : number of bytes sent
'''
if conn is None:
raise ValueError("Cannot send to connection None")
with (yield conn.write_lock.acquire()):
sent = 0
yield conn.write_message(self.header_json, locked=False)
sent += len(self.header_json)
# uncomment this to make it a lot easier to reproduce lock-related bugs
#yield gen.sleep(0.1)
yield conn.write_message(self.metadata_json, locked=False)
sent += len(self.metadata_json)
# uncomment this to make it a lot easier to reproduce lock-related bugs
#yield gen.sleep(0.1)
yield conn.write_message(self.content_json, locked=False)
sent += len(self.content_json)
sent += yield self.write_buffers(conn, locked=False)
raise gen.Return(sent) | 0.005587 |
def _isint(string):
"""
>>> _isint("123")
True
>>> _isint("123.45")
False
"""
return type(string) is int or \
(isinstance(string, _binary_type) or isinstance(string, _text_type)) and \
_isconvertible(int, string) | 0.015267 |
def _printStats(self, graph, hrlinetop=False):
""" shotcut to pull out useful info for interactive use
2016-05-11: note this is a local version of graph.printStats()
"""
if hrlinetop:
self._print("----------------", "TIP")
self._print("Ontologies......: %d" % len(graph.all_ontologies), "TIP")
self._print("Classes.........: %d" % len(graph.all_classes), "TIP")
self._print("Properties......: %d" % len(graph.all_properties), "TIP")
self._print("..annotation....: %d" % len(graph.all_properties_annotation), "TIP")
self._print("..datatype......: %d" % len(graph.all_properties_datatype), "TIP")
self._print("..object........: %d" % len(graph.all_properties_object), "TIP")
self._print("Concepts(SKOS)..: %d" % len(graph.all_skos_concepts), "TIP")
self._print("----------------", "TIP") | 0.006719 |
def _validate_datetime(cls, value):
"""
validate datetime value
:param value: datetime value
        :return: None
        :raises: validators.Invalid or MlbAmBadParameter
"""
datetime_check = validators.Int()
datetime_check.to_python(value)
if len(value) != 8:
raise MlbAmBadParameter("Length Error:{value}({length})".format(value=value, length=len(value))) | 0.007335 |
def search(self):
"""Handle the search request."""
search = self.document_class().search() # pylint: disable=not-callable
search = self.custom_filter(search)
search = self.filter_search(search)
search = self.order_search(search)
search = self.filter_permissions(search)
if search.count() > ELASTICSEARCH_SIZE:
limit = self.paginator.get_limit(self.request)
if not limit or limit > ELASTICSEARCH_SIZE:
raise TooManyResults()
search = search.extra(size=ELASTICSEARCH_SIZE)
return search | 0.003322 |
def _resolve_by_callback(request, url, urlconf=None):
"""
    Finds a view function by urlconf. If the function has the attribute
    'breadcrumb' (or the deprecated 'navigation'), it is used as the breadcrumb
    title. Such a title can be either a callable or an object with a
    `__unicode__` attribute. If it is callable, it
must follow the views API (i.e. the only required argument is request
object). It is also expected to return a `unicode` value.
"""
try:
callback, args, kwargs = _resolve_url(url, request, urlconf=urlconf)
except urlresolvers.Resolver404:
return None
bc = getattr(callback, 'breadcrumb', None)
if bc is None:
bc = getattr(callback, 'navigation', None)
if bc is not None: # pragma: nocover
import warnings
warnings.warn('The "navigation" attribute is deprecated, use '
'"breadcrumb" instead.')
if bc is None:
return None
if hasattr(bc, '__call__'):
# the breadcrumb is a function with an API identical to that of views.
try:
title = bc(request, *args, **kwargs)
except http.Http404:
return None
assert isinstance(title, basestring), (
'Breadcrumb function must return Unicode, not %s' % title)
else:
title = unicode(bc) # handle i18n proxy objects
return Crumb(url, title) | 0.000728 |
def read_loom(filename: PathLike, sparse: bool = True, cleanup: bool = False, X_name: str = 'spliced',
obs_names: str = 'CellID', var_names: str = 'Gene', dtype: str='float32', **kwargs) -> AnnData:
"""Read ``.loom``-formatted hdf5 file.
This reads the whole file into memory.
Beware that you have to explicitly state when you want to read the file as
sparse data.
Parameters
----------
filename
The filename.
sparse
Whether to read the data matrix as sparse.
cleanup:
Whether to remove all obs/var keys that do not store more than one unique value.
X_name:
Loompy key where the data matrix is stored.
obs_names:
Loompy key where the observation/cell names are stored.
var_names:
Loompy key where the variable/gene names are stored.
**kwargs:
Arguments to loompy.connect
"""
filename = fspath(filename) # allow passing pathlib.Path objects
from loompy import connect
with connect(filename, 'r', **kwargs) as lc:
if X_name not in lc.layers.keys(): X_name = ''
X = lc.layers[X_name].sparse().T.tocsr() if sparse else lc.layers[X_name][()].T
layers = OrderedDict()
if X_name != '': layers['matrix'] = lc.layers[''].sparse().T.tocsr() if sparse else lc.layers[''][()].T
for key in lc.layers.keys():
if key != '': layers[key] = lc.layers[key].sparse().T.tocsr() if sparse else lc.layers[key][()].T
obs = dict(lc.col_attrs)
if obs_names in obs.keys(): obs['obs_names'] = obs.pop(obs_names)
obsm_attrs = [k for k, v in obs.items() if v.ndim > 1 and v.shape[1] > 1]
obsm = {}
for key in obsm_attrs:
obsm[key] = obs.pop(key)
var = dict(lc.row_attrs)
if var_names in var.keys(): var['var_names'] = var.pop(var_names)
varm_attrs = [k for k, v in var.items() if v.ndim > 1 and v.shape[1] > 1]
varm = {}
for key in varm_attrs:
varm[key] = var.pop(key)
if cleanup:
for key in list(obs.keys()):
if len(set(obs[key])) == 1:
del obs[key]
for key in list(var.keys()):
if len(set(var[key])) == 1:
del var[key]
adata = AnnData(
X,
obs=obs, # not ideal: make the generator a dict...
var=var,
layers=layers,
obsm=obsm if obsm else None,
varm=varm if varm else None,
dtype=dtype)
return adata | 0.006211 |
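A hypothetical call to read_loom above; "sample.loom" is a placeholder path, and loompy plus anndata must be installed for this to run.

adata = read_loom("sample.loom", sparse=True, cleanup=True,
                  obs_names="CellID", var_names="Gene")
print(adata.shape)                # cells x genes (transposed from loom's genes x cells)
print(list(adata.layers.keys()))  # any extra layers, e.g. 'matrix'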
def rewrite_refs( targets, old,new, index, key='refs', single_ref=False ):
"""Rewrite key in all targets (from index if necessary) to replace old with new"""
for parent in targets:
if not isinstance( parent, dict ):
try:
parent = index[parent]
            except KeyError:
continue
rewrite_references( parent[key], old, new, single_ref=single_ref ) | 0.023697 |
def set_option(name, value):
"""
Set package option
Parameters
----------
name : str
Name of the option
value : object
New value of the option
Returns
-------
old : object
Old value of the option
"""
d = globals()
if name in {'get_option', 'set_option'} or name not in d:
from ..exceptions import PlotnineError
raise PlotnineError("Unknown option {}".format(name))
old = d[name]
d[name] = value
return old | 0.001965 |
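A standalone sketch of the "set option, return previous value" pattern used above, with a toy options dict standing in for the package's module-level globals.

_options = {'figure_size': (640, 480), 'dpi': 90}

def set_toy_option(name, value):
    if name not in _options:
        raise KeyError("Unknown option {}".format(name))
    old = _options[name]
    _options[name] = value
    return old

previous = set_toy_option('dpi', 150)
assert previous == 90 and _options['dpi'] == 150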
def _jq_format(code):
"""
DEPRECATED - Use re.escape() instead, which performs the intended action.
Use before throwing raw code such as 'div[tab="advanced"]' into jQuery.
Selectors with quotes inside of quotes would otherwise break jQuery.
If you just want to escape quotes, there's escape_quotes_if_needed().
This is similar to "json.dumps(value)", but with one less layer of quotes.
"""
code = code.replace('\\', '\\\\').replace('\t', '\\t').replace('\n', '\\n')
code = code.replace('\"', '\\\"').replace('\'', '\\\'')
code = code.replace('\v', '\\v').replace('\a', '\\a').replace('\f', '\\f')
code = code.replace('\b', '\\b').replace(r'\u', '\\u').replace('\r', '\\r')
return code | 0.001366 |
def _validate_filters(cls, filters):
"""Raise a TypeError if ``filters`` contains any keys inappropriate to
this event class."""
for k in iterkeys(filters):
if k not in cls.filters:
# Mirror "unexpected keyword argument" message:
raise TypeError("%s got an unsupported filter type '%s'" %
(cls.__name__, k)) | 0.004914 |
def Run(self):
"""Run the iteration."""
count = 0
for count, input_data in enumerate(self.GetInput()):
if count % 2000 == 0:
logging.debug("%d processed.", count)
args = (input_data, self.out_queue, self.token)
self.thread_pool.AddTask(
target=self.IterFunction, args=args, name=self.THREAD_POOL_NAME)
while count >= 0:
try:
# We only use the timeout to wait if we got to the end of the Queue but
# didn't process everything yet.
out = self.out_queue.get(timeout=self.QUEUE_TIMEOUT, block=True)
if out:
yield out
count -= 1
except queue.Empty:
break
# Join and stop to clean up the threadpool.
self.thread_pool.Stop(join_timeout=THREADPOOL_JOIN_TIMEOUT) | 0.010165 |
def files(self) -> List[str]:
"""
Obtain the list of the files (excluding .git directory).
:return: List[str], the list of the files
"""
_all = []
for path, _, files in os.walk(str(self.path)):
if '.git' in path:
continue
for name in files:
_all.append(os.path.join(path, name))
return _all | 0.004963 |
def _get_classifier(self, prefix):
""" Construct a decoder for the next sentence prediction task """
with self.name_scope():
classifier = nn.Dense(2, prefix=prefix)
return classifier | 0.009174 |
def get_locations_list(self, lower_bound=0, upper_bound=None):
"""
Return the internal location list.
Args:
            lower_bound: index of the first location to return (defaults to 0).
            upper_bound: index one past the last location to return; defaults
                to the total number of sub-locations.
        Returns:
            The list of locations in the requested range (an empty list on error).
"""
real_upper_bound = upper_bound
if upper_bound is None:
real_upper_bound = self.nbr_of_sub_locations()
try:
return self._locations_list[lower_bound:real_upper_bound]
        except Exception:
return list() | 0.00641 |
def add_row(self, data: list):
"""
Add a row of data to the current widget
:param data: a row of data
:return: None
"""
# validation
if self.headers:
if len(self.headers) != len(data):
raise ValueError
if len(data) != self.num_of_columns:
raise ValueError
offset = 0 if not self.headers else 1
row = list()
for i, element in enumerate(data):
label = ttk.Label(self, text=str(element), relief=tk.GROOVE,
padding=self.padding)
label.grid(row=len(self._rows) + offset, column=i, sticky='E,W')
row.append(label)
self._rows.append(row) | 0.002717 |
def update_properties_cache(sender, instance, action, reverse, model, pk_set, **kwargs):
"Property cache actualization at POI save. It will not work yet after property removal."
if action == 'post_add':
instance.save_properties_cache() | 0.011952 |
def get_pod_for_build(self, build_id):
"""
:return: PodResponse object for pod relating to the build
"""
pods = self.os.list_pods(label='openshift.io/build.name=%s' % build_id)
serialized_response = pods.json()
pod_list = [PodResponse(pod) for pod in serialized_response["items"]]
if not pod_list:
raise OsbsException("No pod for build")
elif len(pod_list) != 1:
raise OsbsException("Only one pod expected but %d returned",
len(pod_list))
return pod_list[0] | 0.003419 |
def read_configuration(config='DEFAULT'):
"""
Read LAtools configuration file, and return parameters as dict.
"""
# read configuration file
_, conf = read_latoolscfg()
# if 'DEFAULT', check which is the default configuration
if config == 'DEFAULT':
config = conf['DEFAULT']['config']
# grab the chosen configuration
conf = dict(conf[config])
# update config name with chosen
conf['config'] = config
return conf | 0.002141 |
def validate(self, value, redis):
''' hash passwords given via http '''
value = super().validate(value, redis)
if is_hashed(value):
return value
return make_password(value) | 0.009174 |
def _lexsorted_specs(self, order):
"""
A lexsort is specified using normal key string prefixed by '+'
(for ascending) or '-' for (for descending).
Note that in Python 2, if a key is missing, None is returned
(smallest Python value). In Python 3, an Exception will be
        raised regarding comparison of heterogeneous types.
"""
specs = self.specs[:]
if not all(el[0] in ['+', '-'] for el in order):
raise Exception("Please specify the keys for sorting, use"
"'+' prefix for ascending,"
"'-' for descending.)")
sort_cycles = [(el[1:], True if el[0]=='+' else False)
for el in reversed(order)
if el[1:] in self.varying_keys]
for (key, ascending) in sort_cycles:
specs = sorted(specs, key=lambda s: s.get(key, None),
reverse=(not ascending))
return specs | 0.003009 |
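A standalone sketch of the '+key' / '-key' ordering convention described in the docstring above; the reversed iteration means the first entry in `order` ends up as the primary sort key.

specs = [{'a': 2, 'b': 1}, {'a': 1, 'b': 2}, {'a': 1, 'b': 1}]
order = ['+a', '-b']  # sort by 'a' ascending, then by 'b' descending
for token in reversed(order):
    key, ascending = token[1:], token[0] == '+'
    specs = sorted(specs, key=lambda s: s.get(key, None), reverse=not ascending)
assert specs == [{'a': 1, 'b': 2}, {'a': 1, 'b': 1}, {'a': 2, 'b': 1}]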