text (string, lengths 78–104k) | score (float64, 0–0.18)
---|---
def ip2asn(ipaddr):
"""Returns the ASN data associated with an IP (v4 or v6)
>>> from pprint import pprint
>>> pprint(ip2asn('8.8.8.8'))
{'asn': '15169',
'asname': 'GOOGLE - Google Inc., US',
'cc': 'US',
'net': '8.8.8.0/24',
'rir': 'ARIN'}
>>> pprint(ip2asn('2001:4860:4860::8888'))
{'asn': '15169',
'asname': 'GOOGLE - Google Inc., US',
'cc': 'US',
'net': '2001:4860::/32',
'rir': 'ARIN'}
>>> pprint(ip2asn('unk'))
None
"""
try:
ip = ipaddress.ip_network(ipaddr)
except ValueError:
return None
if ip.is_private:
return None
    if ip.version == 4:
        a, b, c, d = str(ip.exploded).split('/')[0].split('.')
        rev = "%s.%s.%s.%s" % (d, c, b, a)
        name = "%s.origin.asn.cymru.com" % (rev)
    else:
        only_addr = str(ip.exploded).split('/')[0].replace(':', '')
        rev = '.'.join(only_addr[::-1])
        name = "%s.origin6.asn.cymru.com" % (rev)
    try:
        response = dns.resolver.query(name, 'TXT')
    except Exception:
        return None
    # "15169 | 8.8.4.0/24 | US | arin |"
    fields = response[0].to_text().split('|')
    r = {}
    r['asn'] = fields[0].strip(" \"").split(' ')[0]
    r['net'] = fields[1].strip(" \"")
    r['cc'] = fields[2].strip(" \"")
    r['rir'] = fields[3].strip(" \"").upper()
    r['asname'] = 'unknown'
    # Get the AS name
    # "15169 | US | arin | 2000-03-30 | GOOGLE - Google Inc.,US"
    try:
        name = "AS%s.asn.cymru.com" % (r['asn'])
        response = dns.resolver.query(name, 'TXT')
        r['asname'] = response[0].to_text().split('|')[4].strip(" \"")
    except Exception:
        pass
    return r | 0.001592 |
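For reference, a minimal sketch (standard library only, no DNS lookup performed) of how the reversed-octet and reversed-nibble query names above are built for the Team Cymru `origin` / `origin6` zones:

```python
import ipaddress

def cymru_query_name(ipaddr):
    """Build the DNS name used for a Team Cymru origin lookup (no lookup here)."""
    ip = ipaddress.ip_network(ipaddr)
    addr = str(ip.exploded).split('/')[0]
    if ip.version == 4:
        return '.'.join(reversed(addr.split('.'))) + '.origin.asn.cymru.com'
    nibbles = addr.replace(':', '')
    return '.'.join(reversed(nibbles)) + '.origin6.asn.cymru.com'

print(cymru_query_name('8.8.8.8'))               # 8.8.8.8.origin.asn.cymru.com
print(cymru_query_name('2001:4860:4860::8888'))  # 32 reversed nibbles + '.origin6.asn.cymru.com'
```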
def cluster_node(self, node_id):
"""
A node resource contains information about a node in the cluster.
:param str node_id: The node id
:returns: API response object with JSON data
:rtype: :py:class:`yarn_api_client.base.Response`
"""
path = '/ws/v1/cluster/nodes/{nodeid}'.format(nodeid=node_id)
return self.request(path) | 0.005168 |
def on_menu_exit(self,new):
"""
Fake event handler, same as :py:meth:`WorldView.on_menu_exit()` but force-disables mouse exclusivity.
"""
super(WorldViewMouseRotatable,self).on_menu_exit(new)
self.world.peng.window.toggle_exclusivity(False) | 0.017857 |
def __cancel(self, subscription_id, **kwargs):
"""Call documentation: `/subscription/cancel
<https://www.wepay.com/developer/reference/subscription#cancel>`_, plus
extra keyword parameters:
:keyword str access_token: will be used instead of instance's
``access_token``, with ``batch_mode=True`` will set `authorization`
        param to its value.
:keyword bool batch_mode: turn on/off the batch_mode, see
:class:`wepay.api.WePay`
:keyword str batch_reference_id: `reference_id` param for batch call,
see :class:`wepay.api.WePay`
:keyword str api_version: WePay API version, see
:class:`wepay.api.WePay`
"""
params = {
'subscription_id': subscription_id
}
return self.make_call(self.__cancel, params, kwargs) | 0.004598 |
def authenticate(self, auth_url=None, **kwargs):
"""Authenticates a user via the Keystone Identity API."""
LOG.debug('Beginning user authentication')
if not auth_url:
auth_url = settings.OPENSTACK_KEYSTONE_URL
auth_url, url_fixed = utils.fix_auth_url_version_prefix(auth_url)
if url_fixed:
LOG.warning("The OPENSTACK_KEYSTONE_URL setting points to a v2.0 "
"Keystone endpoint, but v3 is specified as the API "
"version to use by Horizon. Using v3 endpoint for "
"authentication.")
plugin, unscoped_auth = self._get_auth_backend(auth_url, **kwargs)
# the recent project id a user might have set in a cookie
recent_project = None
request = kwargs.get('request')
if request:
# Grab recent_project found in the cookie, try to scope
# to the last project used.
recent_project = request.COOKIES.get('recent_project')
unscoped_auth_ref = plugin.get_access_info(unscoped_auth)
# Check expiry for our unscoped auth ref.
self.check_auth_expiry(unscoped_auth_ref)
domain_name = kwargs.get('user_domain_name', None)
domain_auth, domain_auth_ref = plugin.get_domain_scoped_auth(
unscoped_auth, unscoped_auth_ref, domain_name)
scoped_auth, scoped_auth_ref = plugin.get_project_scoped_auth(
unscoped_auth, unscoped_auth_ref, recent_project=recent_project)
# Abort if there are no projects for this user and a valid domain
# token has not been obtained
#
# The valid use cases for a user login are:
# Keystone v2: user must have a role on a project and be able
# to obtain a project scoped token
# Keystone v3: 1) user can obtain a domain scoped token (user
# has a role on the domain they authenticated to),
# only, no roles on a project
# 2) user can obtain a domain scoped token and has
# a role on a project in the domain they
# authenticated to (and can obtain a project scoped
# token)
# 3) user cannot obtain a domain scoped token, but can
# obtain a project scoped token
if not scoped_auth_ref and domain_auth_ref:
# if the user can't obtain a project scoped token, set the scoped
# token to be the domain token, if valid
scoped_auth = domain_auth
scoped_auth_ref = domain_auth_ref
elif not scoped_auth_ref and not domain_auth_ref:
msg = _('You are not authorized for any projects.')
if utils.get_keystone_version() >= 3:
msg = _('You are not authorized for any projects or domains.')
raise exceptions.KeystoneAuthException(msg)
# Check expiry for our new scoped token.
self.check_auth_expiry(scoped_auth_ref)
# We want to try to use the same region we just logged into
# which may or may not be the default depending upon the order
# keystone uses
region_name = None
id_endpoints = scoped_auth_ref.service_catalog.\
get_endpoints(service_type='identity')
for id_endpoint in [cat for cat in id_endpoints['identity']]:
if auth_url in id_endpoint.values():
region_name = id_endpoint['region']
break
interface = getattr(settings, 'OPENSTACK_ENDPOINT_TYPE', 'public')
endpoint, url_fixed = utils.fix_auth_url_version_prefix(
scoped_auth_ref.service_catalog.url_for(
service_type='identity',
interface=interface,
region_name=region_name))
if url_fixed:
LOG.warning("The Keystone URL in service catalog points to a v2.0 "
"Keystone endpoint, but v3 is specified as the API "
"version to use by Horizon. Using v3 endpoint for "
"authentication.")
# If we made it here we succeeded. Create our User!
unscoped_token = unscoped_auth_ref.auth_token
user = auth_user.create_user_from_token(
request,
auth_user.Token(scoped_auth_ref, unscoped_token=unscoped_token),
endpoint,
services_region=region_name)
if request is not None:
# if no k2k providers exist then the function returns quickly
utils.store_initial_k2k_session(auth_url, request, scoped_auth_ref,
unscoped_auth_ref)
request.session['unscoped_token'] = unscoped_token
if domain_auth_ref:
# check django session engine, if using cookies, this will not
# work, as it will overflow the cookie so don't add domain
# scoped token to the session and put error in the log
if utils.using_cookie_backed_sessions():
LOG.error('Using signed cookies as SESSION_ENGINE with '
'OPENSTACK_KEYSTONE_MULTIDOMAIN_SUPPORT is '
'enabled. This disables the ability to '
'perform identity operations due to cookie size '
'constraints.')
else:
request.session['domain_token'] = domain_auth_ref
request.user = user
timeout = getattr(settings, "SESSION_TIMEOUT", 3600)
token_life = user.token.expires - datetime.datetime.now(pytz.utc)
session_time = min(timeout, int(token_life.total_seconds()))
request.session.set_expiry(session_time)
keystone_client_class = utils.get_keystone_client().Client
session = utils.get_session()
scoped_client = keystone_client_class(session=session,
auth=scoped_auth)
# Support client caching to save on auth calls.
setattr(request, KEYSTONE_CLIENT_ATTR, scoped_client)
LOG.debug('Authentication completed.')
return user | 0.000315 |
def fasta(self, key='vdj_nt', append_chain=True):
'''
Returns the sequence pair as a fasta string. If the Pair object contains
both heavy and light chain sequences, both will be returned as a single string.
By default, the fasta string contains the 'vdj_nt' sequence for each chain. To change,
use the <key> option to select an alternate sequence.
By default, the chain (heavy or light) will be appended to the sequence name:
>MySequence_heavy
To just use the pair name (which will result in duplicate sequence names for Pair objects
with both heavy and light chains), set <append_chain> to False.
'''
fastas = []
for s, chain in [(self.heavy, 'heavy'), (self.light, 'light')]:
if s is not None:
c = '_{}'.format(chain) if append_chain else ''
fastas.append('>{}{}\n{}'.format(s['seq_id'], c, s[key]))
return '\n'.join(fastas) | 0.007157 |
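A small usage sketch with hypothetical chain dictionaries shaped like the ones the method expects (a `seq_id` plus the selected sequence key); it reproduces the same FASTA formatting without needing a full Pair object:

```python
pair = {'heavy': {'seq_id': 'MySequence', 'vdj_nt': 'ATGCATGC'},
        'light': {'seq_id': 'MySequence', 'vdj_nt': 'GGCCTTAA'}}

fastas = []
for chain in ('heavy', 'light'):
    s = pair.get(chain)
    if s is not None:
        # append_chain=True behaviour: suffix the record name with the chain
        fastas.append('>{}_{}\n{}'.format(s['seq_id'], chain, s['vdj_nt']))
print('\n'.join(fastas))
# >MySequence_heavy
# ATGCATGC
# >MySequence_light
# GGCCTTAA
```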
def _init_metadata(self):
"""stub"""
ItemTextsFormRecord._init_metadata(self)
ItemFilesFormRecord._init_metadata(self)
super(ItemTextsAndFilesMixin, self)._init_metadata() | 0.009852 |
def _get_subject_info(self, n_local_subj, data):
"""Calculate metadata for subjects allocated to this process
Parameters
----------
n_local_subj : int
Number of subjects allocated to this process.
        data : list of 2D arrays, each of shape [n_voxel, n_tr]
            Data for the subjects allocated to this process, one array per subject.
Returns
-------
max_sample_tr : 1D array
Maximum number of TR to subsample for each subject
max_sample_voxel : 1D array
Maximum number of voxel to subsample for each subject
"""
max_sample_tr = np.zeros(n_local_subj).astype(int)
max_sample_voxel = np.zeros(n_local_subj).astype(int)
for idx in np.arange(n_local_subj):
nvoxel = data[idx].shape[0]
ntr = data[idx].shape[1]
max_sample_voxel[idx] =\
min(self.max_voxel, int(self.voxel_ratio * nvoxel))
max_sample_tr[idx] = min(self.max_tr, int(self.tr_ratio * ntr))
return max_sample_tr, max_sample_voxel | 0.00188 |
def _erf(x):
"""
Port of cephes ``ndtr.c`` ``erf`` function.
See https://github.com/jeremybarnes/cephes/blob/master/cprob/ndtr.c
"""
T = [
9.60497373987051638749E0,
9.00260197203842689217E1,
2.23200534594684319226E3,
7.00332514112805075473E3,
5.55923013010394962768E4,
]
U = [
3.35617141647503099647E1,
5.21357949780152679795E2,
4.59432382970980127987E3,
2.26290000613890934246E4,
4.92673942608635921086E4,
]
    # Shortcut special cases
if x == 0:
return 0
if x >= MAXVAL:
return 1
if x <= -MAXVAL:
return -1
if abs(x) > 1:
return 1 - erfc(x)
z = x * x
return x * _polevl(z, T, 4) / _p1evl(z, U, 5) | 0.001297 |
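The function relies on the cephes-style helpers `_polevl` and `_p1evl` (and a module-level `MAXVAL`), which are not shown in this snippet. A sketch of what those helpers conventionally compute, with coefficients given in decreasing powers of x and `_p1evl` assuming an implicit leading coefficient of 1.0:

```python
def _polevl(x, coefs, n):
    # Horner evaluation of coefs[0]*x**n + coefs[1]*x**(n-1) + ... + coefs[n]
    ans = coefs[0]
    for c in coefs[1:n + 1]:
        ans = ans * x + c
    return ans

def _p1evl(x, coefs, n):
    # Same idea, for a degree-n polynomial whose leading coefficient is 1.0
    ans = x + coefs[0]
    for c in coefs[1:n]:
        ans = ans * x + c
    return ans
```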
def _list_of_dicts_to_column_headers(list_of_dicts):
"""
    Detects whether all entries in a list of ``dict``'s have identical keys.
Returns the keys if all keys are the same and ``None`` otherwise.
Parameters
----------
list_of_dicts : list
List of dictionaries to test for identical keys.
Returns
-------
list or None
        List of column headers if all dictionaries possess the same keys; returns ``None`` otherwise.
"""
if len(list_of_dicts) < 2 or not all(isinstance(item, dict) for item in list_of_dicts):
return None
    column_headers = list(list_of_dicts[0].keys())
for d in list_of_dicts[1:]:
if len(d.keys()) != len(column_headers) or not all(header in d for header in column_headers):
return None
return column_headers | 0.005643 |
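A quick usage sketch (assuming the helper above is importable):

```python
print(_list_of_dicts_to_column_headers([{'a': 1, 'b': 2}, {'a': 3, 'b': 4}]))
# ['a', 'b']
print(_list_of_dicts_to_column_headers([{'a': 1}, {'b': 2}]))   # None (keys differ)
print(_list_of_dicts_to_column_headers([{'a': 1}]))             # None (fewer than 2 entries)
```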
def add_pending_model_content(cursor, publication_id, model):
"""Updates the pending model's content.
    This is a secondary step, not done in ``add_pending_model``, because
content reference resolution requires the identifiers as they
will appear in the end publication.
"""
cursor.execute("""\
SELECT id, ident_hash(uuid, major_version, minor_version)
FROM pending_documents
WHERE publication_id = %s AND uuid = %s""",
(publication_id, model.id,))
document_info = cursor.fetchone()
def attach_info_to_exception(exc):
"""Small cached function to grab the pending document id
and hash to attach to the exception, which is useful when
reading the json data on a response.
"""
exc.publication_id = publication_id
exc.pending_document_id, exc.pending_ident_hash = document_info
def mark_invalid_reference(reference):
"""Set the publication to failure and attach invalid reference
to the publication.
"""
exc = exceptions.InvalidReference(reference)
attach_info_to_exception(exc)
set_publication_failure(cursor, exc)
for resource in getattr(model, 'resources', []):
add_pending_resource(cursor, resource, document=model)
if isinstance(model, cnxepub.Document):
for reference in model.references:
if reference.is_bound:
reference.bind(reference.bound_model, '/resources/{}')
elif reference.remote_type == cnxepub.INTERNAL_REFERENCE_TYPE:
if reference.uri.startswith('#'):
pass
elif reference.uri.startswith('/contents'):
ident_hash = parse_archive_uri(reference.uri)
try:
doc_pointer = lookup_document_pointer(
ident_hash, cursor)
except DocumentLookupError:
mark_invalid_reference(reference)
else:
reference.bind(doc_pointer, "/contents/{}")
else:
mark_invalid_reference(reference)
# else, it's a remote or cnx.org reference ...Do nothing.
args = (psycopg2.Binary(model.content.encode('utf-8')),
publication_id, model.id,)
stmt = """\
UPDATE "pending_documents"
SET ("content") = (%s)
WHERE "publication_id" = %s AND "uuid" = %s"""
else:
metadata = model.metadata.copy()
# All document pointers in the tree are valid?
document_pointers = [m for m in cnxepub.flatten_model(model)
if isinstance(m, cnxepub.DocumentPointer)]
document_pointer_ident_hashes = [
(split_ident_hash(dp.ident_hash)[0],
split_ident_hash(dp.ident_hash, split_version=True)[1][0],
split_ident_hash(dp.ident_hash, split_version=True)[1][1],)
# split_ident_hash(dp.ident_hash, split_version=True)[1][0],)
for dp in document_pointers]
document_pointer_ident_hashes = zip(*document_pointer_ident_hashes)
if document_pointers:
uuids, major_vers, minor_vers = document_pointer_ident_hashes
cursor.execute("""\
SELECT dp.uuid, module_version(dp.maj_ver, dp.min_ver) AS version,
dp.uuid = m.uuid AS exists,
m.portal_type = 'Module' AS is_document
FROM (SELECT unnest(%s::uuid[]), unnest(%s::integer[]), unnest(%s::integer[]))\
AS dp(uuid, maj_ver, min_ver)
LEFT JOIN modules AS m ON dp.uuid = m.uuid AND \
(dp.maj_ver = m.major_version OR dp.maj_ver is null)""",
(list(uuids), list(major_vers), list(minor_vers),))
valid_pointer_results = cursor.fetchall()
for result_row in valid_pointer_results:
uuid, version, exists, is_document = result_row
if not (exists and is_document):
dp = [dp for dp in document_pointers
if dp.ident_hash == join_ident_hash(uuid, version)
][0]
exc = exceptions.InvalidDocumentPointer(
dp, exists=exists, is_document=is_document)
attach_info_to_exception(exc)
set_publication_failure(cursor, exc)
# Insert the tree into the metadata.
metadata['_tree'] = cnxepub.model_to_tree(model)
args = (json.dumps(metadata),
None, # TODO Render the HTML tree at ``model.content``.
publication_id, model.id,)
# Must pave over metadata because postgresql lacks built-in
# json update functions.
stmt = """\
UPDATE "pending_documents"
SET ("metadata", "content") = (%s, %s)
WHERE "publication_id" = %s AND "uuid" = %s"""
cursor.execute(stmt, args) | 0.000201 |
def AddSymbolicLink(self, path, linked_path):
"""Adds a symbolic link to the fake file system.
Args:
path (str): path of the symbolic link within the fake file system.
linked_path (str): path that is linked.
Raises:
ValueError: if the path is already set.
"""
if self.file_system.FileEntryExistsByPath(path):
raise ValueError('Path: {0:s} already set.'.format(path))
self._AddParentDirectories(path)
self.file_system.AddFileEntry(
path, file_entry_type=definitions.FILE_ENTRY_TYPE_LINK,
link_data=linked_path) | 0.003448 |
def _same_day_ids(self):
"""
:return: ids of occurrences that finish on the same day that they
start, or midnight the next day.
"""
# we can pre-filter to return only occurrences that are <=24h long,
# but until at least the `__date` can be used in F() statements
# we'll have to refine manually
qs = self.filter(end__lte=F('start') + timedelta(days=1))
# filter occurrences to those sharing the same end date, or
# midnight the next day (unless it's an all-day occurrence)
ids = [o.id for o in qs if (
(o.local_start.date() == o.local_end.date()) or
(
o.local_end.time() == time(0,0) and
o.local_end.date() == o.local_start.date() + timedelta(days=1) and
                not o.is_all_day
)
)]
return ids | 0.005669 |
def getnode():
"""Get the hardware address as a 48-bit positive integer.
The first time this runs, it may launch a separate program, which could
be quite slow. If all attempts to obtain the hardware address fail, we
choose a random 48-bit number with its eighth bit set to 1 as recommended
in RFC 4122.
"""
global _node
if _node is not None:
return _node
import sys
if sys.platform == 'win32':
getters = [_windll_getnode, _netbios_getnode, _ipconfig_getnode]
else:
getters = [_unixdll_getnode, _ifconfig_getnode]
for getter in getters + [_random_getnode]:
try:
_node = getter()
        except Exception:
continue
if _node is not None:
return _node | 0.002611 |
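The `_random_getnode` fallback mentioned in the docstring is conventionally a random 48-bit number with the multicast bit (least-significant bit of the first octet) set, so it cannot collide with a real IEEE 802 address; a minimal sketch, assuming only the standard library:

```python
import random

def _random_getnode():
    # RFC 4122, section 4.5: random 48-bit node id with the multicast bit set.
    return random.getrandbits(48) | (1 << 40)
```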
def get_resource(self, service_name, resource_name, base_class=None):
"""
Returns a ``Resource`` **class** for a given service.
:param service_name: A string that specifies the name of the desired
service. Ex. ``sqs``, ``sns``, ``dynamodb``, etc.
:type service_name: string
:param resource_name: A string that specifies the name of the desired
class. Ex. ``Queue``, ``Notification``, ``Table``, etc.
:type resource_name: string
:param base_class: (Optional) The base class of the object. Prevents
"magically" loading the wrong class (one with a different base).
:type base_class: class
:rtype: <kotocore.resources.Resource subclass>
"""
try:
return self.cache.get_resource(
service_name,
resource_name,
base_class=base_class
)
except NotCached:
pass
# We didn't find it. Construct it.
new_class = self.resource_factory.construct_for(
service_name,
resource_name,
base_class=base_class
)
self.cache.set_resource(service_name, resource_name, new_class)
return new_class | 0.001582 |
def _download_predicate_data(self, class_, controller):
"""Get raw predicate information for given request class, and cache for
subsequent calls.
"""
self.authenticate()
url = ('{0}{1}/modeldef/class/{2}'
.format(self.base_url, controller, class_))
logger.debug(requests.utils.requote_uri(url))
resp = self._ratelimited_get(url)
_raise_for_status(resp)
return resp.json()['data'] | 0.004246 |
def v1_highlights_post(request, response, kvlclient, tfidf,
min_delay=3, task_master=None):
'''Obtain highlights for a document POSTed inside a JSON object.
Get our Diffeo Highlighter browser extension here:
https://chrome.google.com/webstore/detail/jgfcplgdmjkdepnmbdkmgohaldaiplpo
While you're at it, pre-register for a beta account on
http://diffeo.com.
`min_delay` and `task_master` are used by tests.
The route for this endpoint is:
``POST /dossier/v1/highlights``.
The expected input structure is a JSON encoded string of an
object with these keys:
.. code-block:: javascript
{
// only text/html is supported at this time; hopefully PDF.js
// enables this to support PDF rendering too.
"content-type": "text/html",
// URL of the page (after resolving all redirects)
"content-location": "http://...",
// If provided by the original host, this will be populated,
// otherwise it is empty.
"last-modified": "datetime string or empty string",
// Boolean indicating whether the content may be stored by the
// server. If set to `false`, then server must respond
// synchronously with a newly computed response payload, and
// must purge any stored copies of this `content-location`.
// If `true`, server may respond with `state` of `pending`.
"store": false,
// full page contents obtained by Javascript in the browser
// extension accessing `document.documentElement.innerHTML`.
// This must be UTF-8 encoded.
// N.B. This needs experimentation to figure out whether the
// browser will always encode this as Unicode.
"body": "... the body content ...",
}
The output structure is a JSON UTF-8 encoded string of an
object with these keys:
.. code-block:: javascript
{
"highlights": [Highlight, Highlight, ...],
"state": State,
"id": StoreID,
"delay": 10.0,
"error": Error
}
where a `State` is one of these strings: `completed`, `stored`,
`pending`, or `error`. The `StoreID` is an opaque string computed
by the backend that the client can use to poll this end point with
`GET` requests for a `pending` request. The `delay` value is a
number of seconds that the client should wait before beginning
polling, e.g. ten seconds.
An `Error` object has this structure:
.. code-block:: javascript
{
// Error codes are (0, wrong content type), (1, empty body),
// (2, JSON decode error), (3, payload structure incorrect),
// (4, payload missing required keys), (5, invalid
// content-location), (6, too small body content), (7,
// internal error), (8, internal time out), (9, file_id does
// not exist)
"code": 0,
"message": "wrong content_type"
}
A `Highlight` object has this structure:
.. code-block:: javascript
{
// float in the range [0, 1]
"score": 0.7
// a string presented with a check box inside the options
// bubble when the user clicks the extension icon to choose
// which categories of highlights should be displayed.
"category": "Organization",
// `queries` are strings that are to be presented as
// suggestions to the user, and the extension enables the user
// to click any of the configured search engines to see
// results for a selected query string.
"queries": [],
// zero or more strings to match in the document and highlight
// with a single color.
"strings": [],
// zero or more xpath highlight objects to lookup in the document
// and highlight with a single color.
"xranges": [],
// zero or more Regex objects to compile and
// execute to find spans to highlight with a single color.
"regexes": []
}
where a Regex object is:
.. code-block:: javascript
{
"regex": "...", // e.g., "[0-9]"
"flags": "..." // e.g., "i" for case insensitive
}
where an xpath highlight object is:
.. code-block:: javascript
{
"range": XPathRange
}
where an XpathRange object is:
.. code-block:: javascript
{
"start": XPathOffset,
"end": XPathOffset
}
where an XpathOffset object is:
.. code-block:: javascript
{
"node": "/html[1]/body[1]/p[1]/text()[2]",
"idx": 4,
}
All of the `strings`, `ranges`, and `regexes` in a `Highlight`
object should be given the same highlight color. A `Highlight`
object can provide values in any of the three `strings`, `ranges`,
or `regexes` lists, and all should be highlighted.
'''
tfidf = tfidf or None
content_type = request.headers.get('content-type', '')
if not content_type.startswith('application/json'):
logger.critical('content-type=%r', content_type)
response.status = 415
return {
'state': ERROR,
'error': {
'code': 0,
'message': 'content_type=%r and should be '
'application/json' % content_type,
},
}
body = request.body.read()
if len(body) == 0:
response.status = 400
return {
'state': ERROR,
'error': {'code': 1, 'message': 'empty body'}
}
try:
data = json.loads(body.decode('utf-8'))
    except Exception as exc:
response.status = 400
return {
'state': ERROR,
'error': {
'code': 2,
'message':
'failed to read JSON body: %s' % exc,
},
}
if not isinstance(data, dict):
response.status = 400
return {
'state': ERROR,
'error': {
'code': 3,
'message': 'JSON request payload deserialized to'
' other than an object: %r' % type(data),
},
}
expected_keys = set([
'content-type', 'content-location', 'last-modified', 'body',
'store',
])
if set(data.keys()) != expected_keys:
response.status = 400
return {
'state': ERROR,
'error': {
'code': 4,
'message': 'other than expected keys in JSON object. '
'Expected %r and received %r'
% (sorted(expected_keys), sorted(data.keys())),
},
}
if len(data['content-location']) < 3:
response.status = 400
return {
'state': ERROR,
'error': {
'code': 5,
'message': 'received invalid content-location=%r'
% data['content-location'],
},
}
if len(data['body']) < 3:
response.status = 400
return {
'state': ERROR,
'error': {
'code': 6,
'message': 'received too little body=%r' % data['body'],
},
}
if data['last-modified']:
try:
last_modified = int(datetime.datetime(*eut.parsedate(data['last-modified'])[:6]).strftime('%s'))
        except Exception as exc:
logger.info('failed to parse last-modified=%r', data['last-modified'])
last_modified = 0
else:
last_modified = 0
doc_id = md5(data['content-location']).hexdigest()
content_hash = Nilsimsa(data['body']).hexdigest()
file_id = (doc_id, last_modified, content_hash)
file_id_str = '%s-%d-%s' % file_id
kvlclient.setup_namespace(highlights_kvlayer_tables)
if data['store'] is False:
kvlclient.delete('files', (file_id[0],))
kvlclient.delete('highlights', (file_id[0],))
logger.info('cleared all store records related to doc_id=%r', file_id[0])
else: # storing is allowed
payload_strs = list(kvlclient.get('highlights', file_id))
if payload_strs and payload_strs[0][1]:
payload_str = payload_strs[0][1]
try:
payload = json.loads(payload_str)
            except Exception as exc:
logger.critical('failed to decode out of %r',
payload_str, exc_info=True)
if payload['state'] != ERROR:
logger.info('returning stored payload for %r', file_id)
return payload
else:
logger.info('previously stored data was an error so trying again')
delay = len(data['body']) / 5000 # one second per 5KB
if delay > min_delay:
# store the data in `files` table
kvlclient.put('files', (file_id, json.dumps(data)))
payload = {
'state': HIGHLIGHTS_PENDING,
'id': file_id_str,
'delay': delay,
'start': time.time()
}
# store the payload, so that it gets returned during
# polling until replaced by the work unit.
payload_str = json.dumps(payload)
kvlclient.put('highlights', (file_id, payload_str))
logger.info('launching highlights async work unit')
if task_master is None:
conf = yakonfig.get_global_config('coordinate')
task_master = coordinate.TaskMaster(conf)
task_master.add_work_units('highlights', [(file_id_str, {})])
return payload
return maybe_store_highlights(file_id, data, tfidf, kvlclient) | 0.00319 |
def job(self):
"""REST binding for the job associated with the submitted build.
Returns:
Job: REST binding for running job or ``None`` if connection information was not available or no job was submitted.
"""
if self._submitter and hasattr(self._submitter, '_job_access'):
return self._submitter._job_access()
return None | 0.007792 |
def _update_cell_values(self, cell_ids, interior_edge_ids):
"""Updates all sorts of cell information for the given cell IDs.
"""
# update idx_hierarchy
nds = self.cells["nodes"][cell_ids].T
self.idx_hierarchy[..., cell_ids] = nds[self.local_idx]
# update self.half_edge_coords
self.half_edge_coords[:, cell_ids, :] = numpy.moveaxis(
self.node_coords[self.idx_hierarchy[1, ..., cell_ids]]
- self.node_coords[self.idx_hierarchy[0, ..., cell_ids]],
0,
1,
)
# update self.ei_dot_ej
self.ei_dot_ej[:, cell_ids] = numpy.einsum(
"ijk, ijk->ij",
self.half_edge_coords[[1, 2, 0]][:, cell_ids],
self.half_edge_coords[[2, 0, 1]][:, cell_ids],
)
# update self.ei_dot_ei
e = self.half_edge_coords[:, cell_ids]
self.ei_dot_ei[:, cell_ids] = numpy.einsum("ijk, ijk->ij", e, e)
# update cell_volumes, ce_ratios_per_half_edge
cv = compute_tri_areas(self.ei_dot_ej[:, cell_ids])
ce = compute_ce_ratios(self.ei_dot_ej[:, cell_ids], cv)
self.cell_volumes[cell_ids] = cv
self.ce_ratios[:, cell_ids] = ce
if self._interior_ce_ratios is not None:
self._interior_ce_ratios[interior_edge_ids] = 0.0
edge_gids = self._edge_to_edge_gid[2][interior_edge_ids]
adj_cells = self._edges_cells[2][interior_edge_ids]
is0 = self.cells["edges"][adj_cells[:, 0]][:, 0] == edge_gids
is1 = self.cells["edges"][adj_cells[:, 0]][:, 1] == edge_gids
is2 = self.cells["edges"][adj_cells[:, 0]][:, 2] == edge_gids
assert numpy.all(
numpy.sum(numpy.column_stack([is0, is1, is2]), axis=1) == 1
)
#
self._interior_ce_ratios[interior_edge_ids[is0]] += self.ce_ratios[
0, adj_cells[is0, 0]
]
self._interior_ce_ratios[interior_edge_ids[is1]] += self.ce_ratios[
1, adj_cells[is1, 0]
]
self._interior_ce_ratios[interior_edge_ids[is2]] += self.ce_ratios[
2, adj_cells[is2, 0]
]
is0 = self.cells["edges"][adj_cells[:, 1]][:, 0] == edge_gids
is1 = self.cells["edges"][adj_cells[:, 1]][:, 1] == edge_gids
is2 = self.cells["edges"][adj_cells[:, 1]][:, 2] == edge_gids
assert numpy.all(
numpy.sum(numpy.column_stack([is0, is1, is2]), axis=1) == 1
)
#
self._interior_ce_ratios[interior_edge_ids[is0]] += self.ce_ratios[
0, adj_cells[is0, 1]
]
self._interior_ce_ratios[interior_edge_ids[is1]] += self.ce_ratios[
1, adj_cells[is1, 1]
]
self._interior_ce_ratios[interior_edge_ids[is2]] += self.ce_ratios[
2, adj_cells[is2, 1]
]
if self._signed_cell_areas is not None:
# One could make p contiguous by adding a copy(), but that's not
# really worth it here.
p = self.node_coords[self.cells["nodes"][cell_ids]].T
# <https://stackoverflow.com/q/50411583/353337>
self._signed_cell_areas[cell_ids] = (
+p[0][2] * (p[1][0] - p[1][1])
+ p[0][0] * (p[1][1] - p[1][2])
+ p[0][1] * (p[1][2] - p[1][0])
) / 2
# TODO update those values
self._cell_centroids = None
self._edge_lengths = None
self._cell_circumcenters = None
self._control_volumes = None
self._cell_partitions = None
self._cv_centroids = None
self._surface_areas = None
self.subdomains = {}
return | 0.000526 |
def repair_mongo(name, dbpath):
"""repair mongodb after usafe shutdown"""
log_file = os.path.join(dbpath, 'mongod.log')
cmd = [name, "--dbpath", dbpath, "--logpath", log_file, "--logappend",
"--repair"]
proc = subprocess.Popen(
cmd, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
timeout = 45
t_start = time.time()
while time.time() - t_start < timeout:
line = str(proc.stdout.readline())
logger.info("repair output: %s" % (line,))
return_code = proc.poll()
if return_code is not None:
if return_code:
raise Exception("mongod --repair failed with exit code %s, "
"check log file: %s" % (return_code, log_file))
# Success when poll() returns 0
return
time.sleep(1)
proc.terminate()
raise Exception("mongod --repair failed to exit after %s seconds, "
"check log file: %s" % (timeout, log_file)) | 0.000974 |
def _make_sync_method(name):
"""Helper to synthesize a synchronous method from an async method name.
Used by the @add_sync_methods class decorator below.
Args:
    name: The name of the asynchronous method to wrap.
Returns:
A method (with first argument 'self') that retrieves and calls
self.<name>, passing its own arguments, expects it to return a
Future, and then waits for and returns that Future's result.
"""
def sync_wrapper(self, *args, **kwds):
method = getattr(self, name)
future = method(*args, **kwds)
return future.get_result()
return sync_wrapper | 0.006734 |
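A sketch of the `add_sync_methods` class decorator the docstring refers to; the `_async`-suffix naming convention used here is an assumption:

```python
def add_sync_methods(cls):
    """For every *_async method, synthesize a blocking sibling without the suffix."""
    for name in list(cls.__dict__):
        if name.endswith('_async'):
            sync_name = name[:-len('_async')]
            if not hasattr(cls, sync_name):
                setattr(cls, sync_name, _make_sync_method(name))
    return cls
```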
def classpath(cls, targets, classpath_products, confs=('default',)):
"""Return the classpath as a list of paths covering all the passed targets.
:param targets: Targets to build an aggregated classpath for.
:param ClasspathProducts classpath_products: Product containing classpath elements.
:param confs: The list of confs for use by this classpath.
:returns: The classpath as a list of path elements.
:rtype: list of string
"""
classpath_iter = cls._classpath_iter(classpath_products.get_for_targets(targets), confs=confs)
return list(classpath_iter) | 0.005102 |
def refresh_listing():
"""Refreshes the list of programs attached to the perform module from
the path"""
for program in get_programs():
if re.match(r'^[a-zA-Z_][a-zA-Z_0-9]*$', program) is not None:
globals()[program] = partial(_run_program, program)
globals()["_"] = _underscore_run_program | 0.003049 |
def upload_file_to_s3(awsclient, bucket, key, filename):
"""Upload a file to AWS S3 bucket.
:param awsclient:
:param bucket:
:param key:
:param filename:
:return:
"""
client_s3 = awsclient.get_client('s3')
transfer = S3Transfer(client_s3)
# Upload /tmp/myfile to s3://bucket/key and print upload progress.
transfer.upload_file(filename, bucket, key)
response = client_s3.head_object(Bucket=bucket, Key=key)
etag = response.get('ETag')
version_id = response.get('VersionId', None)
return etag, version_id | 0.001773 |
def addBarcodesToIdentifier(read, UMI, cell):
'''extract the identifier from a read and append the UMI and
cell barcode before the first space'''
read_id = read.identifier.split(" ")
if cell == "":
read_id[0] = read_id[0] + "_" + UMI
else:
read_id[0] = read_id[0] + "_" + cell + "_" + UMI
identifier = " ".join(read_id)
return identifier | 0.002597 |
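A small usage sketch with a hypothetical read object (only an `identifier` attribute is assumed):

```python
class Read(object):
    def __init__(self, identifier):
        self.identifier = identifier

read = Read("READ1 length=50")
print(addBarcodesToIdentifier(read, UMI="ACGT", cell=""))      # READ1_ACGT length=50
print(addBarcodesToIdentifier(read, UMI="ACGT", cell="TTTT"))  # READ1_TTTT_ACGT length=50
```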
def _format_subtree(self, subtree):
"""Recursively format all subtrees."""
subtree['children'] = list(subtree['children'].values())
for child in subtree['children']:
self._format_subtree(child)
return subtree | 0.007937 |
def enumerate_tokens(sid=None, session_id=None, privs=None):
'''
Enumerate tokens from any existing processes that can be accessed.
Optionally filter by sid.
'''
for p in psutil.process_iter():
if p.pid == 0:
continue
try:
ph = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, 0, p.pid)
except win32api.error as exc:
if exc.winerror == 5:
log.debug("Unable to OpenProcess pid=%d name=%s", p.pid, p.name())
continue
raise exc
try:
access = (
win32security.TOKEN_DUPLICATE |
win32security.TOKEN_QUERY |
win32security.TOKEN_IMPERSONATE |
win32security.TOKEN_ASSIGN_PRIMARY
)
th = win32security.OpenProcessToken(ph, access)
except Exception as exc:
log.debug("OpenProcessToken failed pid=%d name=%s user%s", p.pid, p.name(), p.username())
continue
try:
process_sid = win32security.GetTokenInformation(th, win32security.TokenUser)[0]
except Exception as exc:
log.exception("GetTokenInformation pid=%d name=%s user%s", p.pid, p.name(), p.username())
continue
proc_sid = win32security.ConvertSidToStringSid(process_sid)
if sid and sid != proc_sid:
log.debug("Token for pid does not match user sid: %s", sid)
continue
if session_id and win32security.GetTokenInformation(th, win32security.TokenSessionId) != session_id:
continue
def has_priv(tok, priv):
luid = win32security.LookupPrivilegeValue(None, priv)
for priv_luid, flags in win32security.GetTokenInformation(tok, win32security.TokenPrivileges):
if priv_luid == luid:
return True
return False
if privs:
has_all = True
for name in privs:
if not has_priv(th, name):
has_all = False
if not has_all:
continue
yield dup_token(th) | 0.003277 |
def _abs32(ins):
""" Absolute value of top of the stack (32 bits in DEHL)
"""
output = _32bit_oper(ins.quad[2])
output.append('call __ABS32')
output.append('push de')
output.append('push hl')
REQUIRES.add('abs32.asm')
return output | 0.003802 |
def _request_toc_element(self, index):
"""Request information about a specific item in the TOC"""
logger.debug('Requesting index %d on port %d', index, self.port)
pk = CRTPPacket()
if self._useV2:
pk.set_header(self.port, TOC_CHANNEL)
pk.data = (CMD_TOC_ITEM_V2, index & 0x0ff, (index >> 8) & 0x0ff)
self.cf.send_packet(pk, expected_reply=(
CMD_TOC_ITEM_V2, index & 0x0ff, (index >> 8) & 0x0ff))
else:
pk.set_header(self.port, TOC_CHANNEL)
pk.data = (CMD_TOC_ELEMENT, index)
self.cf.send_packet(pk, expected_reply=(CMD_TOC_ELEMENT, index)) | 0.002999 |
def Nu_plate_Kumar(Re, Pr, chevron_angle, mu=None, mu_wall=None):
r'''Calculates Nusselt number for single-phase flow in a
**well-designed** Chevron-style plate heat exchanger according to [1]_.
The data is believed to have been developed by APV International Limited,
since acquired by SPX Corporation. This uses a curve fit of that data
published in [2]_.
.. math::
Nu = C_1 Re^m Pr^{0.33}\left(\frac{\mu}{\mu_{wall}}\right)^{0.17}
`C1` and `m` are coefficients looked up in a table, with varying ranges
of Re validity and chevron angle validity. See the source for their
exact values. The wall fluid property correction is included only if the
viscosity values are provided.
Parameters
----------
Re : float
Reynolds number with respect to the hydraulic diameter of the channels,
[-]
Pr : float
Prandtl number calculated with bulk fluid properties, [-]
chevron_angle : float
Angle of the plate corrugations with respect to the vertical axis
(the direction of flow if the plates were straight), between 0 and
90. Many plate exchangers use two alternating patterns; use their
average angle for that situation [degrees]
mu : float, optional
Viscosity of the fluid at the bulk (inlet and outlet average)
temperature, [Pa*s]
mu_wall : float, optional
Viscosity of fluid at wall temperature, [Pa*s]
Returns
-------
Nu : float
Nusselt number with respect to `Dh`, [-]
Notes
-----
Data on graph from Re=0.1 to Re=10000, with chevron angles 30 to 65 degrees.
See `PlateExchanger` for further clarification on the definitions.
    It is believed the constants used in this correlation were curve-fit to
    the actual graph in [1]_ by the author of [2]_, as no explicit
    correlation is given in the original work.
As the coefficients change, there are numerous small discontinuities,
although the data on the graphs is continuous with sharp transitions
of the slope.
The author of [1]_ states clearly this correlation is "applicable only to
well designed Chevron PHEs".
Examples
--------
>>> Nu_plate_Kumar(Re=2000, Pr=0.7, chevron_angle=30)
47.757818892853955
With the wall-correction factor included:
>>> Nu_plate_Kumar(Re=2000, Pr=0.7, chevron_angle=30, mu=1E-3, mu_wall=8E-4)
49.604284135097544
References
----------
.. [1] Kumar, H. "The plate heat exchanger: construction and design." In
First U.K. National Conference on Heat Transfer: Held at the University
of Leeds, 3-5 July 1984, Institute of Chemical Engineering Symposium
Series, vol. 86, pp. 1275-1288. 1984.
.. [2] Ayub, Zahid H. "Plate Heat Exchanger Literature Survey and New Heat
Transfer and Pressure Drop Correlations for Refrigerant Evaporators."
Heat Transfer Engineering 24, no. 5 (September 1, 2003): 3-16.
doi:10.1080/01457630304056.
'''
# Uses the standard diameter as characteristic diameter
beta_list_len = len(Kumar_beta_list)
for i in range(beta_list_len):
if chevron_angle <= Kumar_beta_list[i]:
C1_options, m_options, Re_ranges = Kumar_C1s[i], Kumar_ms[i], Kumar_Nu_Res[i]
break
elif i == beta_list_len-1:
C1_options, m_options, Re_ranges = Kumar_C1s[-1], Kumar_ms[-1], Kumar_Nu_Res[-1]
Re_len = len(Re_ranges)
for j in range(Re_len):
if Re <= Re_ranges[j]:
C1, m = C1_options[j], m_options[j]
break
elif j == Re_len-1:
C1, m = C1_options[-1], m_options[-1]
Nu = C1*Re**m*Pr**0.33
if mu_wall is not None and mu is not None:
Nu *= (mu/mu_wall)**0.17
return Nu | 0.006787 |
def _mouse_pointer_moved(self, x, y):
'''GUI callback for mouse moved'''
self._namespace['MOUSEX'] = x
self._namespace['MOUSEY'] = y | 0.012821 |
def overlaps(self, canvas, exclude=[]):
"""
Returns True if sprite is touching any other sprite.
"""
try:
exclude = list(exclude)
except TypeError:
exclude = [exclude]
exclude.append(self)
for selfY, row in enumerate(self.image.image()):
for selfX, pixel in enumerate(row):
canvasPixelOn = canvas.testPixel(
(selfX + self.position[0], selfY + self.position[1]),
excludedSprites=exclude
)
if pixel and canvasPixelOn:
return True
return False | 0.003063 |
def log_stack(logger, level=logging.INFO, limit=None, frame=None):
"""
Display the current stack on ``logger``.
This function is designed to be used during emission of log messages, so it
won't call itself.
"""
if showing_stack.inside:
return
showing_stack.inside = True
try:
if frame is None:
frame = sys._getframe(1)
stack = "".join(traceback.format_stack(frame, limit))
for line in (l[2:] for l in stack.split("\n") if l.strip()):
logger.log(level, line)
finally:
showing_stack.inside = False | 0.003356 |
def get_parent_device(self):
"""Retreive parent device string id"""
if not self.parent_instance_id:
return ""
dev_buffer_type = winapi.c_tchar * MAX_DEVICE_ID_LEN
dev_buffer = dev_buffer_type()
try:
if winapi.CM_Get_Device_ID(self.parent_instance_id, byref(dev_buffer),
MAX_DEVICE_ID_LEN, 0) == 0: #success
return dev_buffer.value
return ""
finally:
del dev_buffer
del dev_buffer_type | 0.011091 |
def get_configuration_set_by_id(self, id):
'''Finds a configuration set in the component by its ID.
@param id The ID of the configuration set to search for.
@return The ConfigurationSet object for the set, or None if it was not
found.
'''
for cs in self.configuration_sets:
if cs.id == id:
return cs
return None | 0.005038 |
def get_or_create_permission(codename, name=camel_or_snake_to_title):
"""
Get a Permission object from a permission name.
    :param codename: permission code name
    :param name: human-readable permission name (str), or a callable that takes the
        codename as argument and returns a str
"""
user_ct = ContentType.objects.get_for_model(get_user_model())
return Permission.objects.get_or_create(content_type=user_ct, codename=codename,
defaults={'name': name(codename) if callable(name) else name}) | 0.006993 |
def _validate(self, val):
"""
Checks that the value is numeric and that it is within the hard
bounds; if not, an exception is raised.
"""
if self.allow_None and val is None:
return
if not isinstance(val, dt_types) and not (self.allow_None and val is None):
raise ValueError("Date '%s' only takes datetime types."%self.name)
if self.step is not None and not isinstance(self.step, dt_types):
raise ValueError("Step parameter can only be None or a datetime type")
self._checkBounds(val) | 0.008532 |
def bootstrapping_dtrajs(dtrajs, lag, N_full, nbs=10000, active_set=None):
"""
Perform trajectory based re-sampling.
Parameters
----------
dtrajs : list of discrete trajectories
lag : int
lag time
N_full : int
Number of states in discrete trajectories.
nbs : int, optional
Number of bootstrapping samples
active_set : ndarray
Indices of active set, all count matrices will be restricted
to active set.
Returns
-------
smean : ndarray(N,)
mean values of singular values
sdev : ndarray(N,)
standard deviations of singular values
"""
# Get the number of simulations:
Q = len(dtrajs)
# Get the number of states in the active set:
if active_set is not None:
N = active_set.size
else:
N = N_full
# Build up a matrix of count matrices for each simulation. Size is Q*N^2:
traj_ind = []
state1 = []
state2 = []
q = 0
for traj in dtrajs:
traj_ind.append(q*np.ones(traj[:-lag].size))
state1.append(traj[:-lag])
state2.append(traj[lag:])
q += 1
traj_inds = np.concatenate(traj_ind)
pairs = N_full * np.concatenate(state1) + np.concatenate(state2)
data = np.ones(pairs.size)
Ct_traj = scipy.sparse.coo_matrix((data, (traj_inds, pairs)), shape=(Q, N_full*N_full))
Ct_traj = Ct_traj.tocsr()
# Perform re-sampling:
svals = np.zeros((nbs, N))
for s in range(nbs):
# Choose selection:
sel = np.random.choice(Q, Q, replace=True)
# Compute count matrix for selection:
Ct_sel = Ct_traj[sel, :].sum(axis=0)
Ct_sel = np.asarray(Ct_sel).reshape((N_full, N_full))
if active_set is not None:
from pyemma.util.linalg import submatrix
Ct_sel = submatrix(Ct_sel, active_set)
svals[s, :] = scl.svdvals(Ct_sel)
# Compute mean and uncertainties:
smean = np.mean(svals, axis=0)
sdev = np.std(svals, axis=0)
return smean, sdev | 0.000985 |
def to_mongo(self):
"""Translate projection to MongoDB query form.
:return: Dictionary to put into a MongoDB JSON query
:rtype: dict
"""
d = copy.copy(self._fields)
for k, v in self._slices.items():
d[k] = {'$slice': v}
return d | 0.006734 |
def apply_constraint(self,constraint,selectfrac_skip=False,
distribution_skip=False,overwrite=False):
"""Apply a constraint to the population
:param constraint:
Constraint to apply.
:type constraint:
:class:`Constraint`
:param selectfrac_skip: (optional)
If ``True``, then this constraint will not be considered
            towards diminishing the selection fraction of the population.
"""
#grab properties
constraints = self.constraints
my_selectfrac_skip = self.selectfrac_skip
my_distribution_skip = self.distribution_skip
if constraint.name in constraints and not overwrite:
logging.warning('constraint already applied: {}'.format(constraint.name))
return
constraints[constraint.name] = constraint
if selectfrac_skip:
my_selectfrac_skip.append(constraint.name)
if distribution_skip:
my_distribution_skip.append(constraint.name)
#forward-looking for EclipsePopulation
if hasattr(self, '_make_kde'):
self._make_kde()
self.constraints = constraints
self.selectfrac_skip = my_selectfrac_skip
self.distribution_skip = my_distribution_skip | 0.007092 |
def _is_array_integer(arr):
"""Returns True if an array contains integers (integer type or near-int
float values) and False otherwise.
>>> _is_array_integer(np.arange(10))
True
>>> _is_array_integer(np.arange(7.0, 20.0, 1.0))
True
>>> _is_array_integer(np.arange(0, 1, 0.1))
False
"""
return issubclass(arr.dtype.type, np.integer) or np.allclose(arr, np.round(arr)) | 0.004926 |
def update_option_by_id(cls, option_id, option, **kwargs):
"""Update Option
Update attributes of Option
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.update_option_by_id(option_id, option, async=True)
>>> result = thread.get()
:param async bool
:param str option_id: ID of option to update. (required)
:param Option option: Attributes of option to update. (required)
:return: Option
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._update_option_by_id_with_http_info(option_id, option, **kwargs)
else:
(data) = cls._update_option_by_id_with_http_info(option_id, option, **kwargs)
return data | 0.004154 |
def get(name, fallback='ssh'):
"""
Retrieve the matching backend class from a string. If no backend can be
matched, it raises an error.
>>> get('ssh')
<class 'remoto.backends.BaseConnection'>
>>> get()
<class 'remoto.backends.BaseConnection'>
>>> get('non-existent')
<class 'remoto.backends.BaseConnection'>
>>> get('non-existent', 'openshift')
<class 'remoto.backends.openshift.OpenshiftConnection'>
"""
mapping = {
'ssh': ssh.SshConnection,
'oc': openshift.OpenshiftConnection,
'openshift': openshift.OpenshiftConnection,
'kubernetes': kubernetes.KubernetesConnection,
'k8s': kubernetes.KubernetesConnection,
'local': local.LocalConnection,
'popen': local.LocalConnection,
'localhost': local.LocalConnection,
'docker': docker.DockerConnection,
'podman': podman.PodmanConnection,
}
if not name:
        # falls back to plain local/ssh
name = 'ssh'
name = name.strip().lower()
connection_class = mapping.get(name)
if not connection_class:
logger.warning('no connection backend found for: "%s"' % name)
if fallback:
logger.info('falling back to "%s"' % fallback)
# this assumes that ``fallback`` is a valid mapping name
return mapping.get(fallback)
return connection_class | 0.000718 |
def get_supported_metrics_notification_hub(self, name, hub_name):
'''
        Retrieves the list of supported metrics for this namespace and notification hub
name:
Name of the service bus namespace.
hub_name:
Name of the service bus notification hub in this namespace.
'''
response = self._perform_get(
self._get_get_supported_metrics_hub_path(name, hub_name),
None)
return _MinidomXmlToObject.convert_response_to_feeds(
response,
partial(
_ServiceBusManagementXmlSerializer.xml_to_metrics,
object_type=MetricProperties
)
) | 0.002915 |
def set_storage_container_metadata(kwargs=None, storage_conn=None, call=None):
'''
.. versionadded:: 2015.8.0
Set a storage container's metadata
CLI Example:
.. code-block:: bash
salt-cloud -f set_storage_container my-azure name=mycontainer \\
x_ms_meta_name_values='{"my_name": "my_value"}'
name:
Name of existing container.
meta_name_values:
A dict containing name, value for metadata.
Example: {'category':'test'}
lease_id:
If specified, set_storage_container_metadata only succeeds if the
container's lease is active and matches this ID.
'''
if call != 'function':
raise SaltCloudSystemExit(
            'The set_storage_container_metadata function must be called with -f or --function.'
)
if kwargs is None:
kwargs = {}
if 'name' not in kwargs:
        raise SaltCloudSystemExit('A storage container name must be specified as "name"')
x_ms_meta_name_values = salt.utils.yaml.safe_load(
kwargs.get('meta_name_values', '')
)
if not storage_conn:
storage_conn = get_storage_conn(conn_kwargs=kwargs)
try:
storage_conn.set_container_metadata(
container_name=kwargs['name'],
x_ms_meta_name_values=x_ms_meta_name_values,
x_ms_lease_id=kwargs.get('lease_id', None),
)
return {'Success': 'The storage container was successfully updated'}
except AzureConflictHttpError:
raise SaltCloudSystemExit('There was a conflict.') | 0.001928 |
def CMYK_to_CMY(cobj, *args, **kwargs):
"""
Converts CMYK to CMY.
NOTE: CMYK and CMY values range from 0.0 to 1.0
"""
cmy_c = cobj.cmyk_c * (1.0 - cobj.cmyk_k) + cobj.cmyk_k
cmy_m = cobj.cmyk_m * (1.0 - cobj.cmyk_k) + cobj.cmyk_k
cmy_y = cobj.cmyk_y * (1.0 - cobj.cmyk_k) + cobj.cmyk_k
return CMYColor(cmy_c, cmy_m, cmy_y) | 0.002809 |
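A worked numeric check of the same formula on plain floats (not the library's color objects): with K = 0.5, each CMY channel is `c * (1 - k) + k`.

```python
def cmyk_to_cmy(c, m, y, k):
    # C_cmy = C_cmyk * (1 - K) + K, and likewise for M and Y
    return (c * (1.0 - k) + k, m * (1.0 - k) + k, y * (1.0 - k) + k)

result = cmyk_to_cmy(0.2, 0.4, 0.6, 0.5)
assert all(abs(a - b) < 1e-9 for a, b in zip(result, (0.6, 0.7, 0.8)))
```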
def scopes_as(self, new_scopes):
"""Replace my :attr:`scopes` for the duration of the with block.
My global scope is not replaced.
Args:
new_scopes (list of dict-likes): The new :attr:`scopes` to use.
"""
old_scopes, self.scopes = self.scopes, new_scopes
yield
self.scopes = old_scopes | 0.005634 |
def create_widget(self):
""" Create the underlying widget.
"""
d = self.declaration
self.widget = RelativeLayout(self.get_context(), None, d.style) | 0.011111 |
def get_all_supported_types_for_ext(self, ext_to_match: str, strict_type_matching: bool = False) -> Set[Type]:
"""
Utility method to return the set of all supported types that may be parsed from files with the given extension.
ext=JOKER is a joker that means all extensions
:param ext_to_match:
:param strict_type_matching:
:return:
"""
matching = self.find_all_matching_parsers(required_ext=ext_to_match, strict=strict_type_matching)[0]
return {typ for types in [p.supported_types for p in (matching[0] + matching[1] + matching[2])]
for typ in types} | 0.009375 |
def compile_compiler_bridge(self, context):
"""Compile the compiler bridge to be used by zinc, using our scala bootstrapper.
It will compile and cache the jar, and materialize it if not already there.
:param context: The context of the task trying to compile the bridge.
This is mostly needed to use its scheduler to create digests of the relevant jars.
:return: The absolute path to the compiled scala-compiler-bridge jar.
"""
bridge_jar_name = 'scala-compiler-bridge.jar'
bridge_jar = os.path.join(self._compiler_bridge_cache_dir, bridge_jar_name)
global_bridge_cache_dir = os.path.join(self._zinc_factory.get_options().pants_bootstrapdir, fast_relpath(self._compiler_bridge_cache_dir, self._workdir()))
globally_cached_bridge_jar = os.path.join(global_bridge_cache_dir, bridge_jar_name)
# Workaround to avoid recompiling the bridge for every integration test
# We check the bootstrapdir (.cache) for the bridge.
# If it exists, we make a copy to the buildroot.
#
# TODO Remove when action caches are implemented.
if os.path.exists(globally_cached_bridge_jar):
# Cache the bridge jar under buildroot, to allow snapshotting
safe_mkdir(self._relative_to_buildroot(self._compiler_bridge_cache_dir))
safe_hardlink_or_copy(globally_cached_bridge_jar, bridge_jar)
if not os.path.exists(bridge_jar):
res = self._run_bootstrapper(bridge_jar, context)
context._scheduler.materialize_directories((
DirectoryToMaterialize(get_buildroot(), res.output_directory_digest),
))
# For the workaround above to work, we need to store a copy of the bridge in
# the bootstrapdir cache (.cache).
safe_mkdir(global_bridge_cache_dir)
safe_hardlink_or_copy(bridge_jar, globally_cached_bridge_jar)
return ClasspathEntry(bridge_jar, res.output_directory_digest)
else:
bridge_jar_snapshot = context._scheduler.capture_snapshots((PathGlobsAndRoot(
PathGlobs((self._relative_to_buildroot(bridge_jar),)),
text_type(get_buildroot())
),))[0]
bridge_jar_digest = bridge_jar_snapshot.directory_digest
return ClasspathEntry(bridge_jar, bridge_jar_digest) | 0.008985 |
def support_autoupload_param_password(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
autoupload_param = ET.SubElement(support, "autoupload-param")
password = ET.SubElement(autoupload_param, "password")
password.text = kwargs.pop('password')
callback = kwargs.pop('callback', self._callback)
return callback(config) | 0.006012 |
def wrap_form_params(func):
"""
A middleware that parses the url-encoded body and attach
the result to the request `form_params` attribute.
    This middleware also merges the parsed values with any existing
    `params` attribute, in the same way as `wrap_query_params` does.
"""
@functools.wraps(func)
def wrapper(request, *args, **kwargs):
ctype, pdict = parse_header(request.headers.get('Content-Type', ''))
if ctype == "application/x-www-form-urlencoded":
params = {}
for key, value in parse_qs(request.body.decode("utf-8")).items():
if len(value) == 1:
params[key] = value[0]
else:
params[key] = value
request.params = merge_dicts(getattr(request, "params", None), params)
request.form_params = params
return func(request, *args, **kwargs)
return wrapper | 0.002144 |
def _copy_flaky_attributes(cls, test, test_class):
"""
Copy flaky attributes from the test callable or class to the test.
:param test:
The test that is being prepared to run
:type test:
:class:`nose.case.Test`
"""
test_callable = cls._get_test_callable(test)
if test_callable is None:
return
for attr, value in cls._get_flaky_attributes(test_class).items():
already_set = hasattr(test, attr)
if already_set:
continue
attr_on_callable = getattr(test_callable, attr, None)
if attr_on_callable is not None:
cls._set_flaky_attribute(test, attr, attr_on_callable)
elif value is not None:
cls._set_flaky_attribute(test, attr, value) | 0.002395 |
def download_signed_document(self, signature_id, document_id):
"""
        Download a signed document
@signature_id: Id of signature
@document_id: Id of document
"""
connection = Connection(self.token)
connection.set_url(self.production, self.SIGNS_DOCUMENTS_SIGNED_URL % (signature_id, document_id))
response, headers = connection.file_request()
if headers['content-type'] == 'application/json':
return response
return response | 0.00566 |
def transform(self, fn, dtype=None, *args, **kwargs):
"""Equivalent to map, compatibility purpose only.
Column parameter ignored.
"""
rdd = self._rdd.map(fn)
if dtype is None:
return self.__class__(rdd, noblock=True, **self.get_params())
if dtype is np.ndarray:
return ArrayRDD(rdd, bsize=self.bsize, noblock=True)
elif dtype is sp.spmatrix:
return SparseRDD(rdd, bsize=self.bsize, noblock=True)
else:
return BlockRDD(rdd, bsize=self.bsize, dtype=dtype, noblock=True) | 0.003448 |
def _read_optimized_geometry(self):
"""
Parses optimized XYZ coordinates. If not present, parses optimized Z-matrix.
"""
header_pattern = r"\*+\s+OPTIMIZATION\s+CONVERGED\s+\*+\s+\*+\s+Coordinates \(Angstroms\)\s+ATOM\s+X\s+Y\s+Z"
table_pattern = r"\s+\d+\s+\w+\s+([\d\-\.]+)\s+([\d\-\.]+)\s+([\d\-\.]+)"
footer_pattern = r"\s+Z-matrix Print:"
parsed_optimized_geometry = read_table_pattern(
self.text, header_pattern, table_pattern, footer_pattern)
        if parsed_optimized_geometry == [] or parsed_optimized_geometry is None:
self.data["optimized_geometry"] = None
header_pattern = r"^\s+\*+\s+OPTIMIZATION CONVERGED\s+\*+\s+\*+\s+Z-matrix\s+Print:\s+\$molecule\s+[\d\-]+\s+[\d\-]+\n"
table_pattern = r"\s*(\w+)(?:\s+(\d+)\s+([\d\-\.]+)(?:\s+(\d+)\s+([\d\-\.]+)(?:\s+(\d+)\s+([\d\-\.]+))*)*)*(?:\s+0)*"
footer_pattern = r"^\$end\n"
self.data["optimized_zmat"] = read_table_pattern(
self.text, header_pattern, table_pattern, footer_pattern)
else:
self.data["optimized_geometry"] = process_parsed_coords(
parsed_optimized_geometry[0])
            if self.data.get('charge') is not None:
self.data["molecule_from_optimized_geometry"] = Molecule(
species=self.data.get('species'),
coords=self.data.get('optimized_geometry'),
charge=self.data.get('charge'),
spin_multiplicity=self.data.get('multiplicity')) | 0.005158 |
def get_guardian_enrollments(self, user_id):
"""Retrieves all Guardian enrollments.
Args:
user_id (str): The user_id of the user to retrieve
See: https://auth0.com/docs/api/management/v2#!/Users/get_enrollments
"""
url = self._url('{}/enrollments'.format(user_id))
return self.client.get(url) | 0.005634 |
def search(self):
"redirect to bookmark search"
form = forms.HomeForm()
bbm_filter = bs_filters.BookmarkBukuFilter(
all_keywords=False, deep=form.deep.data, regex=form.regex.data)
op_text = bbm_filter.operation()
values_combi = sorted(itertools.product([True, False], repeat=3))
for idx, (all_keywords, deep, regex) in enumerate(values_combi):
if deep == form.deep.data and regex == form.regex.data and not all_keywords:
choosen_idx = idx
url_op_text = op_text.replace(', ', '_').replace(' ', ' ').replace(' ', '_')
key = ''.join(['flt', str(choosen_idx), '_buku_', url_op_text])
kwargs = {key: form.keyword.data}
url = url_for('bookmark.index_view', **kwargs)
return redirect(url) | 0.004938 |
def set_bot(self, bot):
''' Bot must be set before running '''
self.bot = bot
self.sink.set_bot(bot) | 0.016129 |
def add(addon, dev, interactive):
"""Add a dependency.
Examples:
$ django add dynamic-rest==1.5.0
+ dynamic-rest == 1.5.0
"""
application = get_current_application()
application.add(
addon,
dev=dev,
interactive=interactive
) | 0.003534 |
def countNeighbours(self, cell):
"""
        Return the number of active neighbours within one position of the cell
"""
count = 0
y, x = cell
y = y % self.y_grid
x = x % self.x_grid
y1 = (y - 1) % self.y_grid
y2 = (y + 1) % self.y_grid
x1 = (x - 1) % self.x_grid
x2 = (x + 1) % self.x_grid
cell = y, x
for neighbour in product([y1, y, y2], [x1, x, x2]):
if neighbour != cell and self.grid.get(neighbour):
count += 1
return count | 0.003559 |
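A minimal sketch of the wrap-around count above on a tiny toroidal grid. The SimpleNamespace stand-in for the host object and the itertools import are assumptions; the snippet relies on `product` and on the `y_grid`, `x_grid` and `grid` attributes shown.
from itertools import product        # required by countNeighbours above
from types import SimpleNamespace

# 3x3 torus with two active cells; `grid` maps active coordinates to True.
g = SimpleNamespace(y_grid=3, x_grid=3, grid={(0, 0): True, (0, 2): True})
print(countNeighbours(g, (0, 1)))    # 2 - both active cells are adjacent
print(countNeighbours(g, (2, 1)))    # 2 - wrap-around brings row 0 next to row 2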
def replace_some(ol,value,*indexes,**kwargs):
'''
from elist.elist import *
ol = [1,'a',3,'a',5,'a',6,'a']
id(ol)
new = replace_some(ol,'AAA',1,3,7)
ol
new
id(ol)
id(new)
####
ol = [1,'a',3,'a',5,'a',6,'a']
id(ol)
rslt = replace_some(ol,'AAA',1,3,7,mode="original")
ol
rslt
id(ol)
id(rslt)
'''
if('mode' in kwargs):
mode = kwargs["mode"]
else:
mode = "new"
indexes = list(indexes)
return(replace_seqs(ol,value,indexes,mode=mode)) | 0.011725 |
def mccormick(theta):
"""McCormick function"""
x, y = theta
obj = np.sin(x + y) + (x - y)**2 - 1.5 * x + 2.5 * y + 1
grad = np.array([np.cos(x + y) + 2 * (x - y) - 1.5,
np.cos(x + y) - 2 * (x - y) + 2.5])
return obj, grad | 0.003817 |
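Since the helper returns both the objective and its gradient, a quick sanity check at the McCormick function's known global minimum (about x = -0.54719, y = -1.54719, f ≈ -1.9133; standard published values, not taken from this snippet) shows the gradient formula vanishing there.
import numpy as np                        # the snippet assumes numpy imported as np

obj, grad = mccormick(np.array([-0.54719, -1.54719]))
print(round(obj, 4))                      # ~ -1.9133
print(np.allclose(grad, 0.0, atol=1e-3))  # True: gradient is ~ (0, 0) at the minimum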
def refresh(self, include_fields=None, exclude_fields=None,
extra_fields=None):
"""
Refresh the bug with the latest data from bugzilla
"""
# pylint: disable=protected-access
r = self.bugzilla._getbug(self.bug_id,
include_fields=include_fields, exclude_fields=exclude_fields,
extra_fields=self._bug_fields + (extra_fields or []))
# pylint: enable=protected-access
self._update_dict(r) | 0.010616 |
def is_file_consistent(local_path_file, md5_hash):
"""Check if file is there and if the md5_hash is correct."""
return os.path.isfile(local_path_file) and \
hashlib.md5(open(local_path_file, 'rb').read()).hexdigest() == md5_hash | 0.004098 |
def trace(self, *attributes):
"""
Function decorator that traces functions
NOTE: Must be placed after the @app.route decorator
@param attributes any number of flask.Request attributes
(strings) to be set as tags on the created span
"""
def decorator(f):
def wrapper(*args, **kwargs):
if self._trace_all_requests:
return f(*args, **kwargs)
self._before_request_fn(list(attributes))
try:
r = f(*args, **kwargs)
except Exception as e:
self._after_request_fn(error=e)
raise
self._after_request_fn()
return r
wrapper.__name__ = f.__name__
return wrapper
return decorator | 0.002245 |
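A hypothetical usage sketch of the decorator above (the `tracing` instance, route and view names are illustrative assumptions, and the block is not runnable without a real tracer): the docstring's note means the tracing decorator must sit below @app.route so it wraps the view function itself.
from flask import Flask

app = Flask(__name__)
tracing = ...  # hypothetical: an instance of the class that defines trace() above

@app.route('/users/<int:user_id>')
@tracing.trace('method', 'path')   # tags the span with request.method / request.path
def get_user(user_id):
    return {'id': user_id}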
def ask_for_confirm_with_message(cls, ui, prompt='Do you agree?', message='', **options):
"""Returns True if user agrees, False otherwise"""
return cls.get_appropriate_helper(ui).ask_for_confirm_with_message(prompt, message) | 0.016667 |
def class_types(self):
"""list of class/class declaration types, extracted from the
operator arguments"""
if None is self.__class_types:
self.__class_types = []
for type_ in self.argument_types:
decl = None
type_ = type_traits.remove_reference(type_)
if type_traits_classes.is_class(type_):
decl = type_traits_classes.class_traits.get_declaration(
type_)
elif type_traits_classes.is_class_declaration(type_):
tt = type_traits_classes.class_declaration_traits
decl = tt.get_declaration(type_)
else:
pass
if decl:
self.__class_types.append(decl)
return self.__class_types | 0.002364 |
def get_requirement_from_url(url):
"""Get a requirement from the URL, if possible. This looks for #egg
in the URL"""
link = Link(url)
egg_info = link.egg_fragment
if not egg_info:
egg_info = splitext(link.filename)[0]
return package_to_requirement(egg_info) | 0.003448 |
def run(ident):
'''Launch or resume an harvesting for a given source if none is running'''
source = get_source(ident)
cls = backends.get(current_app, source.backend)
backend = cls(source)
backend.harvest() | 0.004444 |
def on_queue_declareok(self, method_frame):
"""Method invoked by pika when the Queue.Declare RPC call made in
setup_queue has completed. In this method we will bind the queue
and exchange together with the routing key by issuing the Queue.Bind
RPC command. When this command is complete, the on_bindok method will
be invoked by pika.
:param pika.frame.Method method_frame: The Queue.DeclareOk frame
"""
logger.info('Binding to rabbit', exchange=self._exchange, queue=self._queue)
self._channel.queue_bind(self.on_bindok, self._queue, self._exchange) | 0.004815 |
def nmget(o, key_path, def_val=None, path_delimiter='.', null_as_default=True):
'''
A short-hand for retrieving a value from nested mappings
("nested-mapping-get"). At each level it checks if the given "path"
component in the given key exists and returns the default value whenever
the lookup fails.
Example:
>>> o = {'a':{'b':1}, 'x': None}
>>> nmget(o, 'a', 0)
{'b': 1}
>>> nmget(o, 'a.b', 0)
1
>>> nmget(o, 'a/b', 0, '/')
1
>>> nmget(o, 'a.c', 0)
0
>>> nmget(o, 'x', 0)
0
>>> nmget(o, 'x', 0, null_as_default=False)
None
'''
pieces = key_path.split(path_delimiter)
while pieces:
p = pieces.pop(0)
if o is None or p not in o:
return def_val
o = o[p]
if o is None and null_as_default:
return def_val
return o | 0.001192 |
def get_words(data):
"""
Extracts the words from given string.
Usage::
>>> get_words("Users are: John Doe, Jane Doe, Z6PO.")
[u'Users', u'are', u'John', u'Doe', u'Jane', u'Doe', u'Z6PO']
:param data: Data to extract words from.
:type data: unicode
:return: Words.
:rtype: list
"""
words = re.findall(r"\w+", data)
LOGGER.debug("> Words: '{0}'".format(", ".join(words)))
return words | 0.002242 |
def set_filters(self, filters=None):
"""Apply filtering to all messages received by this Bus.
All messages that match at least one filter are returned.
If `filters` is `None` or a zero length sequence, all
messages are matched.
Calling without passing any filters will reset the applied
filters to `None`.
:param filters:
An iterable of dictionaries each containing a "can_id",
a "can_mask", and an optional "extended" key.
>>> [{"can_id": 0x11, "can_mask": 0x21, "extended": False}]
A filter matches, when
``<received_can_id> & can_mask == can_id & can_mask``.
If ``extended`` is set as well, it only matches messages where
``<received_is_extended> == extended``. Otherwise it matches every
message based only on the arbitration ID and mask.
"""
self._filters = filters or None
self._apply_filters(self._filters) | 0.002022 |
def get_last_value_from_timeseries(timeseries):
"""Gets the most recent non-zero value for a .last metric or zero
for empty data."""
if not timeseries:
return 0
for metric, points in timeseries.items():
return next((p['y'] for p in reversed(points) if p['y'] > 0), 0) | 0.003344 |
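A minimal illustration; the timeseries shape is inferred from the code above (a mapping of metric name to a list of point dicts with 'y' values).
ts = {'requests.last': [{'y': 0}, {'y': 4}, {'y': 7}, {'y': 0}]}
print(get_last_value_from_timeseries(ts))   # 7 - most recent non-zero value
print(get_last_value_from_timeseries({}))   # 0 - empty data falls back to zero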
def load_device(self, serial=None):
"""Creates an AndroidDevice for the given serial number.
If no serial is given, it will read from the ANDROID_SERIAL
environmental variable. If the environmental variable is not set, then
it will read from 'adb devices' if there is only one.
"""
serials = android_device.list_adb_devices()
if not serials:
raise Error('No adb device found!')
# No serial provided, try to pick up the device automatically.
if not serial:
env_serial = os.environ.get('ANDROID_SERIAL', None)
if env_serial is not None:
serial = env_serial
elif len(serials) == 1:
serial = serials[0]
else:
raise Error(
'Expected one phone, but %d found. Use the -s flag or '
'specify ANDROID_SERIAL.' % len(serials))
if serial not in serials:
raise Error('Device "%s" is not found by adb.' % serial)
ads = android_device.get_instances([serial])
assert len(ads) == 1
self._ad = ads[0] | 0.001744 |
def plot_clock_diagrams(self, colormap="summer"):
"""
Plotting clock diagrams - one or more rings around residue name and id (and chain id).
The rings show the fraction of simulation time this residue has spent in the vicinity of the
ligand - characterised by distance.
"""
cmap = plt.get_cmap(colormap)
for res in self.topology_data.dict_of_plotted_res:
colors = [cmap(i) for i in numpy.linspace(0, 1, len(self.topology_data.dict_of_plotted_res[res]))]
traj_colors_ = {traj:colors[i] for i,traj in enumerate(self.topology_data.dict_of_plotted_res[res])}
plt.figure(figsize=(2.25, 2.25))
ring_number=[sum(1 for x in v if x) for k,v in self.topology_data.dict_of_plotted_res.items()][0]
self.topology_data.ring_number = ring_number
rings=[]
# When only a few rings to plot they can be thicker
if ring_number<2:
width = 0.3
else:
width = 0.2
for ring in range(0,ring_number):
ring,_=plt.pie([self.topology_data.dict_of_plotted_res[res][ring],1-self.topology_data.dict_of_plotted_res[res][ring]], radius=0.9+width*(ring+1), startangle=90, colors=[colors[ring],"white"], counterclock=False)
rings=rings+ring
plt.setp(rings, width=width)
if len(self.topology_data.universe.protein.segments)<=1:
#Settings with domain
plt.text(-0.0,-0.62,res[0]+"\n"+res[1],ha='center',size=32, fontweight='bold')
else:
plt.text(-0.0,-0.72,res[0]+"\n"+res[1]+"\n"+res[2],ha='center',size=25, fontweight='bold')
pylab.savefig(res[1]+res[2]+".svg", dpi=300, transparent=True) | 0.018519 |
def debug_option(f):
"""
Configures --debug option for CLI
:param f: Callback Function to be passed to Click
"""
def callback(ctx, param, value):
state = ctx.ensure_object(Context)
state.debug = value
return value
return click.option('--debug',
expose_value=False,
is_flag=True,
envvar="SAM_DEBUG",
help='Turn on debug logging to print debug message generated by SAM CLI.',
callback=callback)(f) | 0.00354 |
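A hypothetical usage sketch (the command name is illustrative, and invoking it still requires the Context class referenced by the callback above): the helper is stacked like any other click decorator.
import click

@click.command()
@debug_option            # adds --debug / SAM_DEBUG without exposing a parameter
def deploy():
    click.echo('deploying')

# deploy(['--debug'])    # would set state.debug = True before the command body runs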
def set_value(cls, group, key=None, value=None):
"""set|create/update"""
return cls.query.set_value(general_object_model_class=cls, group=group, key=key, value=value) | 0.016484 |
def get_unused_port(port=None):
"""Checks if port is already in use."""
if port is None or port < 1024 or port > 65535:
port = random.randint(1024, 65535)
assert(1024 <= port <= 65535)
while True:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind(('', port)) # Try to open port
except socket.error as e:
if e.errno in (98, 10048): # 98, 10048 means address already bound
return get_unused_port(None)
raise e
s.close()
return port | 0.001776 |
def cancel_spot_requests(self, requests):
"""Cancel one or more EC2 spot instance requests.
:param requests: List of EC2 spot instance request IDs.
:type requests: list
"""
ec2_requests = self.retry_on_ec2_error(self.ec2.get_all_spot_instance_requests, request_ids=requests)
for req in ec2_requests:
req.cancel() | 0.008021 |
def examples():
''' Examples of how to use. By default, some calls are commented out so that
they do not modify existing metadata within the database.
'''
sci = InterLexClient(
api_key = os.environ.get('INTERLEX_API_KEY'),
base_url = 'https://beta.scicrunch.org/api/1/', # NEVER CHANGE
)
entity = {
'label': 'brain115',
'type': 'fde', # broken at the moment NEEDS PDE HARDCODED
'definition': 'Part of the central nervous system',
'comment': 'Cannot live without it',
'superclass': {
'ilx_id': 'ilx_0108124', # ILX ID for Organ
},
'synonyms': [
{
'literal': 'Encephalon'
},
{
'literal': 'Cerebro'
},
],
'existing_ids': [
{
'iri': 'http://uri.neuinfo.org/nif/nifstd/birnlex_796',
'curie': 'BIRNLEX:796',
},
],
}
simple_entity = {
'label': entity['label'],
'type': entity['type'], # broken at the moment NEEDS PDE HARDCODED
'definition': entity['definition'],
'comment': entity['comment'],
'superclass': entity['superclass']['ilx_id'],
'synonyms': [syn['literal'] for syn in entity['synonyms']],
'predicates': {'tmp_0381624': 'http://example_dbxref'}
}
annotation = {
'term_ilx_id': 'ilx_0101431', # brain ILX ID
'annotation_type_ilx_id': 'tmp_0381624', # hasDbXref ILX ID
'annotation_value': 'PMID:12345',
}
relationship = {
'entity1_ilx': 'ilx_0101431', # brain
'relationship_ilx': 'ilx_0115023', # Related to
'entity2_ilx': 'ilx_0108124', #organ
}
update_entity_data = {
'ilx_id': 'ilx_0101431',
'label': 'Brain',
'definition': 'update_test!!',
'type': 'fde',
'comment': 'test comment',
'superclass': 'ilx_0108124',
'synonyms': ['test', 'test2', 'test2'],
}
# resp = sci.delete_annotation(**{
# 'term_ilx_id': 'ilx_0101431', # brain ILX ID
# 'annotation_type_ilx_id': 'ilx_0115071', # hasConstraint ILX ID
# 'annotation_value': 'test_12345',
# })
relationship = {
'entity1_ilx': 'http://uri.interlex.org/base/ilx_0100001', # (R)N6 chemical ILX ID
'relationship_ilx': 'http://uri.interlex.org/base/ilx_0112772', # Afferent projection ILX ID
'entity2_ilx': 'http://uri.interlex.org/base/ilx_0100000', #1,2-Dibromo chemical ILX ID
} | 0.008939 |
def get_form_kwargs(self):
"""
Pass template pack argument
"""
kwargs = super(FormContainersMixin, self).get_form_kwargs()
kwargs.update({
'pack': "foundation-{}".format(self.kwargs.get('foundation_version'))
})
return kwargs | 0.010239 |
def rehash(path, algo='sha256', blocksize=1 << 20):
"""Return (hash, length) for path using hashlib.new(algo)"""
h = hashlib.new(algo)
length = 0
with open(path, 'rb') as f:
block = f.read(blocksize)
while block:
length += len(block)
h.update(block)
block = f.read(blocksize)
digest = 'sha256=' + urlsafe_b64encode(
h.digest()
).decode('latin1').rstrip('=')
return (digest, length) | 0.002132 |
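A short sketch of the helper above hashing this very file; the path is purely illustrative, and the imports the snippet relies on (hashlib, base64.urlsafe_b64encode) are assumed to be in scope.
digest, length = rehash(__file__)
print(digest)   # e.g. 'sha256=...' - urlsafe base64 with '=' padding stripped
print(length)   # size of the file in bytes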
def create_record(self, rtype=None, name=None, content=None, **kwargs):
"""
Create record. If record already exists with the same content, do nothing.
"""
if not rtype and kwargs.get('type'):
warnings.warn('Parameter "type" is deprecated, use "rtype" instead.',
DeprecationWarning)
rtype = kwargs.get('type')
return self._create_record(rtype, name, content) | 0.008929 |
def get_token_from_authorization_code(self,
authorization_code, redirect_uri):
"""Like `get_token`, but using an OAuth 2 authorization code.
Use this method if you run a webserver that serves as an endpoint for
the redirect URI. The webserver can retrieve the authorization code
from the URL that is requested by ORCID.
Parameters
----------
:param redirect_uri: string
The redirect uri of the institution.
:param authorization_code: string
The authorization code.
Returns
-------
:returns: dict
All data of the access token. The access token itself is in the
``"access_token"`` key.
"""
token_dict = {
"client_id": self._key,
"client_secret": self._secret,
"grant_type": "authorization_code",
"code": authorization_code,
"redirect_uri": redirect_uri,
}
response = requests.post(self._token_url, data=token_dict,
headers={'Accept': 'application/json'},
timeout=self._timeout)
response.raise_for_status()
if self.do_store_raw_response:
self.raw_response = response
return json.loads(response.text) | 0.002187 |
def is_tensor_on_canonical_device(self, tensor_name):
"""Whether the tensor is on the first (canonical) device.
Tensors not assigned to a device are assumed to be on all devices, including
the canonical device.
Args:
tensor_name: a string, name of a tensor in the graph.
Returns:
a boolean indicating whether the tensor is on the first device.
"""
device = self.get_tensor_device(tensor_name)
return not device or device == self.canonical_device | 0.004065 |
def generate(bits, progress_func=None):
"""
Generate a new private RSA key. This factory function can be used to
generate a new host key or authentication key.
:param int bits: number of bits the generated key should be.
:param progress_func: Unused
:return: new `.RSAKey` private key
"""
key = rsa.generate_private_key(
public_exponent=65537, key_size=bits, backend=default_backend()
)
return RSAKey(key=key) | 0.003976 |
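A hedged usage sketch; write_private_key_file and get_bits follow the usual paramiko key API and, like the output path, are assumptions rather than something shown in the snippet.
key = generate(2048)                             # new 2048-bit private key
key.write_private_key_file('/tmp/test_rsa.key')  # assumed paramiko-style API
print(key.get_bits())                            # 2048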
def main(cls, args=None):
"""
Fill in command-line arguments from argv
"""
if args is None:
args = sys.argv[1:]
try:
o = cls()
o.parseOptions(args)
except usage.UsageError as e:
print(o.getSynopsis())
print(o.getUsage())
print(str(e))
return 1
except CLIError as ce:
print(str(ce))
return ce.returnCode
return 0 | 0.004124 |
def ManualUpdateTables(self):
"""
Allow user to manually update the database tables.
User options from initial prompt are:
- 'ls' : print database contents
- 'a' : add an row to a database table
- 'd' : delete a single table row
- 'p' : delete an entire table (purge)
- 'f' : finish updates and continue
- 'x' : finish updates and exit
Selecting add, delete or purge will proceed to a further prompt where the
user can enter exactly what information should be added or deleted.
"""
goodlogging.Log.Info("DB", "Starting manual database update:\n")
updateFinished = False
# Loop until the user continues program flow or exits
while not updateFinished:
prompt = "Enter 'ls' to print the database contents, " \
"'a' to add a table entry, " \
"'d' to delete a single table row, " \
"'p' to select a entire table to purge, " \
"'f' to finish or " \
"'x' to exit: "
response = goodlogging.Log.Input("DM", prompt)
goodlogging.Log.NewLine()
goodlogging.Log.IncreaseIndent()
# Exit program
if response.lower() == 'x':
goodlogging.Log.Fatal("DB", "Program exited by user response")
# Finish updating database
elif response.lower() == 'f':
updateFinished = True
# Print database tables
elif response.lower() == 'ls':
self.PrintAllTables()
# Purge a given table
elif response.lower() == 'p':
response = goodlogging.Log.Input("DM", "Enter database table to purge or 'c' to cancel: ")
# Go back to main update selection
if response.lower() == 'c':
goodlogging.Log.Info("DB", "Database table purge cancelled")
# Purge table
else:
if response in self._tableDict.keys():
self._PrintDatabaseTable(response)
deleteConfirmation = goodlogging.Log.Input("DB", "***WARNING*** DELETE ALL ROWS FROM {0} TABLE? [y/n]: ".format(response))
deleteConfirmation = util.ValidUserResponse(deleteConfirmation, ('y', 'n'))
if deleteConfirmation.lower() == 'n':
goodlogging.Log.Info("DB", "Database table purge cancelled")
else:
self._PurgeTable(response)
goodlogging.Log.Info("DB", "{0} database table purged".format(response))
else:
goodlogging.Log.Info("DB", "Unknown table name ({0}) given to purge".format(response))
# Add new row to table
elif response.lower() == 'a':
addFinished = False
while not addFinished:
prompt = "Enter new database row (in format TABLE COL1=VAL COL2=VAL etc) " \
"or 'c' to cancel: "
response = goodlogging.Log.Input("DM", prompt)
# Go back to main update selection
if response.lower() == 'c':
goodlogging.Log.Info("DB", "Database table add cancelled")
addFinished = True
# Add row to table
else:
self._UpdateDatabaseFromResponse(response, 'ADD')
# Delete row(s) from table
elif response.lower() == 'd':
deleteFinished = False
while not deleteFinished:
prompt = "Enter database row to delete (in format TABLE COL1=VAL COL2=VAL etc) " \
"or 'c' to cancel: "
response = goodlogging.Log.Input("DM", prompt)
# Go back to main update selection
if response.lower() == 'c':
goodlogging.Log.Info("DB", "Database table row delete cancelled")
deleteFinished = True
# Delete row(s) from table
else:
self._UpdateDatabaseFromResponse(response, 'DEL')
# Unknown user input given
else:
goodlogging.Log.Info("DB", "Unknown response")
goodlogging.Log.DecreaseIndent()
goodlogging.Log.NewLine()
goodlogging.Log.Info("DB", "Manual database update complete.")
self.PrintAllTables() | 0.011311 |
def _iget(key, lookup_dict):
"""
Case-insensitive search for `key` within keys of `lookup_dict`.
"""
for k, v in lookup_dict.items():
if k.lower() == key.lower():
return v
return None | 0.004484 |
def encode_pin(self, pin, matrix=None):
"""Transform correct PIN according to the displayed matrix."""
if matrix is None:
_, matrix = self.read_pin()
return "".join([str(matrix.index(p) + 1) for p in pin]) | 0.008299 |
def report(self, linenumber, filename, severity, message, rulename, char):
"""Report a rule violation"""
if self._print_filename is not None:
# we print the filename only once. self._print_filename
# will get reset each time a new file is processed.
print("+ " + self._print_filename)
self._print_filename = None
if severity in (WARNING, ERROR):
self.counts[severity] += 1
else:
self.counts["other"] += 1
print(self.args.format.format(linenumber=linenumber, filename=filename,
severity=severity, message=message.encode('utf-8'),
rulename=rulename, char=char)) | 0.004 |
def rand_str(length, allowed=CHARSET_ALPHA_DIGITS):
"""Generate fixed-length random string from your allowed character pool.
:param length: total length of this string.
:param allowed: allowed charset.
Example::
>>> import string
>>> rand_str(32)
H6ExQPNLzb4Vp3YZtfpyzLNPFwdfnwz6
"""
res = list()
for _ in range(length):
res.append(random.choice(allowed))
return "".join(res) | 0.002262 |
def _set_overlay_service_policy(self, v, load=False):
"""
Setter method for overlay_service_policy, mapped from YANG variable /overlay_gateway/overlay_service_policy (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_overlay_service_policy is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_overlay_service_policy() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=overlay_service_policy.overlay_service_policy, is_container='container', presence=False, yang_name="overlay-service-policy", rest_name="overlay-service-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Attach Overlay policy Map to overlay-gateway'}}, namespace='urn:brocade.com:mgmt:brocade-tunnels', defining_module='brocade-tunnels', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """overlay_service_policy must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=overlay_service_policy.overlay_service_policy, is_container='container', presence=False, yang_name="overlay-service-policy", rest_name="overlay-service-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Attach Overlay policy Map to overlay-gateway'}}, namespace='urn:brocade.com:mgmt:brocade-tunnels', defining_module='brocade-tunnels', yang_type='container', is_config=True)""",
})
self.__overlay_service_policy = t
if hasattr(self, '_set'):
self._set() | 0.005969 |
def _is_dir(self, f):
'''Check if the given in-dap file is a directory'''
return self._tar.getmember(f).type == tarfile.DIRTYPE | 0.013986 |
def get_jsapi_ticket(self):
"""
Fetch the WeChat JS-SDK ticket.
The ticket is cached and refreshed automatically through the session object.
:return: ticket
"""
ticket_key = '{0}_jsapi_ticket'.format(self.appid)
expires_at_key = '{0}_jsapi_ticket_expires_at'.format(self.appid)
ticket = self.session.get(ticket_key)
expires_at = self.session.get(expires_at_key, 0)
if not ticket or expires_at < int(time.time()):
jsapi_ticket_response = self.get_ticket('jsapi')
ticket = jsapi_ticket_response['ticket']
expires_at = int(time.time()) + int(jsapi_ticket_response['expires_in'])
self.session.set(ticket_key, ticket)
self.session.set(expires_at_key, expires_at)
return ticket | 0.003937 |
def scanProcessForOpenFile(pid, searchPortion, isExactMatch=True, ignoreCase=False):
'''
scanProcessForOpenFile - Scans open FDs for a given pid to see if any are the provided searchPortion
@param searchPortion <str> - Filename to check
@param isExactMatch <bool> Default True - If match should be exact, otherwise a partial match is performed.
@param ignoreCase <bool> Default False - If True, search will be performed case-insensitively
@return - If result is found, the following dict is returned. If no match found on the given pid, or the pid is not found running, None is returned.
{
'searchPortion' : The search portion provided
'pid' : The passed pid (as an integer)
'owner' : String of process owner, or "unknown" if one could not be determined
'cmdline' : Commandline string
'fds' : List of file descriptors assigned to this file (could be mapped several times)
'filenames' : List of the filenames matched
}
'''
try:
try:
pid = int(pid)
except ValueError as e:
sys.stderr.write('Expected an integer, got %s for pid.\n' %(str(type(pid)),))
raise e
prefixDir = "/proc/%d/fd" % (pid,)
processFDs = os.listdir(prefixDir)
matchedFDs = []
matchedFilenames = []
if isExactMatch is True:
if ignoreCase is False:
isMatch = lambda searchFor, totalPath : bool(searchFor == totalPath)
else:
isMatch = lambda searchFor, totalPath : bool(searchFor.lower() == totalPath.lower())
else:
if ignoreCase is False:
isMatch = lambda searchFor, totalPath : bool(searchFor in totalPath)
else:
isMatch = lambda searchFor, totalPath : bool(searchFor.lower() in totalPath.lower())
for fd in processFDs:
fdPath = os.readlink(prefixDir + '/' + fd)
if isMatch(searchPortion, fdPath):
matchedFDs.append(fd)
matchedFilenames.append(fdPath)
if len(matchedFDs) == 0:
return None
cmdline = getProcessCommandLineStr(pid)
owner = getProcessOwnerStr(pid)
return {
'searchPortion' : searchPortion,
'pid' : pid,
'owner' : owner,
'cmdline' : cmdline,
'fds' : matchedFDs,
'filenames' : matchedFilenames,
}
except OSError:
return None
except IOError:
return None
except FileNotFoundError:
return None
except PermissionError:
return None | 0.012119 |
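A hedged usage sketch (the PID and path are illustrative only): scan one process for an open log file and print the matching descriptors.
result = scanProcessForOpenFile(1234, '/var/log/syslog')
if result is None:
    print('no match (or pid not running)')
else:
    print(result['owner'], result['cmdline'])
    print(result['fds'], result['filenames'])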
def _prune(dir_: str, epochs: int) -> None:
"""
Delete all training dirs with incomplete training artifacts or with less than specified epochs done.
:param dir_: dir with training log dirs
:param epochs: minimum number of finished epochs to keep the training logs
:return: None
"""
for logdir in [path.join(dir_, f) for f in listdir(dir_) if path.isdir(path.join(dir_, f))]:
if not is_train_dir(logdir):
_safe_rmtree(logdir)
else:
trace_path = path.join(logdir, CXF_TRACE_FILE)
try:
epochs_done = TrainingTrace.from_file(trace_path)[TrainingTraceKeys.EPOCHS_DONE]
except (KeyError, TypeError):
epochs_done = 0
if not epochs_done or epochs_done < epochs:
_safe_rmtree(logdir) | 0.004717 |
def press_pwr_btn(self):
"""Simulates a physical press of the server power button.
:raises: IloError, on an error from iLO.
"""
sushy_system = self._get_sushy_system(PROLIANT_SYSTEM_ID)
try:
sushy_system.push_power_button(sys_cons.PUSH_POWER_BUTTON_PRESS)
except sushy.exceptions.SushyError as e:
msg = (self._('The Redfish controller failed to press power button'
' of server. Error %(error)s') %
{'error': str(e)})
LOG.debug(msg)
raise exception.IloError(msg) | 0.003317 |