text (string, lengths 78–104k) | score (float64, 0–0.18)
---|---
def get_dotted(self, key):
"""
obj = qs.get_dotted('foo.bar.baz')
        is equivalent to:
obj = qs.foo.bar.baz
"""
parts = key.split('.')
cobj = self
for attr in parts:
cobj = getattr(cobj, attr)
return cobj | 0.006515 |
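A quick, self-contained sketch of the dotted traversal above, using a hypothetical nested namespace (all names here are invented for illustration):

from types import SimpleNamespace

# qs.foo.bar.baz == 42
qs = SimpleNamespace(foo=SimpleNamespace(bar=SimpleNamespace(baz=42)))

obj = qs
for attr in 'foo.bar.baz'.split('.'):
    obj = getattr(obj, attr)   # walk one attribute per dotted component
assert obj == 42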
def _configure_using_fluent_definition(self):
"""
Configure the console command using a fluent definition.
"""
definition = Parser.parse(self.signature)
self._config.set_name(definition["name"])
for name, flags, description, default in definition["arguments"]:
self._config.add_argument(name, flags, description, default)
for long_name, short_name, flags, description, default in definition["options"]:
self._config.add_option(long_name, short_name, flags, description, default) | 0.00713 |
def __read_byte_offset(decl, attrs):
"""Using duck typing to set the offset instead of in constructor"""
offset = attrs.get(XML_AN_OFFSET, 0)
    # The XML attribute stores the offset in bits; use integer division to get bytes
    decl.byte_offset = int(offset) // 8
def print_summary(self, verbose=False, no_color=False):
'Prints a summary of the validation process so far.'
types = {0: 'Unknown',
1: 'Extension/Multi-Extension',
2: 'Full Theme',
3: 'Dictionary',
4: 'Language Pack',
5: 'Search Provider',
7: 'Subpackage',
8: 'App'}
detected_type = types[self.detected_type]
buffer = StringIO()
self.handler = OutputHandler(buffer, no_color)
# Make a neat little printout.
self.handler.write('\n<<GREEN>>Summary:') \
.write('-' * 30) \
.write('Detected type: <<BLUE>>%s' % detected_type) \
.write('-' * 30)
if self.failed():
self.handler.write('<<BLUE>>Test failed! Errors:')
# Print out all the errors/warnings:
for error in self.errors:
self._print_message('<<RED>>Error:<<NORMAL>>\t',
error, verbose)
for warning in self.warnings:
self._print_message('<<YELLOW>>Warning:<<NORMAL>> ',
warning, verbose)
else:
self.handler.write('<<GREEN>>All tests succeeded!')
if self.notices:
for notice in self.notices:
self._print_message(prefix='<<WHITE>>Notice:<<NORMAL>>\t',
message=notice,
verbose=verbose)
if 'is_jetpack' in self.metadata and verbose:
self.handler.write('\n')
self.handler.write('<<GREEN>>Jetpack add-on detected.<<NORMAL>>\n'
'Identified files:')
if 'jetpack_identified_files' in self.metadata:
for filename, data in \
self.metadata['jetpack_identified_files'].items():
self.handler.write((' %s\n' % filename) +
(' %s : %s' % data))
if 'jetpack_unknown_files' in self.metadata:
self.handler.write('Unknown files:')
for filename in self.metadata['jetpack_unknown_files']:
self.handler.write(' %s' % filename)
self.handler.write('\n')
if self.unfinished:
self.handler.write('<<RED>>Validation terminated early')
self.handler.write('Errors during validation are preventing '
'the validation process from completing.')
self.handler.write('Use the <<YELLOW>>--determined<<NORMAL>> '
'flag to ignore these errors.')
self.handler.write('\n')
return buffer.getvalue() | 0.000718 |
def _dictionary(self):
# type: () -> Dict[str, Any]
"""A dictionary representing the loaded configuration.
"""
# NOTE: Dictionaries are not populated if not loaded. So, conditionals
# are not needed here.
retval = {}
for variant in self._override_order:
retval.update(self._config[variant])
return retval | 0.007732 |
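The override merge above can be demonstrated in isolation; later variants in the order win (the variant names below are assumed for illustration):

_config = {
    'global': {'timeout': 10, 'retries': 3},
    'user': {'timeout': 30},
}
_override_order = ['global', 'user']   # later entries override earlier ones
retval = {}
for variant in _override_order:
    retval.update(_config[variant])
assert retval == {'timeout': 30, 'retries': 3}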
def overlay_gateway_activate(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
overlay_gateway = ET.SubElement(config, "overlay-gateway", xmlns="urn:brocade.com:mgmt:brocade-tunnels")
name_key = ET.SubElement(overlay_gateway, "name")
name_key.text = kwargs.pop('name')
activate = ET.SubElement(overlay_gateway, "activate")
callback = kwargs.pop('callback', self._callback)
return callback(config) | 0.006085 |
def _encode_path(path_items):
"""Take an iterable of ``(path_operation, coordinates)`` tuples
in the same format as from :meth:`Context.copy_path`
    and return a ``(path, data)`` tuple of cdata objects.
The first cdata object is a ``cairo_path_t *`` pointer
that can be used as long as both objects live.
"""
points_per_type = PATH_POINTS_PER_TYPE
path_items = list(path_items)
length = 0
for path_type, coordinates in path_items:
num_points = points_per_type[path_type]
length += 1 + num_points # 1 header + N points
if len(coordinates) != 2 * num_points:
raise ValueError('Expected %d coordinates, got %d.' % (
2 * num_points, len(coordinates)))
data = ffi.new('cairo_path_data_t[]', length)
position = 0
for path_type, coordinates in path_items:
header = data[position].header
header.type = path_type
header.length = 1 + len(coordinates) // 2
position += 1
for i in range(0, len(coordinates), 2):
point = data[position].point
point.x = coordinates[i]
point.y = coordinates[i + 1]
position += 1
path = ffi.new(
'cairo_path_t *',
{'status': constants.STATUS_SUCCESS, 'data': data, 'num_data': length})
return path, data | 0.000748 |
def user_transactions(self, offset=0, limit=100, descending=True,
base=None, quote=None):
"""
        Returns a list of transactions (descending by default). Every transaction (dictionary)
contains::
{u'usd': u'-39.25',
u'datetime': u'2013-03-26 18:49:13',
u'fee': u'0.20',
u'btc': u'0.50000000',
u'type': 2,
u'id': 213642}
Instead of the keys btc and usd, it can contain other currency codes
"""
data = {
'offset': offset,
'limit': limit,
'sort': 'desc' if descending else 'asc',
}
url = self._construct_url("user_transactions/", base, quote)
return self._post(url, data=data, return_json=True, version=2) | 0.003769 |
def publish(self, message_type=ON_SEND, client_id=None, client_storage=None,
*args, **kwargs):
"""
Publishes a message
"""
self.publisher.publish(
message_type, client_id, client_storage, *args, **kwargs) | 0.011364 |
def render(self, **kwargs):
"""Renders the HTML representation of the element."""
figure = self.get_root()
assert isinstance(figure, Figure), ('You cannot render this Element '
'if it is not in a Figure.')
header = self._template.module.__dict__.get('header', None)
if header is not None:
figure.header.add_child(Element(header(self, kwargs)),
name=self.get_name())
html = self._template.module.__dict__.get('html', None)
if html is not None:
figure.html.add_child(Element(html(self, kwargs)),
name=self.get_name())
script = self._template.module.__dict__.get('script', None)
if script is not None:
figure.script.add_child(Element(script(self, kwargs)),
name=self.get_name())
for name, element in self._children.items():
element.render(**kwargs) | 0.001947 |
def install_mathjax(tag='v1.1', replace=False):
"""Download and install MathJax for offline use.
This will install mathjax to the 'static' dir in the IPython notebook
package, so it will fail if the caller does not have write access
to that location.
MathJax is a ~15MB download, and ~150MB installed.
Parameters
----------
replace : bool [False]
Whether to remove and replace an existing install.
tag : str ['v1.1']
Which tag to download. Default is 'v1.1', the current stable release,
but alternatives include 'v1.1a' and 'master'.
"""
mathjax_url = "https://github.com/mathjax/MathJax/tarball/%s"%tag
nbdir = os.path.dirname(os.path.abspath(nbmod.__file__))
static = os.path.join(nbdir, 'static')
dest = os.path.join(static, 'mathjax')
# check for existence and permissions
if not os.access(static, os.W_OK):
raise IOError("Need have write access to %s"%static)
if os.path.exists(dest):
if replace:
if not os.access(dest, os.W_OK):
raise IOError("Need have write access to %s"%dest)
print "removing previous MathJax install"
shutil.rmtree(dest)
else:
print "offline MathJax apparently already installed"
return
# download mathjax
print "Downloading mathjax source..."
response = urllib2.urlopen(mathjax_url)
print "done"
# use 'r|gz' stream mode, because socket file-like objects can't seek:
tar = tarfile.open(fileobj=response.fp, mode='r|gz')
topdir = tar.firstmember.path
print "Extracting to %s"%dest
tar.extractall(static)
# it will be mathjax-MathJax-<sha>, rename to just mathjax
os.rename(os.path.join(static, topdir), dest) | 0.006645 |
def copyHiddenToContext(self):
"""
Uses key to identify the hidden layer associated with each
layer in the self.contextLayers dictionary.
"""
for item in list(self.contextLayers.items()):
if self.verbosity > 2: print('Hidden layer: ', self.getLayer(item[0]).activation)
if self.verbosity > 2: print('Context layer before copy: ', item[1].activation)
item[1].copyActivations(self.getLayer(item[0]).activation)
if self.verbosity > 2: print('Context layer after copy: ', item[1].activation) | 0.015625 |
def on_stop(self):
"""
stop subscriber
"""
LOGGER.debug("zeromq.Subscriber.on_stop")
self.running = False
while self.is_started:
time.sleep(0.1)
self.zmqsocket.close()
self.zmqcontext.destroy() | 0.007435 |
def gaussian(h, Xi, x):
"""
Gaussian Kernel for continuous variables
Parameters
----------
h : 1-D ndarray, shape (K,)
The bandwidths used to estimate the value of the kernel function.
Xi : 1-D ndarray, shape (K,)
The value of the training set.
x : 1-D ndarray, shape (K,)
The value at which the kernel density is being estimated.
Returns
-------
kernel_value : ndarray, shape (nobs, K)
The value of the kernel function at each training point for each var.
"""
return (1. / np.sqrt(2 * np.pi)) * np.exp(-(Xi - x)**2 / (h**2 * 2.)) | 0.001634 |
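A quick numeric sanity check of the kernel above: at Xi == x the exponential is 1, so the value peaks at 1/sqrt(2*pi) regardless of the bandwidth:

import numpy as np

h = np.array([2.0])
Xi = np.array([0.0])
x = np.array([0.0])
val = (1. / np.sqrt(2 * np.pi)) * np.exp(-(Xi - x)**2 / (h**2 * 2.))
assert np.isclose(val[0], 0.3989422804014327)   # peak value 1/sqrt(2*pi)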
def graceful(cls):
""" A decorator to protect against message structure changes.
Many of our processors expect messages to be in a certain format. If the
format changes, they may start to fail and raise exceptions. This decorator
is in place to catch and log those exceptions and to gracefully return
default values.
"""
def _wrapper(f):
@functools.wraps(f)
def __wrapper(msg, **config):
try:
return f(msg, **config)
except KeyError:
log.exception("%r failed on %r" % (f, msg.get('msg_id')))
return cls()
return __wrapper
return _wrapper | 0.00299 |
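A hypothetical usage sketch, assuming the graceful decorator above (and its log) are in scope; a processor that trips on a missing key falls back to an empty set:

@graceful(set)
def usernames(msg, **config):
    return {msg['msg']['user']['name']}

assert usernames({'msg_id': 'abc123'}) == set()   # missing keys -> default set()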
def imagetransformer_b12l_4h_b128_h512_uncond_dr01_im():
"""TPU related imagenet model."""
hparams = imagetransformer_b12l_4h_b256_uncond_dr03_tpu()
update_hparams_for_tpu(hparams)
hparams.batch_size = 4
hparams.optimizer = "Adafactor"
hparams.learning_rate_schedule = "rsqrt_decay"
hparams.learning_rate_warmup_steps = 6000
hparams.layer_prepostprocess_dropout = 0.1
return hparams | 0.025 |
def service(
state, host,
*args, **kwargs
):
'''
    Manage the state of services. This command checks for the presence of all the
    init systems pyinfra can handle and executes the relevant operation. See the
    init-system-specific operations for arguments.
'''
if host.fact.which('systemctl'):
yield systemd(state, host, *args, **kwargs)
return
if host.fact.which('initctl'):
yield upstart(state, host, *args, **kwargs)
return
if host.fact.directory('/etc/init.d'):
yield d(state, host, *args, **kwargs)
return
if host.fact.directory('/etc/rc.d'):
yield rc(state, host, *args, **kwargs)
return
raise OperationError((
'No init system found '
'(no systemctl, initctl, /etc/init.d or /etc/rc.d found)'
)) | 0.003628 |
def add_dynamic_gateway(self, networks):
"""
A dynamic gateway object creates a router object that is
attached to a DHCP interface. You can associate networks with
this gateway address to identify networks for routing on this
interface.
::
route = engine.routing.get(0)
route.add_dynamic_gateway([Network('mynetwork')])
        :param list networks: list of network elements to add to
this DHCP gateway
:raises ModificationAborted: Change must be made at the interface level
:raises UpdateElementFailed: failure to update routing table
:return: Status of whether the route table was updated
:rtype: bool
"""
routing_node_gateway = RoutingNodeGateway(dynamic_classid='gateway',
destinations=networks or [])
return self._add_gateway_node('dynamic_netlink', routing_node_gateway) | 0.006342 |
def free_symbols(self):
"""Set of free SymPy symbols contained within the expression."""
if self._free_symbols is None:
if len(self._vals) == 0:
self._free_symbols = self.operand.free_symbols
else:
dummy_map = {}
for sym in self._vals.keys():
dummy_map[sym] = sympy.Dummy()
# bound symbols may not be atomic, so we have to replace them
# with dummies
self._free_symbols = {
sym for sym
in self.operand.substitute(dummy_map).free_symbols
if not isinstance(sym, sympy.Dummy)}
for val in self._vals.values():
self._free_symbols.update(val.free_symbols)
return self._free_symbols | 0.002395 |
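The dummy-substitution trick above, shown in isolation with plain sympy (using .subs rather than the .substitute method of the operand objects):

import sympy

x, y = sympy.symbols('x y')
expr = x + y
dummy_map = {x: sympy.Dummy()}              # hide the bound symbol
remaining = {s for s in expr.subs(dummy_map).free_symbols
             if not isinstance(s, sympy.Dummy)}
assert remaining == {y}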
def as_object(obj):
"""Return a JSON serializable type for ``o``.
Args:
obj (:py:class:`object`): the object to be serialized.
Raises:
:py:class:`AttributeError`:
            when ``obj`` is not a Python object.
Returns:
(dict): JSON serializable type for the given object.
"""
LOGGER.debug('as_object(%s)', obj)
if isinstance(obj, datetime.date):
return as_date(obj)
elif hasattr(obj, '__dict__'):
# populate dict with visible attributes
out = {k: obj.__dict__[k] for k in obj.__dict__ if not k.startswith('_')}
# populate dict with property names and values
for k, v in (
(p, getattr(obj, p))
for p, _ in inspect.getmembers(
obj.__class__,
lambda x: isinstance(x, property))
):
out[k] = v
return out | 0.002215 |
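A self-contained sketch of the attribute-plus-property harvesting done above (the Point class is invented for illustration):

import inspect

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y
        self._secret = 'hidden'             # underscore-prefixed: skipped
    @property
    def norm2(self):
        return self.x ** 2 + self.y ** 2

p = Point(3, 4)
out = {k: v for k, v in p.__dict__.items() if not k.startswith('_')}
for name, _ in inspect.getmembers(type(p), lambda m: isinstance(m, property)):
    out[name] = getattr(p, name)
assert out == {'x': 3, 'y': 4, 'norm2': 25}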
def parse_arg(arg: str) -> typing.Tuple[str, typing.Any]:
"""
Parse CLI argument in format ``key=value`` to ``(key, value)``
:param arg: CLI argument string
:return: tuple (key, value)
    :raise yaml.YAMLError: on YAML parse error
"""
assert '=' in arg, 'Unrecognized argument `{}`. [name]=[value] expected.'.format(arg)
key = arg[:arg.index('=')]
value = yaml.load(arg[arg.index('=') + 1:])
return key, value | 0.004425 |
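Assuming parse_arg above is importable, the YAML parse gives each value its natural Python type:

assert parse_arg('n_epochs=10') == ('n_epochs', 10)       # int
assert parse_arg('lr=0.001') == ('lr', 0.001)             # float
assert parse_arg('name=resnet') == ('name', 'resnet')     # str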
def _update_keywords(self, **update_props):
""" Update operation for ISO type-specific Keywords metadata: Theme or Place """
tree_to_update = update_props['tree_to_update']
prop = update_props['prop']
values = update_props['values']
keywords = []
if prop in KEYWORD_PROPS:
xpath_root = self._data_map['_keywords_root']
xpath_map = self._data_structures[prop]
xtype = xpath_map['keyword_type']
xroot = xpath_map['keyword_root']
xpath = xpath_map['keyword']
ktype = KEYWORD_TYPES[prop]
# Remove descriptiveKeyword nodes according to type
for element in get_elements(tree_to_update, xpath_root):
if get_element_text(element, xtype).lower() == ktype.lower():
remove_element(tree_to_update, xpath_root)
element = insert_element(tree_to_update, 0, xpath_root)
insert_element(element, 0, xtype, ktype) # Add the type node
keywords.extend(update_property(element, xroot, xpath, prop, values))
return keywords | 0.00353 |
def read_node(self, name, **kwargs): # noqa: E501
"""read_node # noqa: E501
read the specified Node # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_node(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Node (required)
:param str pretty: If 'true', then the output is pretty printed.
:param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
:param bool export: Should this value be exported. Export strips fields that a user can not specify.
:return: V1Node
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_node_with_http_info(name, **kwargs) # noqa: E501
else:
(data) = self.read_node_with_http_info(name, **kwargs) # noqa: E501
return data | 0.001711 |
def get_all_active(self):
"""
Get all of the active messages ordered by the active_datetime.
"""
now = timezone.now()
return self.select_related().filter(active_datetime__lte=now,
inactive_datetime__gte=now).order_by('active_datetime') | 0.012461 |
def send_keys(self, keyserver, *keyids):
"""Send keys to a keyserver."""
result = self._result_map['list'](self)
log.debug('send_keys: %r', keyids)
data = _util._make_binary_stream("", self._encoding)
args = ['--keyserver', keyserver, '--send-keys']
args.extend(keyids)
self._handle_io(args, data, result, binary=True)
log.debug('send_keys result: %r', result.__dict__)
data.close()
return result | 0.004202 |
def delete(self, list_id, webhook_id):
"""
Delete a specific webhook in a list.
:param list_id: The unique id for the list.
:type list_id: :py:class:`str`
:param webhook_id: The unique id for the webhook.
:type webhook_id: :py:class:`str`
"""
self.list_id = list_id
self.webhook_id = webhook_id
return self._mc_client._delete(url=self._build_path(list_id, 'webhooks', webhook_id)) | 0.006508 |
def toCSV(pdl,out=None,write_field_names=True):
"""Conversion from the PyDbLite Base instance pdl to the file object out
open for writing in binary mode
If out is not specified, the field name is the same as the PyDbLite
file with extension .csv
If write_field_names is True, field names are written at the top
of the CSV file"""
import csv
if out is None:
file_name = os.path.splitext(pdl.name)[0]+".csv"
out = open(file_name,"wb")
fields = ["__id__","__version__"]+pdl.fields
writer = csv.DictWriter(out,fields)
# write field names
if write_field_names:
writer.writerow(dict([(k,k) for k in fields]))
# write values
writer.writerows(pdl())
return file_name | 0.010512 |
def web_agent(self, reactor, socks_endpoint, pool=None):
"""
:param socks_endpoint: create one with
:meth:`txtorcon.TorConfig.create_socks_endpoint`. Can be a
Deferred.
:param pool: passed on to the Agent (as ``pool=``)
"""
# local import because there isn't Agent stuff on some
# platforms we support, so this will only error if you try
# this on the wrong platform (pypy [??] and old-twisted)
from txtorcon import web
return web.tor_agent(
reactor,
socks_endpoint,
circuit=self,
pool=pool,
) | 0.003096 |
def gmres(A, b, x0=None, tol=1e-5, restrt=None, maxiter=None, xtype=None,
M=None, callback=None, residuals=None, orthog='householder',
**kwargs):
"""Generalized Minimum Residual Method (GMRES).
GMRES iteratively refines the initial solution guess to the
system Ax = b
Parameters
----------
A : array, matrix, sparse matrix, LinearOperator
n x n, linear system to solve
b : array, matrix
right hand side, shape is (n,) or (n,1)
x0 : array, matrix
initial guess, default is a vector of zeros
tol : float
relative convergence tolerance, i.e. tol is scaled by the norm
of the initial preconditioned residual
restrt : None, int
- if int, restrt is max number of inner iterations
and maxiter is the max number of outer iterations
- if None, do not restart GMRES, and max number of inner iterations
is maxiter
maxiter : None, int
- if restrt is None, maxiter is the max number of inner iterations
and GMRES does not restart
- if restrt is int, maxiter is the max number of outer iterations,
and restrt is the max number of inner iterations
xtype : type
dtype for the solution, default is automatic type detection
M : array, matrix, sparse matrix, LinearOperator
n x n, inverted preconditioner, i.e. solve M A x = M b.
callback : function
User-supplied function is called after each iteration as
callback( ||rk||_2 ), where rk is the current preconditioned residual
vector
residuals : list
residuals contains the preconditioned residual norm history,
including the initial residual.
orthog : string
'householder' calls _gmres_householder which uses Householder
reflections to find the orthogonal basis for the Krylov space.
'mgs' calls _gmres_mgs which uses modified Gram-Schmidt to find the
orthogonal basis for the Krylov space
Returns
-------
(xNew, info)
xNew : an updated guess to the solution of Ax = b
info : halting status of gmres
== =============================================
0 successful exit
>0 convergence to tolerance not achieved,
return iteration count instead. This value
is precisely the order of the Krylov space.
<0 numerical breakdown, or illegal input
== =============================================
Notes
-----
- The LinearOperator class is in scipy.sparse.linalg.interface.
Use this class if you prefer to define A or M as a mat-vec routine
as opposed to explicitly constructing the matrix. A.psolve(..) is
still supported as a legacy.
- The orthogonalization method, orthog='householder', is more robust
than orthog='mgs', however for the majority of problems your
problem will converge before 'mgs' loses orthogonality in your basis.
- orthog='householder' has been more rigorously tested, and is
therefore currently the default
Examples
--------
>>> from pyamg.krylov import gmres
>>> from pyamg.util.linalg import norm
>>> import numpy as np
>>> from pyamg.gallery import poisson
>>> A = poisson((10,10))
>>> b = np.ones((A.shape[0],))
>>> (x,flag) = gmres(A,b, maxiter=2, tol=1e-8)
>>> print norm(b - A*x)
6.5428213057
References
----------
.. [1] Yousef Saad, "Iterative Methods for Sparse Linear Systems,
Second Edition", SIAM, pp. 151-172, pp. 272-275, 2003
http://www-users.cs.umn.edu/~saad/books.html
"""
# pass along **kwargs
if orthog == 'householder':
(x, flag) = gmres_householder(A, b, x0=x0, tol=tol, restrt=restrt,
maxiter=maxiter, xtype=xtype, M=M,
callback=callback, residuals=residuals,
**kwargs)
    elif orthog == 'mgs':
        (x, flag) = gmres_mgs(A, b, x0=x0, tol=tol, restrt=restrt,
                              maxiter=maxiter, xtype=xtype, M=M,
                              callback=callback, residuals=residuals, **kwargs)
    else:
        raise ValueError("orthog must be 'householder' or 'mgs'")
    return (x, flag) | 0.000232 |
def sg_arg_def(**kwargs):
r"""Defines command line options
Args:
**kwargs:
key: A name for the option.
value : Default value or a tuple of (default value, description).
Returns:
None
For example,
```
# Either of the following two lines will define `--n_epoch` command line argument and set its default value as 1.
tf.sg_arg_def(n_epoch=1)
tf.sg_arg_def(n_epoch=(1, 'total number of epochs'))
```
"""
for k, v in kwargs.items():
if type(v) is tuple or type(v) is list:
v, c = v[0], v[1]
else:
c = k
if type(v) is str:
tf.app.flags.DEFINE_string(k, v, c)
elif type(v) is int:
tf.app.flags.DEFINE_integer(k, v, c)
elif type(v) is float:
tf.app.flags.DEFINE_float(k, v, c)
elif type(v) is bool:
tf.app.flags.DEFINE_bool(k, v, c) | 0.002172 |
def connect(self, fn):
"""SQLite connect method initialize db"""
self.conn = sqlite3.connect(fn)
cur = self.get_cursor()
cur.execute('PRAGMA page_size=4096')
cur.execute('PRAGMA FOREIGN_KEYS=ON')
cur.execute('PRAGMA cache_size=10000')
cur.execute('PRAGMA journal_mode=MEMORY') | 0.006024 |
def _ivy_jvm_options(self, repo):
"""Get the JVM options for ivy authentication, if needed."""
# Get authentication for the publish repo if needed.
if not repo.get('auth'):
# No need to copy here, as this list isn't modified by the caller.
return self._jvm_options
# Create a copy of the options, so that the modification is appropriately transient.
jvm_options = copy(self._jvm_options)
user = repo.get('username')
password = repo.get('password')
if user and password:
jvm_options.append('-Dlogin={}'.format(user))
jvm_options.append('-Dpassword={}'.format(password))
else:
raise TaskError('Unable to publish to {}. {}'
.format(repo.get('resolver'), repo.get('help', '')))
return jvm_options | 0.008917 |
def create_authorization(self, scopes=github.GithubObject.NotSet, note=github.GithubObject.NotSet, note_url=github.GithubObject.NotSet, client_id=github.GithubObject.NotSet, client_secret=github.GithubObject.NotSet, onetime_password=None):
"""
:calls: `POST /authorizations <http://developer.github.com/v3/oauth>`_
:param scopes: list of string
:param note: string
:param note_url: string
:param client_id: string
:param client_secret: string
:param onetime_password: string
:rtype: :class:`github.Authorization.Authorization`
"""
assert scopes is github.GithubObject.NotSet or all(isinstance(element, (str, unicode)) for element in scopes), scopes
assert note is github.GithubObject.NotSet or isinstance(note, (str, unicode)), note
assert note_url is github.GithubObject.NotSet or isinstance(note_url, (str, unicode)), note_url
assert client_id is github.GithubObject.NotSet or isinstance(client_id, (str, unicode)), client_id
assert client_secret is github.GithubObject.NotSet or isinstance(client_secret, (str, unicode)), client_secret
assert onetime_password is None or isinstance(onetime_password, (str, unicode)), onetime_password
post_parameters = dict()
if scopes is not github.GithubObject.NotSet:
post_parameters["scopes"] = scopes
if note is not github.GithubObject.NotSet:
post_parameters["note"] = note
if note_url is not github.GithubObject.NotSet:
post_parameters["note_url"] = note_url
if client_id is not github.GithubObject.NotSet:
post_parameters["client_id"] = client_id
if client_secret is not github.GithubObject.NotSet:
post_parameters["client_secret"] = client_secret
if onetime_password is not None:
request_header = {Consts.headerOTP: onetime_password} # pragma no cover (Should be covered)
else:
request_header = None
headers, data = self._requester.requestJsonAndCheck(
"POST",
"/authorizations",
input=post_parameters,
headers=request_header,
)
return github.Authorization.Authorization(self._requester, headers, data, completed=True) | 0.004758 |
def set_debug(self, debug=1):
"""
Set the debug level.
:type debug: int
:param debug: The debug level.
"""
self._check_if_ready()
self.debug = debug
self.main_loop.debug = debug | 0.00823 |
def cfmakeraw(tflags):
"""Given a list returned by :py:func:`termios.tcgetattr`, return a list
modified in a manner similar to the `cfmakeraw()` C library function, but
additionally disabling local echo."""
# BSD: https://github.com/freebsd/freebsd/blob/master/lib/libc/gen/termios.c#L162
# Linux: https://github.com/lattera/glibc/blob/master/termios/cfmakeraw.c#L20
iflag, oflag, cflag, lflag, ispeed, ospeed, cc = tflags
iflag &= ~flags('IMAXBEL IXOFF INPCK BRKINT PARMRK ISTRIP INLCR ICRNL IXON IGNPAR')
iflag &= ~flags('IGNBRK BRKINT PARMRK')
oflag &= ~flags('OPOST')
lflag &= ~flags('ECHO ECHOE ECHOK ECHONL ICANON ISIG IEXTEN NOFLSH TOSTOP PENDIN')
cflag &= ~flags('CSIZE PARENB')
cflag |= flags('CS8 CREAD')
return [iflag, oflag, cflag, lflag, ispeed, ospeed, cc] | 0.006083 |
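A hypothetical usage sketch: applying the modified attributes to a tty and restoring them afterwards (assumes cfmakeraw above is in scope; POSIX only):

import sys
import termios

fd = sys.stdin.fileno()
old_attrs = termios.tcgetattr(fd)
try:
    termios.tcsetattr(fd, termios.TCSADRAIN, cfmakeraw(old_attrs))
    # ... read raw, unechoed input here ...
finally:
    termios.tcsetattr(fd, termios.TCSADRAIN, old_attrs)   # always restore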
def _proc_error(ifn: str, e: Exception) -> None:
""" Report an error
:param ifn: Input file name
:param e: Exception to report
"""
type_, value_, traceback_ = sys.exc_info()
traceback.print_tb(traceback_, file=sys.stderr)
print(file=sys.stderr)
print("***** ERROR: %s" % ifn, file=sys.stderr)
print(str(e), file=sys.stderr) | 0.005063 |
def get_hash(self):
"""Generate and return the dict index hash of the given queue item.
Note:
Cookies should not be included in the hash calculation because
otherwise requests are crawled multiple times with e.g. different
session keys, causing infinite crawling recursion.
        Note:
            At this moment the keys do not actually get hashed, since it works
            perfectly well without and hashing them would require us to build
            hash-collision management.
Returns:
str: The hash of the given queue item.
"""
if self.__index_hash:
return self.__index_hash
key = self.request.method
key += URLHelper.get_protocol(self.request.url)
key += URLHelper.get_subdomain(self.request.url)
key += URLHelper.get_hostname(self.request.url)
key += URLHelper.get_tld(self.request.url)
key += URLHelper.get_path(self.request.url)
key += str(URLHelper.get_ordered_params(self.request.url))
if self.request.data is not None:
key += str(self.request.data.keys())
self.__index_hash = key
return self.__index_hash | 0.003311 |
def norm(self, coords: Vector3Like, frac_coords: bool = True) -> float:
"""
Compute the norm of vector(s).
Args:
coords:
Array-like object with the coordinates.
frac_coords:
Boolean stating whether the vector corresponds to fractional or
cartesian coordinates.
Returns:
one-dimensional `numpy` array.
"""
return np.sqrt(self.dot(coords, coords, frac_coords=frac_coords)) | 0.003968 |
def get(self, sid):
"""
Constructs a MessageContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.api.v2010.account.message.MessageContext
:rtype: twilio.rest.api.v2010.account.message.MessageContext
"""
return MessageContext(self._version, account_sid=self._solution['account_sid'], sid=sid, ) | 0.007752 |
def show(self, baseAppInstance):
"""Allows to show the widget as root window"""
self.from_dict_to_fields(self.configDict)
super(ProjectConfigurationDialog, self).show(baseAppInstance) | 0.009662 |
def get_or_create(self, qualifier, new_parameter, **kwargs):
"""
Get a :class:`Parameter` from the ParameterSet, if it does not exist,
create and attach it.
Note: running this on a ParameterSet that is NOT a
:class:`phoebe.frontend.bundle.Bundle`,
will NOT add the Parameter to the bundle, but only the temporary
ParameterSet
:parameter str qualifier: the qualifier of the :class:`Parameter`
(note, not the twig)
:parameter new_parameter: the parameter to attach if no
result is found
:type new_parameter: :class:`Parameter`
:parameter **kwargs: meta-tags to search - will also be applied to
new_parameter if it is attached.
:return: Parameter, created
:rtype: :class:`Parameter`, bool
:raises ValueError: if more than 1 result was found using the search
criteria.
"""
ps = self.filter_or_get(qualifier=qualifier, **kwargs)
if isinstance(ps, Parameter):
return ps, False
elif len(ps):
# TODO: custom exception?
raise ValueError("more than 1 result was found")
else:
self._attach_params(ParameterSet([new_parameter]), **kwargs)
logger.debug("creating and attaching new parameter: {}".format(new_parameter.qualifier))
return self.filter_or_get(qualifier=qualifier, **kwargs), True | 0.002046 |
def resolveWithMib(self, mibViewController):
"""Perform MIB variable ID conversion.
Parameters
----------
mibViewController : :py:class:`~pysnmp.smi.view.MibViewController`
class instance representing MIB browsing functionality.
Returns
-------
: :py:class:`~pysnmp.smi.rfc1902.ObjectIdentity`
reference to itself
Raises
------
SmiError
            In case of a fatal MIB handling error
Notes
-----
Calling this method might cause the following sequence of
events (exact details depends on many factors):
* ASN.1 MIB file downloaded and handed over to
:py:class:`~pysmi.compiler.MibCompiler` for conversion into
Python MIB module (based on pysnmp classes)
* Python MIB module is imported by SNMP engine, internal indices
created
* :py:class:`~pysnmp.smi.view.MibViewController` looks up the
rest of MIB identification information based on whatever information
is already available, :py:class:`~pysnmp.smi.rfc1902.ObjectIdentity`
class instance
gets updated and ready for further use.
Examples
--------
>>> objectIdentity = ObjectIdentity('SNMPv2-MIB', 'sysDescr')
>>> objectIdentity.resolveWithMib(mibViewController)
ObjectIdentity('SNMPv2-MIB', 'sysDescr')
>>>
"""
if self._mibSourcesToAdd is not None:
debug.logger & debug.FLAG_MIB and debug.logger(
'adding MIB sources %s' % ', '.join(self._mibSourcesToAdd))
mibViewController.mibBuilder.addMibSources(
*[ZipMibSource(x) for x in self._mibSourcesToAdd])
self._mibSourcesToAdd = None
if self._asn1SourcesToAdd is None:
addMibCompiler(
mibViewController.mibBuilder,
ifAvailable=True, ifNotAdded=True)
else:
debug.logger & debug.FLAG_MIB and debug.logger(
'adding MIB compiler with source paths '
'%s' % ', '.join(self._asn1SourcesToAdd))
addMibCompiler(
mibViewController.mibBuilder,
sources=self._asn1SourcesToAdd,
searchers=self._asn1SourcesOptions.get('searchers'),
borrowers=self._asn1SourcesOptions.get('borrowers'),
destination=self._asn1SourcesOptions.get('destination'),
ifAvailable=self._asn1SourcesOptions.get('ifAvailable'),
ifNotAdded=self._asn1SourcesOptions.get('ifNotAdded')
)
self._asn1SourcesToAdd = self._asn1SourcesOptions = None
if self._modNamesToLoad is not None:
debug.logger & debug.FLAG_MIB and debug.logger(
'loading MIB modules %s' % ', '.join(self._modNamesToLoad))
mibViewController.mibBuilder.loadModules(*self._modNamesToLoad)
self._modNamesToLoad = None
if self._state & self.ST_CLEAN:
return self
MibScalar, MibTableColumn = mibViewController.mibBuilder.importSymbols(
'SNMPv2-SMI', 'MibScalar', 'MibTableColumn')
self._indices = ()
if isinstance(self._args[0], ObjectIdentity):
self._args[0].resolveWithMib(mibViewController)
if len(self._args) == 1: # OID or label or MIB module
debug.logger & debug.FLAG_MIB and debug.logger(
'resolving %s as OID or label' % self._args)
try:
# pyasn1 ObjectIdentifier or sequence of ints or string OID
self._oid = rfc1902.ObjectName(self._args[0]) # OID
except PyAsn1Error:
# sequence of sub-OIDs and labels
if isinstance(self._args[0], (list, tuple)):
prefix, label, suffix = mibViewController.getNodeName(
self._args[0])
# string label
elif '.' in self._args[0]:
prefix, label, suffix = mibViewController.getNodeNameByOid(
tuple(self._args[0].split('.')))
# MIB module name
else:
modName = self._args[0]
mibViewController.mibBuilder.loadModules(modName)
if self._kwargs.get('last'):
(prefix, label,
suffix) = mibViewController.getLastNodeName(modName)
else:
(prefix, label,
suffix) = mibViewController.getFirstNodeName(modName)
if suffix:
try:
suffix = tuple(int(x) for x in suffix)
except ValueError:
raise SmiError(
'Unknown object name component %r' % (suffix,))
self._oid = rfc1902.ObjectName(prefix + suffix)
else:
prefix, label, suffix = mibViewController.getNodeNameByOid(
self._oid)
debug.logger & debug.FLAG_MIB and debug.logger(
'resolved %r into prefix %r and suffix '
'%r' % (self._args, prefix, suffix))
modName, symName, _ = mibViewController.getNodeLocation(prefix)
self._modName = modName
self._symName = symName
self._label = label
mibNode, = mibViewController.mibBuilder.importSymbols(
modName, symName)
self._mibNode = mibNode
debug.logger & debug.FLAG_MIB and debug.logger(
'resolved prefix %r into MIB node %r' % (prefix, mibNode))
if isinstance(mibNode, MibTableColumn): # table column
if suffix:
rowModName, rowSymName, _ = mibViewController.getNodeLocation(
mibNode.name[:-1]
)
rowNode, = mibViewController.mibBuilder.importSymbols(
rowModName, rowSymName
)
self._indices = rowNode.getIndicesFromInstId(suffix)
elif isinstance(mibNode, MibScalar): # scalar
if suffix:
self._indices = (rfc1902.ObjectName(suffix),)
else:
if suffix:
self._indices = (rfc1902.ObjectName(suffix),)
self._state |= self.ST_CLEAN
debug.logger & debug.FLAG_MIB and debug.logger(
'resolved indices are %r' % (self._indices,))
return self
elif len(self._args) > 1: # MIB, symbol[, index, index ...]
# MIB, symbol, index, index
if self._args[0] and self._args[1]:
self._modName = self._args[0]
self._symName = self._args[1]
# MIB, ''
elif self._args[0]:
mibViewController.mibBuilder.loadModules(self._args[0])
if self._kwargs.get('last'):
(prefix, label,
suffix) = mibViewController.getLastNodeName(self._args[0])
else:
(prefix, label,
suffix) = mibViewController.getFirstNodeName(self._args[0])
(self._modName,
self._symName, _) = mibViewController.getNodeLocation(prefix)
# '', symbol, index, index
else:
(prefix, label,
suffix) = mibViewController.getNodeName(self._args[1:])
(self._modName,
self._symName, _) = mibViewController.getNodeLocation(prefix)
mibNode, = mibViewController.mibBuilder.importSymbols(
self._modName, self._symName)
self._mibNode = mibNode
self._oid = rfc1902.ObjectName(mibNode.getName())
(prefix, label,
suffix) = mibViewController.getNodeNameByOid(
self._oid)
self._label = label
debug.logger & debug.FLAG_MIB and debug.logger(
'resolved %r into prefix %r and suffix '
'%r' % (self._args, prefix, suffix))
if isinstance(mibNode, MibTableColumn): # table
rowModName, rowSymName, _ = mibViewController.getNodeLocation(
mibNode.name[:-1])
rowNode, = mibViewController.mibBuilder.importSymbols(
rowModName, rowSymName)
if self._args[2:]:
try:
instIds = rowNode.getInstIdFromIndices(*self._args[2:])
self._oid += instIds
self._indices = rowNode.getIndicesFromInstId(instIds)
except PyAsn1Error as exc:
raise SmiError(
'Instance index %r to OID conversion failure '
'at object %r: '
'%s' % (self._args[2:], mibNode.getLabel(), exc))
elif self._args[2:]: # any other kind of MIB node with indices
if self._args[2:]:
instId = rfc1902.ObjectName(
'.'.join(str(x) for x in self._args[2:]))
self._oid += instId
self._indices = (instId,)
self._state |= self.ST_CLEAN
debug.logger & debug.FLAG_MIB and debug.logger(
'resolved indices are %r' % (self._indices,))
return self
else:
raise SmiError('Non-OID, label or MIB symbol') | 0.000412 |
def _main_loop(self):
'''
Continuous loop that reads from a kafka topic and tries to validate
incoming messages
'''
self.logger.debug("Processing messages")
old_time = 0
while True:
self._process_messages()
if self.settings['STATS_DUMP'] != 0:
new_time = int(old_div(time.time(), self.settings['STATS_DUMP']))
# only log every X seconds
if new_time != old_time:
self._dump_stats()
old_time = new_time
self._report_self()
time.sleep(self.settings['SLEEP_TIME']) | 0.004594 |
def readTuple(self, stream):
"""Read symbol from stream. Returns symbol, length.
"""
length, symbol = self.decodePeek(stream.peek(self.maxLength))
stream.pos += length
return length, symbol | 0.008734 |
def call(self, method, args={}, retry=False, retry_policy=None,
ticket=None, **props):
"""Send message to the same actor and return :class:`AsyncResult`."""
ticket = ticket or uuid()
reply_q = self.get_reply_queue(ticket)
self.cast(method, args, declare=[reply_q], reply_to=ticket, **props)
return self.AsyncResult(ticket, self) | 0.007874 |
def preserve_builtin_query_params(url, request=None):
"""
Given an incoming request, and an outgoing URL representation,
append the value of any built-in query parameters.
"""
if request is None:
return url
overrides = [
api_settings.URL_FORMAT_OVERRIDE,
]
for param in overrides:
if param and (param in request.GET):
value = request.GET[param]
url = replace_query_param(url, param, value)
return url | 0.002053 |
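For context, a minimal replace_query_param consistent with how it is used above (an assumed equivalent; the real helper lives in the framework):

from urllib.parse import parse_qs, urlencode, urlparse, urlunparse

def replace_query_param(url, key, val):
    parts = urlparse(url)
    query = parse_qs(parts.query)
    query[key] = [val]                     # set or override the parameter
    return urlunparse(parts._replace(query=urlencode(query, doseq=True)))

assert (replace_query_param('/api/items/?page=2', 'format', 'json')
        == '/api/items/?page=2&format=json')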
def two_lorentzian(freq, freq0_1, freq0_2, area1, area2, hwhm1, hwhm2, phase1,
phase2, offset, drift):
"""
A two-Lorentzian model.
This is simply the sum of two lorentzian functions in some part of the
spectrum. Each individual Lorentzian has its own peak frequency, area, hwhm
and phase, but they share common offset and drift parameters.
"""
return (lorentzian(freq, freq0_1, area1, hwhm1, phase1, offset, drift) +
lorentzian(freq, freq0_2, area2, hwhm2, phase2, offset, drift)) | 0.007463 |
def get_permission(context, method, *args, **kwargs):
""" This will return a boolean indicating if the considered permission is granted for the passed
user.
Usage::
{% get_permission 'can_access_moderation_panel' request.user as var %}
"""
request = context.get('request', None)
perm_handler = request.forum_permission_handler if request else PermissionHandler()
allowed_methods = inspect.getmembers(perm_handler, predicate=inspect.ismethod)
allowed_method_names = [a[0] for a in allowed_methods if not a[0].startswith('_')]
if method not in allowed_method_names:
raise template.TemplateSyntaxError(
'Only the following methods are allowed through '
'this templatetag: {}'.format(allowed_method_names))
perm_method = getattr(perm_handler, method)
return perm_method(*args, **kwargs) | 0.005708 |
def input(self, *args, **kwargs):
"""
Adapt the input and check for errors.
Returns a tuple of adapted (args, kwargs) or raises
AnticipateErrors
"""
errors = []
if args and self.arg_names:
args = list(args)
# Replace args inline that have adapters
for i, (key, val) in enumerate(izip(self.arg_names, args)):
try:
args[i] = self._adapt_param(key, val)
except AnticipateParamError as e:
errors.append(e)
args = tuple(args)
if kwargs and self.params:
# Adapt all adaptable arguments
for key, val in kwargs.items():
try:
kwargs[key] = self._adapt_param(key, val)
except AnticipateParamError as e:
errors.append(e)
if errors:
raise AnticipateErrors(
message='Invalid input for %s' % self.func,
errors=errors)
return args, kwargs | 0.001873 |
def find(max_depth=3):
"""Returns the path of a Pipfile in parent directories."""
i = 0
for c, d, f in walk_up(os.getcwd()):
i += 1
if i < max_depth:
            if 'Pipfile' in f:
p = os.path.join(c, 'Pipfile')
if os.path.isfile(p):
return p
raise RuntimeError('No Pipfile found!') | 0.004975 |
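For context, a minimal walk_up generator consistent with how find() uses it (an assumed implementation; the real helper lives elsewhere in the codebase):

import os

def walk_up(bottom):
    """Yield (dirpath, dirnames, filenames) for bottom and each parent directory."""
    bottom = os.path.realpath(bottom)
    while True:
        names = os.listdir(bottom)
        dirs = [n for n in names if os.path.isdir(os.path.join(bottom, n))]
        files = [n for n in names if n not in dirs]
        yield bottom, dirs, files
        parent = os.path.dirname(bottom)
        if parent == bottom:               # reached the filesystem root
            return
        bottom = parent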
def _makewindows(self, indices, window):
"""
Make masks used by windowing functions
Given a list of indices specifying window centers,
and a window size, construct a list of index arrays,
one per window, that index into the target array
Parameters
----------
indices : array-like
List of times specifying window centers
window : int
Window size
"""
div = divmod(window, 2)
before = div[0]
after = div[0] + div[1]
index = asarray(self.index)
indices = asarray(indices)
if where(index == max(indices))[0][0] + after > len(index):
raise ValueError("Maximum requested index %g, with window %g, exceeds length %g"
% (max(indices), window, len(index)))
if where(index == min(indices))[0][0] - before < 0:
raise ValueError("Minimum requested index %g, with window %g, is less than 0"
% (min(indices), window))
masks = [arange(where(index == i)[0][0]-before, where(index == i)[0][0]+after, dtype='int') for i in indices]
return masks | 0.004216 |
def _do_names(names, fun, path=None):
'''
Invoke a function in the lxc module with no args
path
path to the container parent
default: /var/lib/lxc (system default)
.. versionadded:: 2015.8.0
'''
ret = {}
hosts = find_guests(names, path=path)
if not hosts:
return False
client = salt.client.get_local_client(__opts__['conf_file'])
for host, sub_names in six.iteritems(hosts):
cmds = []
for name in sub_names:
cmds.append(client.cmd_iter(
host,
'lxc.{0}'.format(fun),
[name],
kwarg={'path': path},
timeout=60))
for cmd in cmds:
data = next(cmd)
data = data.get(host, {}).get('ret', None)
if data:
ret.update({host: data})
return ret | 0.00112 |
def copy_ifcfg_file(source_interface, dest_interface):
"""Copies an existing ifcfg network script to another
:param source_interface: String (e.g. 1)
:param dest_interface: String (e.g. 0:0)
:return: None
:raises TypeError, OSError
"""
log = logging.getLogger(mod_logger + '.copy_ifcfg_file')
# Validate args
if not isinstance(source_interface, basestring):
msg = 'source_interface argument must be a string'
log.error(msg)
raise TypeError(msg)
if not isinstance(dest_interface, basestring):
msg = 'dest_interface argument must be a string'
log.error(msg)
raise TypeError(msg)
network_script = '/etc/sysconfig/network-scripts/ifcfg-eth'
source_file = network_script + source_interface
dest_file = network_script + dest_interface
command = ['cp', '-f', source_file, dest_file]
try:
result = run_command(command)
code = result['code']
except CommandError:
_, ex, trace = sys.exc_info()
msg = 'Unable to copy the ifcfg file from interface {s} to interface {d}\n{e}'.format(
s=source_interface, d=dest_interface, e=str(ex))
raise OSError, msg, trace
log.info('Copy command exited with code: {c}'.format(c=code))
if code != 0:
        msg = 'There was a problem copying file {s} to {d}'.format(s=source_file, d=dest_file)
log.error(msg)
raise OSError(msg)
# Updating the destination network script DEVICE property
try:
sed(file_path=dest_file, pattern='^DEVICE=.*',
replace_str='DEVICE="eth{i}"'.format(i=dest_interface))
except CommandError:
_, ex, trace = sys.exc_info()
msg = 'Unable to update DEVICE in file: {d}\n{e}'.format(
d=dest_file, e=str(ex))
log.error(msg)
raise CommandError, msg, trace
log.info('Successfully created file: {d}'.format(d=dest_file))
log.info('Restarting networking in 10 seconds to ensure the changes take effect...')
time.sleep(10)
retry_time = 10
max_retries = 10
for i in range(1, max_retries+2):
if i > max_retries:
msg = 'Unable to successfully start the networking service after {m} attempts'.format(m=max_retries)
log.error(msg)
raise OSError(msg)
log.info('Attempting to restart the networking service, attempt #{i} of {m}'.format(i=i, m=max_retries))
try:
service_network_restart()
except CommandError:
_, ex, trace = sys.exc_info()
log.warn('Attempted unsuccessfully to restart networking on attempt #{i} of {m}, trying again in {t} '
'seconds\n{e}'.format(i=i, m=max_retries, t=retry_time, e=str(ex)))
time.sleep(retry_time)
else:
log.info('Successfully restarted networking')
break
log.info('Successfully configured interface: {d}'.format(d=dest_interface)) | 0.002702 |
def script_current_send(self, seq, force_mavlink1=False):
'''
This message informs about the currently active SCRIPT.
seq : Active Sequence (uint16_t)
'''
return self.send(self.script_current_encode(seq), force_mavlink1=force_mavlink1) | 0.00885 |
def draw(self, **kwargs):
"""
Renders the rfecv curve.
"""
# Compute the curves
x = self.n_feature_subsets_
means = self.cv_scores_.mean(axis=1)
sigmas = self.cv_scores_.std(axis=1)
# Plot one standard deviation above and below the mean
self.ax.fill_between(x, means - sigmas, means+sigmas, alpha=0.25)
# Plot the curve
self.ax.plot(x, means, 'o-')
# Plot the maximum number of features
self.ax.axvline(
self.n_features_, c='k', ls='--',
label="n_features = {}\nscore = {:0.3f}".format(
self.n_features_, self.cv_scores_.mean(axis=1).max()
)
)
return self.ax | 0.004082 |
def _model(self, beta):
""" Creates the structure of the model (model matrices, etc)
Parameters
----------
beta : np.array
Contains untransformed starting values for the latent variables
Returns
----------
lambda : np.array
Contains the values for the conditional volatility series
Y : np.array
Contains the length-adjusted time series (accounting for lags)
scores : np.array
Contains the score terms for the time series
"""
Y = np.array(self.data[self.max_lag:self.data.shape[0]])
X = np.ones(Y.shape[0])
scores = np.zeros(Y.shape[0])
# Transform latent variables
parm = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])])
lmda = np.ones(Y.shape[0])*parm[0]
# Loop over time series
for t in range(0, Y.shape[0]):
if t < self.max_lag:
lmda[t] = parm[0]/(1-np.sum(parm[1:(self.p+1)]))
else:
# Loop over GARCH terms
for p_term in range(0, self.p):
lmda[t] += parm[1+p_term]*lmda[t-p_term-1]
# Loop over Score terms
for q_term in range(0, self.q):
lmda[t] += parm[1+self.p+q_term]*scores[t-q_term-1]
if self.leverage is True:
lmda[t] += parm[-3]*np.sign(-(Y[t-1]-parm[-1]))*(scores[t-1]+1)
scores[t] = (((parm[-2]+1.0)*np.power(Y[t]-parm[-1],2))/float(parm[-2]*np.exp(lmda[t]) + np.power(Y[t]-parm[-1],2))) - 1.0
return lmda, Y, scores | 0.007493 |
def load(self, filething):
"""load(filething)
Load file information from a filename.
Args:
filething (filething)
Raises:
mutagen.MutagenError
"""
fileobj = filething.fileobj
try:
self.info = self._Info(fileobj)
self.tags = self._Tags(fileobj, self.info)
self.info._post_tags(fileobj)
except (error, IOError) as e:
reraise(self._Error, e, sys.exc_info()[2])
except EOFError:
raise self._Error("no appropriate stream found") | 0.003436 |
def commit_config(self, message=""):
"""Implementation of NAPALM method commit_config."""
if message:
raise NotImplementedError(
"Commit message not implemented for this platform"
)
commands = [
"copy startup-config flash:rollback-0",
"configure session {}".format(self.config_session),
"commit",
"write memory",
]
self.device.run_commands(commands)
self.config_session = None | 0.003899 |
async def _run(self):
"""后台任务更新时间戳和重置序号"""
tick_gen = _task_idle_ticks(0.5*self._shard_ttl)
self._is_running = True
self._ready_event.clear()
while True:
try:
await self._lease_shard()
break
except grpc.RpcError as exc:
nap = _rand_uniform(3, 15)
logger.warn(f'failed in gRPC [{exc.code()}]: {exc.details()} '
f'. napping {nap:.0f} secs ...')
if await self._continueAfterSleep(nap):
continue
else:
return
assert self._shard_id is not None
try:
while self._is_running:
self._ready_event.clear()
try:
await self._renew_timestamp()
await self._keepalive_shard()
except grpc.RpcError as exc:
# exc.code()==grpc.StatusCode.UNAVAILABLE
nap = _rand_uniform(3, 15)
logger.warn(f'failed in grpc[{exc.code()}]: {exc.details()}'
f', napping {nap:.0f}secs ...')
if await self._continueAfterSleep(nap):
continue
else:
break
self._ready_event.set()
if await self._continueAfterSleep(next(tick_gen)):
continue
else:
break
except asyncio.CancelledError:
pass
except Exception:
logger.error(f'Error in shard#{self._shard_id}:', exc_info=True)
finally:
self._ready_event.clear()
            await self._lease.revoke()  # revoke the lease
logger.debug(f'shard#{self._shard_id}, the lease revoked') | 0.004219 |
async def async_run_command(self, command, retry=False):
"""Run commands through an SSH connection.
Connect to the SSH server if not currently connected, otherwise
use the existing connection.
"""
if not self.is_connected:
await self.async_connect()
try:
result = await asyncio.wait_for(self._client.run(
"%s && %s" % (_PATH_EXPORT_COMMAND, command)), 9)
except asyncssh.misc.ChannelOpenError:
if not retry:
await self.async_connect()
                return await self.async_run_command(command, retry=True)
else:
self._connected = False
_LOGGER.error("No connection to host")
return []
except TimeoutError:
del self._client
self._connected = False
_LOGGER.error("Host timeout.")
return []
self._connected = True
return result.stdout.split('\n') | 0.002006 |
def is_response_correct(self, response):
"""returns True if response evaluates to an Item Answer that is 100 percent correct"""
for answer in self.my_osid_object.get_answers():
if self._is_match(response, answer):
return True
return False | 0.010345 |
def hierarchy_cycles(rdf, fix=False):
"""Check if the graph contains skos:broader cycles and optionally break these.
:param Graph rdf: An rdflib.graph.Graph object.
:param bool fix: Fix the problem by removing any skos:broader that overlaps
with skos:broaderTransitive.
"""
top_concepts = sorted(rdf.subject_objects(SKOS.hasTopConcept))
status = {}
for cs, root in top_concepts:
_hierarchy_cycles_visit(
rdf, root, None, fix, status=status)
# double check that all concepts were actually visited in the search,
# and visit remaining ones if necessary
recheck_top_concepts = False
for conc in sorted(rdf.subjects(RDF.type, SKOS.Concept)):
if conc not in status:
recheck_top_concepts = True
_hierarchy_cycles_visit(
rdf, conc, None, fix, status=status)
return recheck_top_concepts | 0.002212 |
def __create_log_props(cls, log_props, _getdict, _setdict): # @NoSelf
"""Creates all the logical property.
The list of names of properties to be created is passed
with frozenset log_props. The getter/setter information is
taken from _{get,set}dict.
This method resolves also wildcards in names, and performs
all checks to ensure correctness.
        Returns the frozen set of the actually created properties
        (not every entry of log_props is necessarily created, e.g. when no
        getter is provided, in which case a warning is issued).
"""
real_log_props = set()
resolved_getdict = {}
resolved_setdict = {}
for _dict_name, _dict, _resolved_dict in (
("getter", _getdict, resolved_getdict),
("setter", _setdict, resolved_setdict)):
# first resolve all wildcards
for pat, ai in ((pat, ai)
for pat, ai in _dict.items()
if frozenset(pat) & WILDCARDS):
matches = fnmatch.filter(log_props, pat)
for match in matches:
if match in _resolved_dict:
raise NameError("In class %s.%s %s property '%s' "
"is matched multiple times"
" by patterns" % \
(cls.__module__, cls.__name__, _dict_name, match))
_resolved_dict[match] = ai
if not matches:
logger.warning("In class %s.%s %s pattern '%s' "
"did not match any existing "
"logical property",
cls.__module__, cls.__name__, _dict_name, pat)
            # now adds the exact matches (no wildcards) which override
# the pattern-matches
_resolved_dict.update((name, ai)
for name, ai in _dict.items()
if name in log_props)
# checks that all getter/setter have a corresponding logical
# property
not_found = [name for name in _resolved_dict
if name not in log_props]
if not_found:
logger.warning("In class %s.%s logical %s were declared for "
"non-existent observables: %s",
cls.__module__, cls.__name__, _dict_name,
str(not_found))
# creates the properties
for name in log_props:
# finds the getter
ai_get = resolved_getdict.get(name, None)
if ai_get:
# decorator-based
_getter = type(cls).get_getter(cls, name, ai_get.func,
ai_get.has_args)
_deps = ai_get.deps
else:
# old style
_getter = type(cls).get_getter(cls, name)
if _getter is None:
raise RuntimeError("In class %s.%s "
"logical observable '%s' "
"has no getter method" % \
(cls.__module__, cls.__name__, name))
_deps = type(cls)._get_old_style_getter_deps(cls, name,
_getter)
# finds the setter
ai_set = resolved_setdict.get(name, None)
if ai_set:
# decorator-based
if ai_get:
_setter = type(cls).get_setter(cls, name,
ai_set.func, ai_set.has_args,
ai_get.func, ai_get.has_args)
else:
# the getter is old style. _getter is already
# resolved wrt the name it may take, so
# getter_takes_name is False
_setter = type(cls).get_setter(cls, name,
ai_set.func, ai_set.has_args,
_getter, False)
else:
# old style setter
if ai_get:
_setter = type(cls).get_setter(cls, name,
None, None,
ai_get.func,
ai_get.has_args)
else:
_setter = type(cls).get_setter(cls, name)
# creates the logical property, here _setter can be None
prop = PropertyMeta.LogicalOP(_getter, _setter, frozenset(_deps))
setattr(cls, name, prop)
real_log_props.add(name)
# checks that all setters have a getter
setters_no_getters = (set(resolved_setdict) - real_log_props) & \
log_props
if setters_no_getters:
logger.warning("In class %s.%s logical setters have no "
"getters: %s",
cls.__module__, cls.__name__,
", ".join(setters_no_getters))
return frozenset(real_log_props) | 0.00239 |
def _param32(ins):
""" Pushes 32bit param into the stack
"""
output = _32bit_oper(ins.quad[1])
output.append('push de')
output.append('push hl')
return output | 0.005495 |
def clone(self):
"""
Do not initialize again since everything is ready to launch app.
:return: Initialized monitor instance
"""
return Monitor(org=self.org, app=self.app, env=self.env) | 0.008929 |
def DSP_callback_tic(self):
"""
Add new tic time to the DSP_tic list. Will not be called if
Tcapture = 0.
"""
if self.Tcapture > 0:
self.DSP_tic.append(time.time()-self.start_time) | 0.012097 |
def build_genome_alignment_from_file(ga_path, ref_spec, idx_path=None,
verbose=False):
"""
build a genome alignment by loading from a single MAF file.
:param ga_path: the path to the file to load.
:param ref_spec: which species in the MAF file is the reference?
:param idx_path: if provided, use this index to generate a just-in-time
genome alignment, instead of loading the file immediately.
"""
blocks = []
if (idx_path is not None):
bound_iter = functools.partial(genome_alignment_iterator,
reference_species=ref_spec)
hash_func = JustInTimeGenomeAlignmentBlock.build_hash
factory = IndexedFile(None, bound_iter, hash_func)
factory.read_index(idx_path, ga_path, verbose=verbose)
pind = None
for k in factory:
if verbose:
if pind is None:
total = len(factory)
pind = ProgressIndicator(totalToDo=total, messagePrefix="completed",
messageSuffix="building alignment blocks ")
pind.done += 1
pind.showProgress()
blocks.append(JustInTimeGenomeAlignmentBlock(factory, k))
else:
for b in genome_alignment_iterator(ga_path, ref_spec, verbose=verbose):
blocks.append(b)
return GenomeAlignment(blocks, verbose) | 0.008203 |
def sortshaw(s, datablock):
    """
    Sorts the data block into NRM, TRM, ARM1 and ARM2 lists;
    zero-field steps go in at step 0.
    """
    NRM, TRM, ARM1, ARM2 = [], [], [], []
    for rec in datablock:
        methcodes = rec["magic_method_codes"].split(":")
        step = float(rec["treatment_ac_field"])
        moment = float(rec["measurement_magn_moment"])  # renamed from 'str' to avoid shadowing the builtin
        if "LT-NO" in methcodes:
            NRM.append([0, moment])
        if "LT-T-I" in methcodes:
            TRM.append([0, moment])
            field = float(rec["treatment_dc_field"])
        if "LT-AF-I" in methcodes:
            ARM1.append([0, moment])
        if "LT-AF-I-2" in methcodes:
            ARM2.append([0, moment])
        if "LT-AF-Z" in methcodes:
            if "LP-ARM-AFD" in methcodes:
                ARM1.append([step, moment])
            elif "LP-TRM-AFD" in methcodes:
                TRM.append([step, moment])
            elif "LP-ARM2-AFD" in methcodes:
                ARM2.append([step, moment])
            else:
                NRM.append([step, moment])
cont = 1
while cont == 1:
if len(NRM) != len(TRM):
print("Uneven NRM/TRM steps: ")
NRM, TRM, cont = cleanup(TRM, NRM)
else:
cont = 0
cont = 1
while cont == 1:
if len(ARM1) != len(ARM2):
print("Uneven ARM1/ARM2 steps: ")
ARM1, ARM2, cont = cleanup(ARM2, ARM1)
else:
cont = 0
#
# final check
#
if len(NRM) != len(TRM) or len(ARM1) != len(ARM2):
print(len(NRM), len(TRM), len(ARM1), len(ARM2))
print(" Something wrong with this specimen! Better fix it or delete it ")
input(" press return to acknowledge message")
# now do the ratio to "fix" NRM/TRM data
# a
TRM_ADJ = []
for kk in range(len(TRM)):
step = TRM[kk][0]
for k in range(len(ARM1)):
if ARM1[k][0] == step:
TRM_ADJ.append([step, TRM[kk][1] * ARM1[k][1] / ARM2[k][1]])
break
shawblock = (NRM, TRM, ARM1, ARM2, TRM_ADJ)
return shawblock, field | 0.000982 |
def calcELAxi(R,vR,vT,pot,vc=1.,ro=1.):
"""
NAME:
calcELAxi
PURPOSE:
calculate the energy and angular momentum
INPUT:
R - Galactocentric radius (/ro)
vR - radial part of the velocity (/vc)
vT - azimuthal part of the velocity (/vc)
vc - circular velocity
ro - reference radius
OUTPUT:
(E,L)
HISTORY:
2010-11-30 - Written - Bovy (NYU)
"""
return (potentialAxi(R,pot)+vR**2./2.+vT**2./2.,R*vT) | 0.017578 |
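A worked example under an assumed flat rotation curve, where the potential takes the logarithmic form Phi(R) = vc^2 ln(R/ro) (an assumption for illustration; potentialAxi evaluates whatever potential is passed in):

import numpy as np

vc, ro = 1.0, 1.0
R, vR, vT = 1.2, 0.1, 1.0
E = vc**2 * np.log(R / ro) + vR**2 / 2. + vT**2 / 2.   # potential + kinetic energy
L = R * vT                                             # angular momentum
# E ~= 0.687, L = 1.2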
def get_identities(self, item):
""" Return the identities from an item """
# All identities are in the post stream
# The first post is the question. Next replies
posts = item['data']['post_stream']['posts']
for post in posts:
user = self.get_sh_identity(post)
yield user | 0.005935 |
def cleanPolyline(elem, options):
"""
Scour the polyline points attribute
"""
pts = parseListOfPoints(elem.getAttribute('points'))
elem.setAttribute('points', scourCoordinates(pts, options, True)) | 0.004566 |
def update(self, name, rssi):
"""Update the device name and/or RSSI.
        During an ongoing scan, multiple records from the same device can
        be received. Each time that happens this method is
called to update the :attr:`name` and/or :attr:`rssi` attributes.
"""
self.name = name
self.rssi = rssi
self._age = time.time() | 0.010101 |
def run_in_terminal(self, func, render_cli_done=False, cooked_mode=True):
"""
Run function on the terminal above the prompt.
What this does is first hiding the prompt, then running this callable
(which can safely output to the terminal), and then again rendering the
prompt which causes the output of this function to scroll above the
prompt.
:param func: The callable to execute.
:param render_cli_done: When True, render the interface in the
'Done' state first, then execute the function. If False,
erase the interface first.
:param cooked_mode: When True (the default), switch the input to
cooked mode while executing the function.
:returns: the result of `func`.
"""
# Draw interface in 'done' state, or erase.
if render_cli_done:
self._return_value = True
self._redraw()
self.renderer.reset() # Make sure to disable mouse mode, etc...
else:
self.renderer.erase()
self._return_value = None
# Run system command.
if cooked_mode:
with self.input.cooked_mode():
result = func()
else:
result = func()
# Redraw interface again.
self.renderer.reset()
self.renderer.request_absolute_cursor_position()
self._redraw()
return result | 0.001376 |
def normalize(self, **kwargs):
"""
Adjust the offsetvector so that a particular instrument has
the desired offset. All other instruments have their
offsets adjusted so that the relative offsets are
preserved. The instrument to noramlize, and the offset one
wishes it to have, are provided as a key-word argument.
The return value is the time slide dictionary, which is
modified in place.
If more than one key-word argument is provided the keys are
sorted and considered in order until a key is found that is
in the offset vector. The offset vector is normalized to
that value. This function is a no-op if no key-word
argument is found that applies.
Example:
>>> a = offsetvector({"H1": -10, "H2": -10, "L1": -10})
>>> a.normalize(L1 = 0)
offsetvector({'H2': 0, 'H1': 0, 'L1': 0})
>>> a = offsetvector({"H1": -10, "H2": -10})
>>> a.normalize(L1 = 0, H2 = 5)
offsetvector({'H2': 5, 'H1': 5})
"""
		# FIXME: should it be performed in place? If it should
		# be, should there be no return value?
for key, offset in sorted(kwargs.items()):
if key in self:
delta = offset - self[key]
for key in self.keys():
self[key] += delta
break
return self | 0.026294 |
def create_snapshot(kwargs=None, call=None, wait_to_finish=False):
'''
Create a snapshot.
volume_id
The ID of the Volume from which to create a snapshot.
description
The optional description of the snapshot.
    CLI Example:
.. code-block:: bash
salt-cloud -f create_snapshot my-ec2-config volume_id=vol-351d8826
salt-cloud -f create_snapshot my-ec2-config volume_id=vol-351d8826 \\
description="My Snapshot Description"
'''
if call != 'function':
raise SaltCloudSystemExit(
'The create_snapshot function must be called with -f '
'or --function.'
)
if kwargs is None:
kwargs = {}
volume_id = kwargs.get('volume_id', None)
description = kwargs.get('description', '')
if volume_id is None:
raise SaltCloudSystemExit(
'A volume_id must be specified to create a snapshot.'
)
params = {'Action': 'CreateSnapshot',
'VolumeId': volume_id,
'Description': description}
log.debug(params)
data = aws.query(params,
return_url=True,
return_root=True,
location=get_location(),
provider=get_provider(),
opts=__opts__,
sigver='4')[0]
r_data = {}
for d in data:
for k, v in six.iteritems(d):
r_data[k] = v
if 'snapshotId' in r_data:
snapshot_id = r_data['snapshotId']
        # Wait until the snapshot reaches the 'completed' status
if wait_to_finish:
salt.utils.cloud.run_func_until_ret_arg(fun=describe_snapshots,
kwargs={'snapshot_id': snapshot_id},
fun_call=call,
argument_being_watched='status',
required_argument_response='completed')
return r_data | 0.001971 |
def import_cls(cls_name):
"""Import class by its fully qualified name.
In terms of current example it is just a small helper function. Please,
don't use it in production approaches.
"""
path_components = cls_name.split('.')
module = __import__('.'.join(path_components[:-1]),
locals(),
globals(),
fromlist=path_components[-1:])
return getattr(module, path_components[-1]) | 0.002119 |
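A small sanity check with a standard-library class, assuming import_cls as defined above:

cls = import_cls('collections.OrderedDict')
d = cls(a=1, b=2)
print(cls.__name__, dict(d))  # OrderedDict {'a': 1, 'b': 2}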
def getEdges(npArr):
"""get np array of bin edges"""
edges = np.concatenate(([0], npArr[:,0] + npArr[:,2]))
return np.array([Decimal(str(i)) for i in edges]) | 0.03681 |
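A worked example of the edge reconstruction, assuming column 0 holds each bin's left edge and column 2 its width (so column 0 + column 2 is the right edge):

import numpy as np
from decimal import Decimal

# rows assumed to be (left_edge, count, width); column 1 is ignored here
npArr = np.array([[0.0, 5, 0.5],
                  [0.5, 3, 0.5],
                  [1.0, 7, 0.5]])
edges = np.concatenate(([0], npArr[:, 0] + npArr[:, 2]))
print([Decimal(str(i)) for i in edges])
# [Decimal('0.0'), Decimal('0.5'), Decimal('1.0'), Decimal('1.5')]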
def from_characteristic_rate(cls, min_mag, b_val, char_mag, char_rate,
bin_width):
"""
        Define the Youngs and Coppersmith (1985) MFD by constraining the
        cumulative a value from the characteristic rate.
The cumulative a value is obtained by making use of the property that
the rate of events at m' - 1 must be equal to the rate at the
characteristic magnitude, and therefore by first computing the
incremental a value, using the following equation::
10 ** (a_incr - b_val * (m_prime - 1)) == char_rate / 0.5
where ``m' - 1 = char_mag - 1.25``.
The cumulative a value is then obtained as ::
a_val = a_incr - log10(b_val * ln(10))
:param min_mag:
The lowest magnitude for the MFD. The first bin in the
:meth:`result histogram <get_annual_occurrence_rates>` is aligned
to make its left border match this value.
:param b_val:
The Gutenberg-Richter ``b`` value -- the gradient of the loglinear
G-R relationship.
:param char_mag:
The characteristic magnitude defining the middle point of
characteristic distribution. That is the boxcar function
representing the characteristic distribution is defined in the
range [char_mag - 0.25, char_mag + 0.25].
:param char_rate:
The characteristic rate associated to the characteristic magnitude,
to be distributed over the domain of the boxcar function
representing the characteristic distribution (that is λ_char =
char_rate / 0.5)
:param bin_width:
A positive float value -- the width of a single histogram bin.
:returns:
An instance of :class:`YoungsCoppersmith1985MFD`.
Values for ``min_mag`` and the maximum magnitude (char_mag + 0.25)
don't have to be aligned with respect to ``bin_width``. They get
rounded accordingly anyway so that both are divisible by ``bin_width``
just before converting a function to a histogram.
See :meth:`_get_min_mag_and_num_bins`.
"""
a_incr = b_val * (char_mag - 1.25) + numpy.log10(char_rate /
DELTA_CHAR)
a_val = a_incr - numpy.log10(b_val * numpy.log(10))
return cls(min_mag, a_val, b_val, char_mag, char_rate, bin_width) | 0.001216 |
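A numeric sanity check of the two docstring equations above; the names mirror the method's locals, and DELTA_CHAR is the 0.5 boxcar width:

import numpy

b_val, char_mag, char_rate = 1.0, 7.0, 0.001
DELTA_CHAR = 0.5

a_incr = b_val * (char_mag - 1.25) + numpy.log10(char_rate / DELTA_CHAR)
a_val = a_incr - numpy.log10(b_val * numpy.log(10))

# the rate at m' - 1 must equal the characteristic rate over the boxcar
assert numpy.isclose(10 ** (a_incr - b_val * (char_mag - 1.25)),
                     char_rate / DELTA_CHAR)
print(a_val)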
def import_class(import_str):
"""Returns a class from a string including module and class."""
mod_str, _sep, class_str = import_str.rpartition('.')
try:
__import__(mod_str)
return getattr(sys.modules[mod_str], class_str)
except (ValueError, AttributeError):
raise ImportError('Class %s cannot be found (%s)' %
(class_str,
traceback.format_exception(*sys.exc_info()))) | 0.002174 |
def create_negotiate_message(self, domain_name=None, workstation=None):
"""
Create an NTLM NEGOTIATE_MESSAGE
:param domain_name: The domain name of the user account we are authenticating with, default is None
        :param workstation: The workstation we are using to authenticate with, default is None
:return: A base64 encoded string of the NEGOTIATE_MESSAGE
"""
self.negotiate_message = NegotiateMessage(self.negotiate_flags, domain_name, workstation)
return base64.b64encode(self.negotiate_message.get_data()) | 0.008757 |
def subvolume_find_new(name, last_gen):
'''
List the recently modified files in a subvolume
name
Name of the subvolume
last_gen
Last transid marker from where to compare
CLI Example:
.. code-block:: bash
salt '*' btrfs.subvolume_find_new /var/volumes/tmp 1024
'''
cmd = ['btrfs', 'subvolume', 'find-new', name, last_gen]
res = __salt__['cmd.run_all'](cmd)
salt.utils.fsutils._verify_run(res)
lines = res['stdout'].splitlines()
# Filenames are at the end of each inode line
files = [l.split()[-1] for l in lines if l.startswith('inode')]
# The last transid is in the last line
transid = lines[-1].split()[-1]
return {
'files': files,
'transid': transid,
} | 0.002597 |
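A quick check of the output parsing, fed a representative 'btrfs subvolume find-new' output (the exact format is assumed from the code above):

stdout = ("inode 258 file offset 0 len 4096 disk start 0 offset 0 "
          "gen 1025 flags INLINE etc/fstab\n"
          "transid marker was 1026")
lines = stdout.splitlines()
files = [l.split()[-1] for l in lines if l.startswith('inode')]
transid = lines[-1].split()[-1]
print(files, transid)  # ['etc/fstab'] 1026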
def ve_interfaces(self, **kwargs):
"""list[dict]: A list of dictionary items describing the operational
state of ve interfaces along with the ip address associations.
Args:
rbridge_id (str): rbridge-id for device.
callback (function): A function executed upon completion of the
method. The only parameter passed to `callback` will be the
``ElementTree`` `config`.
Returns:
Return value of `callback`.
Raises:
None
Examples:
>>> import pynos.device
>>> conn = ('10.24.39.211', '22')
>>> auth = ('admin', 'password')
>>> with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.interface.ve_interfaces()
... output = dev.interface.ve_interfaces(rbridge_id='1')
"""
urn = "{urn:brocade.com:mgmt:brocade-interface-ext}"
rbridge_id = kwargs.pop('rbridge_id', None)
ip_result = []
request_interface = self._get_intf_rb_id(rbridge_id=rbridge_id)
interface_result = self._callback(request_interface, 'get')
for interface in interface_result.findall('%sinterface' % urn):
int_type = interface.find('%sinterface-type' % urn).text
int_name = interface.find('%sinterface-name' % urn).text
int_state = interface.find('%sif-state' % urn).text
int_proto_state = interface.find('%sline-protocol-state' %
urn).text
ip_address = interface.find('.//%sipv4' % urn).text
if_name = interface.find('%sif-name' % urn).text
results = {'interface-type': int_type,
'interface-name': int_name,
'if-name': if_name,
'interface-state': int_state,
'interface-proto-state': int_proto_state,
'ip-address': ip_address}
ip_result.append(results)
return ip_result | 0.000963 |
def sg_symbol_from_int_number(int_number, hexagonal=True):
"""
Obtains a SpaceGroup name from its international number.
Args:
int_number (int): International number.
hexagonal (bool): For rhombohedral groups, whether to return the
hexagonal setting (default) or rhombohedral setting.
Returns:
(str) Spacegroup symbol
"""
syms = []
for n, v in get_symm_data("space_group_encoding").items():
if v["int_number"] == int_number:
syms.append(n)
if len(syms) == 0:
raise ValueError("Invalid international number!")
if len(syms) == 2:
if hexagonal:
syms = list(filter(lambda s: s.endswith("H"), syms))
else:
syms = list(filter(lambda s: not s.endswith("H"), syms))
return syms.pop() | 0.00122 |
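A usage sketch, assuming pymatgen's space-group encoding table is available; the exact returned strings come from that table:

print(sg_symbol_from_int_number(225))                   # e.g. 'Fm-3m'
print(sg_symbol_from_int_number(166))                   # hexagonal setting, e.g. 'R-3mH'
print(sg_symbol_from_int_number(166, hexagonal=False))  # rhombohedral setting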
def scrape_wikinews(conn, project, articleset, query):
"""
Scrape wikinews articles from the given query
    @param conn: The AmcatAPI object
    @param project: The target project ID
    @param articleset: The target articleset ID
    @param query: The wikinews search query
"""
url = "http://en.wikinews.org/w/index.php?search={}&limit=50".format(query)
logging.info(url)
for page in get_pages(url):
urls = get_article_urls(page)
arts = list(get_articles(urls))
logging.info("Adding {} articles to set {}:{}"
.format(len(arts), project, articleset))
conn.create_articles(project=project, articleset=articleset,
json_data=arts) | 0.002878 |
def removeRow(self, triggered):
"""Removes a row to the model.
This method is also a slot.
Args:
triggered (bool): If the corresponding button was
activated, the selected row will be removed
from the model.
"""
if triggered:
model = self.tableView.model()
selection = self.tableView.selectedIndexes()
rows = [index.row() for index in selection]
model.removeDataFrameRows(set(rows))
self.sender().setChecked(False) | 0.003565 |
def move_tab(self, index_from, index_to):
"""
Move tab.
(tabs themselves have already been moved by the history.tabwidget)
"""
filename = self.filenames.pop(index_from)
editor = self.editors.pop(index_from)
self.filenames.insert(index_to, filename)
self.editors.insert(index_to, editor) | 0.005682 |
def resize(self, size, interp='bilinear'):
"""Resize the image.
Parameters
----------
size : int, float, or tuple
* int - Percentage of current size.
* float - Fraction of current size.
* tuple - Size of the output image.
interp : :obj:`str`, optional
Interpolation to use for re-sizing ('nearest', 'lanczos', 'bilinear',
'bicubic', or 'cubic')
"""
# resize channels separately
gray_im_resized = self.gray.resize(size, interp)
depth_im_resized = self.depth.resize(size, interp)
# return combination of resized data
return GdImage.from_grayscale_and_depth(
gray_im_resized, depth_im_resized) | 0.003974 |
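A short usage sketch (im is assumed to be an existing GdImage instance); the three size forms map to fraction, percentage, and explicit output shape:

half = im.resize(0.5)                            # fraction of current size
half_too = im.resize(50)                         # percentage of current size
fixed = im.resize((240, 320), interp='nearest')  # explicit output size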
def scan_to_table(input_table, genome, scoring, pwmfile=None, ncpus=None):
"""Scan regions in input table with motifs.
Parameters
----------
input_table : str
        Filename of input table. Can be either a tab-separated text file or a
        feather file.
genome : str
Genome name. Can be either the name of a FASTA-formatted file or a
genomepy genome name.
scoring : str
"count" or "score"
pwmfile : str, optional
Specify a PFM file for scanning.
ncpus : int, optional
If defined this specifies the number of cores to use.
Returns
-------
table : pandas.DataFrame
DataFrame with motif ids as column names and regions as index. Values
        are either counts or scores, depending on the 'scoring' parameter.
"""
config = MotifConfig()
if pwmfile is None:
pwmfile = config.get_default_params().get("motif_db", None)
if pwmfile is not None:
pwmfile = os.path.join(config.get_motif_dir(), pwmfile)
if pwmfile is None:
raise ValueError("no pwmfile given and no default database specified")
logger.info("reading table")
if input_table.endswith("feather"):
df = pd.read_feather(input_table)
idx = df.iloc[:,0].values
else:
df = pd.read_table(input_table, index_col=0, comment="#")
idx = df.index
regions = list(idx)
s = Scanner(ncpus=ncpus)
s.set_motifs(pwmfile)
s.set_genome(genome)
s.set_background(genome=genome)
nregions = len(regions)
scores = []
if scoring == "count":
logger.info("setting threshold")
s.set_threshold(fpr=FPR)
logger.info("creating count table")
for row in s.count(regions):
scores.append(row)
logger.info("done")
else:
s.set_threshold(threshold=0.0)
logger.info("creating score table")
for row in s.best_score(regions, normalize=True):
scores.append(row)
logger.info("done")
motif_names = [m.id for m in read_motifs(pwmfile)]
logger.info("creating dataframe")
return pd.DataFrame(scores, index=idx, columns=motif_names) | 0.005398 |
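A hedged invocation sketch; the input file, genome name, and core count below are hypothetical:

# 'peaks.tsv' must have regions in its first column; 'hg38' is a genomepy genome.
table = scan_to_table("peaks.tsv", "hg38", "count", ncpus=4)
print(table.shape)         # (n_regions, n_motifs)
print(table.iloc[:3, :3])  # motif counts for the first few regions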
def query( self ):
"""
Returns the query this widget is representing from the tree widget.
:return <Query> || <QueryCompound> || None
"""
if ( not self.uiQueryCHK.isChecked() ):
return None
# build a query if not searching all
q = Q()
for i in range(self.uiQueryTREE.topLevelItemCount()):
item = self.uiQueryTREE.topLevelItem(i)
q &= item.query()
return q | 0.016032 |
def on(self):
"""
Turn all the output devices on.
"""
for device in self:
if isinstance(device, (OutputDevice, CompositeOutputDevice)):
device.on() | 0.009662 |
def load(custom_url=None, pkg_type=None, serve_local=None, version='4.9.2'):
"""Load CKEditor resource from CDN or local.
:param custom_url: The custom resource url to use, build your CKEditor
on `CKEditor builder <https://ckeditor.com/cke4/builder>`_.
:param pkg_type: The type of CKEditor package, one of ``basic``,
``standard`` and ``full``. Default to ``standard``. It's a
mirror argument to overwrite ``CKEDITOR_PKG_TYPE``.
:param serve_local: Mirror argument to overwrite ``CKEDITOR_SERVE_LOCAL``.
:param version: The version of CKEditor.
"""
pkg_type = pkg_type or current_app.config['CKEDITOR_PKG_TYPE']
if pkg_type not in ['basic', 'standard', 'full']:
warnings.warn('The provided pkg_type string was invalid, '
'it should be one of basic/standard/full.')
pkg_type = 'standard'
if serve_local or current_app.config['CKEDITOR_SERVE_LOCAL']:
url = url_for('ckeditor.static', filename='%s/ckeditor.js' % pkg_type)
else:
url = '//cdn.ckeditor.com/%s/%s/ckeditor.js' % (version, pkg_type)
if custom_url:
url = custom_url
return Markup('<script src="%s"></script>' % url) | 0.003086 |
def login():
" View function which handles an authentication request. "
form = LoginForm(request.form)
    # make sure the data are valid; this doesn't check that the password is right
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
        # we use werkzeug to validate the user's password
if user and user.check_password(form.password.data):
users.login(user)
flash(_('Welcome %(user)s', user=user.username))
return redirect(url_for('users.profile'))
flash(_('Wrong email or password'), 'error-message')
return redirect(request.referrer or url_for(users._login_manager.login_view)) | 0.002924 |
def count(index, h):
    '''
    Gives the count of documents stored in Elasticsearch. If the index
    option is provided, it returns the document count for that index.
    '''
try:
response = base.es.cat.count(index,h=h)
table = base.draw_table(response)
except Exception as e:
click.echo(e)
else:
click.echo(table) | 0.008427 |
def print_splits(cliques, next_cliques):
"""Print shifts for new forks."""
splits = 0
for i, clique in enumerate(cliques):
parent, _ = clique
# If this fork continues
if parent in next_cliques:
# If there is a new fork, print a split
if len(next_cliques[parent]) > 1:
print_split(i + splits, len(cliques) + splits)
splits += 1 | 0.002387 |
def _next_file(self):
"""Find next filename.
self._filenames may need to be expanded via listbucket.
Returns:
None if no more file is left. Filename otherwise.
"""
while True:
if self._bucket_iter:
try:
return self._bucket_iter.next().filename
except StopIteration:
self._bucket_iter = None
self._bucket = None
if self._index >= len(self._filenames):
return
filename = self._filenames[self._index]
self._index += 1
if self._delimiter is None or not filename.endswith(self._delimiter):
return filename
self._bucket = cloudstorage.listbucket(filename,
delimiter=self._delimiter)
self._bucket_iter = iter(self._bucket) | 0.013889 |
def _get_children(self, index):
"""
        Return all children of the node with the given index.
"""
if self.dict_storage:
return list(self.graph[index].values())
else:
return [elem for elem in self.graph[index] if elem != Trie.NO_NODE] | 0.006969 |
def propagate_timezone_option(self):
"""Set our timezone value and give it too to unset satellites
:return: None
"""
if self.use_timezone:
# first apply myself
os.environ['TZ'] = self.use_timezone
time.tzset()
tab = [self.schedulers, self.pollers, self.brokers, self.receivers, self.reactionners]
for sat_list in tab:
for sat in sat_list:
if sat.use_timezone == 'NOTSET':
setattr(sat, 'use_timezone', self.use_timezone) | 0.005236 |
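A minimal standalone illustration of the TZ mechanism this method relies on (POSIX-only, since time.tzset is unavailable on Windows):

import os
import time

os.environ['TZ'] = 'Europe/Paris'
time.tzset()
print(time.strftime('%Z'))  # CET or CEST, depending on the date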
def download_to_file(self, file):
"""
Download and store the file of the sample.
:param file: A file-like object to store the file.
"""
con = ConnectionManager().get_connection(self._connection_alias)
return con.download_to_file(self.file, file, append_base_url=False) | 0.006309 |
def fftlog(fEM, time, freq, ftarg):
r"""Fourier Transform using FFTLog.
FFTLog is the logarithmic analogue to the Fast Fourier Transform FFT.
FFTLog was presented in Appendix B of [Hami00]_ and published at
<http://casa.colorado.edu/~ajsh/FFTLog>.
This function uses a simplified version of ``pyfftlog``, which is a
python-version of ``FFTLog``. For more details regarding ``pyfftlog`` see
<https://github.com/prisae/pyfftlog>.
    The full flexibility of ``FFTLog`` is not available here: only the
logarithmic FFT (``fftl`` in ``FFTLog``), not the Hankel transform (``fht``
in ``FFTLog``). Furthermore, the following parameters are fixed:
- ``kr`` = 1 (initial value)
- ``kropt`` = 1 (silently adjusts ``kr``)
- ``dir`` = 1 (forward)
Furthermore, ``q`` is restricted to -1 <= q <= 1.
The function is called from one of the modelling routines in :mod:`model`.
Consult these modelling routines for a description of the input and output
parameters.
Returns
-------
tEM : array
Returns time-domain EM response of ``fEM`` for given ``time``.
conv : bool
Only relevant for QWE/QUAD.
"""
# Get tcalc, dlnr, kr, rk, q; a and n
_, _, q, mu, tcalc, dlnr, kr, rk = ftarg
if mu > 0: # Sine
a = -fEM.imag
else: # Cosine
a = fEM.real
n = a.size
# 1. Amplitude and Argument of kr^(-2 i y) U_mu(q + 2 i y)
ln2kr = np.log(2.0/kr)
d = np.pi/(n*dlnr)
m = np.arange(1, (n+1)/2)
y = m*d # y = m*pi/(n*dlnr)
if q == 0: # unbiased case (q = 0)
zp = special.loggamma((mu + 1)/2.0 + 1j*y)
arg = 2.0*(ln2kr*y + zp.imag)
else: # biased case (q != 0)
xp = (mu + 1.0 + q)/2.0
xm = (mu + 1.0 - q)/2.0
zp = special.loggamma(xp + 0j)
zm = special.loggamma(xm + 0j)
# Amplitude and Argument of U_mu(q)
amp = np.exp(np.log(2.0)*q + zp.real - zm.real)
# note +Im(zm) to get conjugate value below real axis
arg = zp.imag + zm.imag
# first element: cos(arg) = ±1, sin(arg) = 0
argcos1 = amp*np.cos(arg)
# remaining elements
zp = special.loggamma(xp + 1j*y)
zm = special.loggamma(xm + 1j*y)
argamp = np.exp(np.log(2.0)*q + zp.real - zm.real)
arg = 2*ln2kr*y + zp.imag + zm.imag
argcos = np.cos(arg)
argsin = np.sin(arg)
# 2. Centre point of array
jc = np.array((n + 1)/2.0)
j = np.arange(n)+1
# 3. a(r) = A(r) (r/rc)^[-dir*(q-.5)]
a *= np.exp(-(q - 0.5)*(j - jc)*dlnr)
# 4. transform a(r) -> ã(k)
# 4.a normal FFT
a = fftpack.rfft(a)
# 4.b
m = np.arange(1, n/2, dtype=int) # index variable
if q == 0: # unbiased (q = 0) transform
# multiply by (kr)^[- i 2 m pi/(n dlnr)] U_mu[i 2 m pi/(n dlnr)]
ar = a[2*m-1]
ai = a[2*m]
a[2*m-1] = ar*argcos[:-1] - ai*argsin[:-1]
a[2*m] = ar*argsin[:-1] + ai*argcos[:-1]
# problematical last element, for even n
if np.mod(n, 2) == 0:
ar = argcos[-1]
a[-1] *= ar
else: # biased (q != 0) transform
# multiply by (kr)^[- i 2 m pi/(n dlnr)] U_mu[q + i 2 m pi/(n dlnr)]
# phase
ar = a[2*m-1]
ai = a[2*m]
a[2*m-1] = ar*argcos[:-1] - ai*argsin[:-1]
a[2*m] = ar*argsin[:-1] + ai*argcos[:-1]
a[0] *= argcos1
a[2*m-1] *= argamp[:-1]
a[2*m] *= argamp[:-1]
# problematical last element, for even n
if np.mod(n, 2) == 0:
m = int(n/2)-3
ar = argcos[m-1]*argamp[m-1]
a[-1] *= ar
# 4.c normal FFT back
a = fftpack.irfft(a)
# Ã(k) = ã(k) k^[-dir*(q+.5)] rc^[-dir*(q-.5)]
# = ã(k) (k/kc)^[-dir*(q+.5)] (kc rc)^(-dir*q) (rc/kc)^(dir*.5)
a = a[::-1]*np.exp(-((q + 0.5)*(j - jc)*dlnr + q*np.log(kr) -
np.log(rk)/2.0))
# Interpolate for the desired times
ttEM = iuSpline(np.log(tcalc), a)
tEM = ttEM(np.log(time))
# (Second argument is only for QWE)
return tEM, True | 0.000242 |
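The index gymnastics around a[2*m-1] and a[2*m] rely on scipy.fftpack.rfft packing a length-n real transform as [y0, Re(y1), Im(y1), ..., y(n/2)]; a minimal check of that convention:

import numpy as np
from scipy import fftpack

x = np.arange(8.0)
packed = fftpack.rfft(x)  # [y0, Re1, Im1, Re2, Im2, Re3, Im3, y4]
full = np.fft.fft(x)
assert np.isclose(packed[0], full[0].real)
assert np.isclose(packed[1], full[1].real)
assert np.isclose(packed[2], full[1].imag)
assert np.isclose(packed[-1], full[4].real)  # Nyquist term is purely real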
def load_template_source(self, *ka):
"""
Backward compatible method for Django < 2.0.
"""
template_name = ka[0]
for origin in self.get_template_sources(template_name):
try:
return self.get_contents(origin), origin.name
except TemplateDoesNotExist:
pass
raise TemplateDoesNotExist(template_name) | 0.005025 |