def _generate(self):
u"""overrided in each modules."""
self._data['key'] = self.key
self._data['value'] = self.value
self._data['host'] = self.host
self._data['clock'] = self.clock
def apply_smoothing(self, smooth_fwhm):
    """Set self._smooth_fwhm and then smooth the data.
    See boyle.nifti.smooth.smooth_imgs.
    Returns
    -------
    The smoothed data, deep-copied.
    """
    if smooth_fwhm <= 0:
        return
    old_smooth_fwhm = self._smooth_fwhm
    self._smooth_fwhm = smooth_fwhm
    try:
        data = self.get_data(smoothed=True, masked=True, safe_copy=True)
    except ValueError:
        # restore the previous FWHM if smoothing fails
        self._smooth_fwhm = old_smooth_fwhm
        raise
    else:
        return data
def parse_cfg(self):
    """Parse the given config file for experiments."""
    self.cfgparser = ConfigParser()
    if not self.cfgparser.read(self.options.config):
        raise SystemExit('config file %s not found.' % self.options.config)
    # Change the current working directory to be relative to 'experiments.cfg'
    projectDir = os.path.dirname(self.options.config)
    projectDir = os.path.abspath(projectDir)
    os.chdir(projectDir)
def _set_parent_port_detail(self, v, load=False):
"""
Setter method for parent_port_detail, mapped from YANG variable /ptp_state/parent_port_detail (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_parent_port_detail is considered a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_parent_port_detail() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=parent_port_detail.parent_port_detail, is_container='container', presence=False, yang_name="parent-port-detail", rest_name="parent-port-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'ptp-parent-port-detail', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-ptp-operational', defining_module='brocade-ptp-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """parent_port_detail must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=parent_port_detail.parent_port_detail, is_container='container', presence=False, yang_name="parent-port-detail", rest_name="parent-port-detail", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'ptp-parent-port-detail', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-ptp-operational', defining_module='brocade-ptp-operational', yang_type='container', is_config=False)""",
})
self.__parent_port_detail = t
if hasattr(self, '_set'):
self._set()
def _setup_logging(cls, args):
'''Set up the root logger if needed.
The root logger is set to the appropriate level so the file and WARC logs
work correctly.
'''
assert (
logging.CRITICAL >
logging.ERROR >
logging.WARNING >
logging.INFO >
logging.DEBUG >
logging.NOTSET
)
assert (
LOG_VERY_QUIET >
LOG_QUIET >
LOG_NO_VERBOSE >
LOG_VERBOSE >
LOG_DEBUG
)
assert args.verbosity
root_logger = logging.getLogger()
current_level = root_logger.getEffectiveLevel()
min_level = LOG_VERY_QUIET
if args.verbosity == LOG_QUIET:
min_level = logging.ERROR
if args.verbosity in (LOG_NO_VERBOSE, LOG_VERBOSE) \
or args.warc_file \
or args.output_file or args.append_output:
min_level = logging.INFO
if args.verbosity == LOG_DEBUG:
min_level = logging.DEBUG
if current_level > min_level:
root_logger.setLevel(min_level)
root_logger.debug(
'Wpull needs the root logger level set to {0}.'
.format(min_level)
)
if current_level <= logging.INFO:
logging.captureWarnings(True)
def _check_action_feasibility(self):
"""
Check that for every state, reward is finite for some action,
and for the case sa_pair is True, that for every state, there is
some action available.
"""
# Check that for every state, reward is finite for some action
R_max = self.s_wise_max(self.R)
if (R_max == -np.inf).any():
# First state index such that all actions yield -inf
s = np.where(R_max == -np.inf)[0][0]
raise ValueError(
'for every state the reward must be finite for some action: '
'violated for state {s}'.format(s=s)
)
if self._sa_pair:
# Check that for every state there is at least one action available
diff = np.diff(self.a_indptr)
if (diff == 0).any():
# First state index such that no action is available
s = np.where(diff == 0)[0][0]
raise ValueError(
'for every state at least one action must be available: '
'violated for state {s}'.format(s=s)
)
def df_from_groups(groups, columns=None):
"""Create DataFrame of GroupBy object with columns for each product(grouped_value, column_label)"""
if columns is None:
columns = list(groups.get_group(groups.indices.keys()[0]).columns)
df = pd.DataFrame()
for col, group_label in product(columns, groups.indices.keys()):
label = '{}_{}'.format(col, group_label)
df[label] = pd.Series(groups.get_group(group_label)[col].values)
return df
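A minimal usage sketch (assuming pandas is imported as pd elsewhere in the module; the column and group names here are illustrative only):

import pandas as pd
raw = pd.DataFrame({'group': ['a', 'a', 'b'], 'x': [1, 2, 3]})
wide = df_from_groups(raw.groupby('group'), columns=['x'])
# wide now has columns 'x_a' and 'x_b', one per (column, group) pair,
# each holding that group's values (shorter columns are NaN-padded)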
def scope(self, key, *tags, default=None):
"""Only apply tags and default for top-level key, effectively scoping the tags."""
scope = self._scopes[key]
tags = self._ensure_exclamation(tags)
default = default if not default or default.startswith("!") else "!" + default
if scope:
scope[0] = scope[0] + tags
scope[1] = default if default else scope[1]
else:
scope.append(tags)
scope.append(default)
def auth(alias=None, url=None, cfg="~/.xnat_auth"):
'''
Read connection details from an xnat_auth XML file
Example:
>>> import yaxil
>>> auth = yaxil.auth('xnatastic')
>>> auth.url, auth.username, auth.password
('https://www.xnatastic.org/', 'username', '********')
:param alias: XNAT alias
:type alias: str
:param url: XNAT URL
:type url: str
:param cfg: Configuration file
:type cfg: str
:returns: Named tuple of (url, username, password)
:rtype: :mod:`yaxil.XnatAuth`
'''
if not alias and not url:
raise ValueError('you must provide an alias or url argument')
if alias and url:
raise ValueError('cannot provide both alias and url arguments')
# check and parse config file
cfg = os.path.expanduser(cfg)
if not os.path.exists(cfg):
raise AuthError("could not locate auth file %s" % cfg)
tree = etree.parse(os.path.expanduser(cfg))
# search by alias or url
res = None
if alias:
res = tree.findall("./%s" % alias)
if url:
res = tree.findall("./*/[url='%s']" % url)
if not res:
raise AuthError("failed to locate xnat credentials within %s" % cfg)
elif len(res) > 1:
raise AuthError("found too many sets of credentials within %s" % cfg)
res = res.pop()
# get url
url = res.findall("url")
if not url:
raise AuthError("no url for %s in %s" % (alias, cfg))
elif len(url) > 1:
raise AuthError("too many urls for %s in %s" % (alias, cfg))
# get username
username = res.findall("username")
if not username:
raise AuthError("no username for %s in %s" % (alias, cfg))
elif len(username) > 1:
raise AuthError("too many usernames for %s in %s" % (alias, cfg))
# get password
password = res.findall("password")
if not password:
raise AuthError("no password for %s in %s" % (alias, cfg))
elif len(password) > 1:
raise AuthError("too many passwords for %s in %s" % (alias, cfg))
return XnatAuth(url=url.pop().text, username=username.pop().text,
password=password.pop().text)
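For reference, the XPath lookups above imply a config file shaped roughly like the following. The root tag name is arbitrary and everything here is an illustrative guess inferred from the findall() calls, not a documented schema:

<xnat>
  <xnatastic>
    <url>https://www.xnatastic.org/</url>
    <username>username</username>
    <password>********</password>
  </xnatastic>
</xnat>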
def check_local_install(ctx, version, ext, server="local"):
"""
Upload and install works?
Uploads a distribution to PyPI, and then tests to see if I can download and
install it.
Returns:
str: string summarizing the operation
"""
here = Path(ctx.releaser.here).resolve()
dist_dir = here / 'dist'
all_files = list(dist_dir.glob('*.{}'.format(ext)))
the_file = all_files[0]
for f in all_files[1:]:
if f.stat().st_mtime > the_file.stat().st_mtime:
the_file = f
# this is the latest generated file of the given version
environment = 'env-{}-{}-{}'.format(version, ext, server)
if server == "local":
pass
else:
# upload to server
print("** Uploading to server **")
cmd = 'twine upload {}'.format(the_file)
# for PyPI, let twine pick the server
if server != "pypi":
cmd = cmd + ' -r {}'.format(server)
result = invoke.run(cmd, warn=True)
if result.failed:
print(textwrap.fill("[{}ERROR{}] Something broke trying to upload "
"your package. This will be the case if you "
"have already uploaded it before. To upload "
"again, use a different version number "
"(or a different build by including a '+' "
"suffix to your version number)."
.format(ERROR_COLOR, RESET_COLOR),
width=text.get_terminal_size().columns - 1,
subsequent_indent=' '*8))
# print(result.stderr)
# remove directory if it exists
if (here / 'env' / environment).exists():
shutil.rmtree('env' + os.sep + environment)
invoke.run('python -m venv env{}{}'.format(os.sep, environment))
other_dependencies(ctx, server, environment)
if server == "local":
result = invoke.run('env{0}{1}{0}Scripts{0}pip{2} install {3} --no-cache'
.format(os.sep, environment, '.exe', the_file),
hide=True)
else:
#print(" **Install from server**")
result = invoke.run('env{0}{1}{0}Scripts{0}pip{2} install -i {3} '
'{4}=={5} --no-cache'
.format(os.sep, environment, '.exe',
server_url(server),
ctx.releaser.module_name, version),
hide=True)
if result.failed:
print('[{}ERROR{}] Something broke trying to install your package.'
.format(ERROR_COLOR, RESET_COLOR))
print(result.stderr)
sys.exit(1)
print("** Test version of installed package **")
result = invoke.run('env{0}{1}{0}Scripts{0}python{2} -c '
'exec("""import {3}\\nprint({3}.__version__)""")'
.format(os.sep, environment, '.exe',
(ctx.releaser.module_name).strip()))
test_version = result.stdout.strip()
# print(test_version, type(test_version), type(expected_version))
if Version(test_version) == version:
results = '{}{} install {} works!{}'.format(GOOD_COLOR, server, ext,
RESET_COLOR)
else:
results = '{}{} install {} broken{}'.format(ERROR_COLOR, server, ext,
RESET_COLOR)
print(results)
return results
def relative_field(field, parent):
"""
RETURN field PATH WITH RESPECT TO parent
"""
if parent==".":
return field
field_path = split_field(field)
parent_path = split_field(parent)
common = 0
for f, p in _builtin_zip(field_path, parent_path):
if f != p:
break
common += 1
if len(parent_path) == common:
return join_field(field_path[common:])
else:
dots = "." * (len(parent_path) - common)
return dots + "." + join_field(field_path[common:]) | 0.003704 |
def new_cells(self, name=None, formula=None):
"""Create a cells in the space.
Args:
name: If omitted, the cells is named automatically ``CellsN``,
where ``N`` is an available number.
formula: The function to define the formula of the cells.
Returns:
The new cells.
"""
# Outside formulas only
return self._impl.new_cells(name, formula).interface
def _fix_dynamic_class_lookup(cls, pstfx):
"""Fix name lookup problem that prevents pickling of dynamically
defined classes.
Parameters
----------
cls : class
Dynamically generated class to which fix is to be applied
pstfx : string
Postfix that can be used to identify dynamically generated classes
that are equivalent by construction
"""
# Extended name for the class that will be added to the module namespace
extnm = '_' + cls.__name__ + '_' + pstfx
# Get the module in which the dynamic class is defined
mdl = sys.modules[cls.__module__]
# Allow lookup of the dynamically generated class within the module via
# its extended name
setattr(mdl, extnm, cls)
# Change the dynamically generated class name to the extended name
if hasattr(cls, '__qualname__'):
cls.__qualname__ = extnm
else:
cls.__name__ = extnm
def _matches_contigs(in_file, contigs, checked_file):
"""Check if the contigs in the input file match the defined contigs in the reference genome.
"""
tocheck_contigs = 2
if utils.file_exists(checked_file):
with open(checked_file) as in_handle:
return in_handle.read().strip() == "match"
else:
with utils.open_gzipsafe(in_file) as in_handle:
to_check = set([])
for line in in_handle:
if not line.startswith("#"):
to_check.add(line.split()[0])
if len(to_check) >= tocheck_contigs:
break
with open(checked_file, "w") as out_handle:
if any([c not in contigs for c in to_check]):
out_handle.write("different")
return False
else:
out_handle.write("match")
return True
def user_create(name,
passwd,
database=None,
user=None,
password=None,
host=None,
port=None):
'''
Create a cluster admin or a database user.
If a database is specified, it will create a database user.
If a database is not specified, it will create a cluster admin.
name
User name for the new user to create
passwd
Password for the new user to create
database
The database to create the user in
user
The user to connect as
password
The password of the user
host
The host to connect to
port
The port to connect to
CLI Example:
.. code-block:: bash
salt '*' influxdb08.user_create <name> <passwd>
salt '*' influxdb08.user_create <name> <passwd> <database>
salt '*' influxdb08.user_create <name> <passwd> <database> <user> <password> <host> <port>
'''
if user_exists(name, database, user, password, host, port):
if database:
log.info('User \'%s\' already exists for DB \'%s\'', name, database)
else:
log.info('Cluster admin \'%s\' already exists', name)
return False
client = _client(user=user, password=password, host=host, port=port)
if not database:
return client.add_cluster_admin(name, passwd)
client.switch_database(database)
return client.add_database_user(name, passwd)
def find(self, item_id=None):
"Recursively find a menu item by its id (useful for event handlers)"
for it in self:
found = it.find(item_id)
if found:
return found
def consume(self, kind):
"""Consume one token and verify it is of the expected kind."""
next_token = self.stream.move()
assert next_token.kind == kind
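One caveat worth noting: assert-based validation disappears under python -O. A hedged variant that survives optimized mode (the exception type is a placeholder, not necessarily what the surrounding parser uses):

def consume(self, kind):
    """Consume one token and verify it is of the expected kind."""
    next_token = self.stream.move()
    if next_token.kind != kind:
        raise ValueError('expected token of kind %r, got %r'
                         % (kind, next_token.kind))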
def fileopenbox(msg=None, title=None, default='*', filetypes=None, multiple=False):
"""
A dialog to get a file name.
**About the "default" argument**
The "default" argument specifies a filepath that (normally)
contains one or more wildcards.
fileopenbox will display only files that match the default filepath.
If omitted, defaults to "\*" (all files in the current directory).
WINDOWS EXAMPLE::
...default="c:/myjunk/*.py"
will open in directory c:\\myjunk\\ and show all Python files.
WINDOWS EXAMPLE::
...default="c:/myjunk/test*.py"
will open in directory c:\\myjunk\\ and show all Python files
whose names begin with "test".
Note that on Windows, fileopenbox automatically changes the path
separator to the Windows path separator (backslash).
**About the "filetypes" argument**
If specified, it should contain a list of items,
where each item is either:
- a string containing a filemask # e.g. "\*.txt"
- a list of strings, where all of the strings except the last one
are filemasks (each beginning with "\*.",
such as "\*.txt" for text files, "\*.py" for Python files, etc.).
and the last string contains a filetype description
EXAMPLE::
filetypes = ["*.css", ["*.htm", "*.html", "HTML files"] ]
.. note:: If the filetypes list does not contain ("All files","*"), it will be added.
If the filetypes list does not contain a filemask that includes
the extension of the "default" argument, it will be added.
For example, if default="\*abc.py"
and no filetypes argument was specified, then
"\*.py" will automatically be added to the filetypes argument.
:param str msg: the msg to be displayed.
:param str title: the window title
:param str default: filepath with wildcards
:param object filetypes: filemasks that a user can choose, e.g. "\*.txt"
:param bool multiple: If true, more than one file can be selected
:return: the name of a file, or None if user chose to cancel
"""
localRoot = Tk()
localRoot.withdraw()
initialbase, initialfile, initialdir, filetypes = fileboxSetup(
default, filetypes)
# ------------------------------------------------------------
# if initialfile contains no wildcards; we don't want an
# initial file. It won't be used anyway.
# Also: if initialbase is simply "*", we don't want an
# initialfile; it is not doing any useful work.
# ------------------------------------------------------------
if (initialfile.find("*") < 0) and (initialfile.find("?") < 0):
initialfile = None
elif initialbase == "*":
initialfile = None
func = ut.tk_FileDialog.askopenfilenames if multiple else ut.tk_FileDialog.askopenfilename
ret_val = func(parent=localRoot, title=getFileDialogTitle(msg, title),
               initialdir=initialdir, initialfile=initialfile,
               filetypes=filetypes)
if multiple:
    f = [os.path.normpath(x) for x in localRoot.tk.splitlist(ret_val)]
else:
    # guard the cancel case first: os.path.normpath('') would yield '.'
    f = os.path.normpath(ret_val) if ret_val else None
localRoot.destroy()
if not f:
    return None
return f
def compute_utility(self, board, move, player):
"If X wins with this move, return 1; if O return -1; else return 0."
if (self.k_in_row(board, move, player, (0, 1)) or
self.k_in_row(board, move, player, (1, 0)) or
self.k_in_row(board, move, player, (1, -1)) or
self.k_in_row(board, move, player, (1, 1))):
return if_(player == 'X', +1, -1)
else:
return 0
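The if_ helper here appears to be a pre-Python-2.5 conditional shim of the form if_(test, result, alternative); on modern Python the winning branch reduces to a plain conditional expression:

return +1 if player == 'X' else -1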
def safeArgs(args):
"""Iterate over valid, finite values in an iterable.
Skip any items that are None, NaN, or infinite.
"""
return (arg for arg in args
if arg is not None and not math.isnan(arg) and not math.isinf(arg))
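A quick usage sketch (math must already be imported for safeArgs itself):

list(safeArgs([1.0, None, float('nan'), 2.5, float('inf')]))
# -> [1.0, 2.5]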
def u_distance_correlation_sqr(x, y, **kwargs):
"""
u_distance_correlation_sqr(x, y, *, exponent=1)
Computes the bias-corrected estimator for the squared distance correlation
between two random vectors.
Parameters
----------
x: array_like
First random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
y: array_like
Second random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
exponent: float
Exponent of the Euclidean distance, in the range :math:`(0, 2)`.
Equivalently, it is twice the Hurst parameter of fractional Brownian
motion.
Returns
-------
numpy scalar
Value of the bias-corrected estimator of the squared distance
correlation.
See Also
--------
distance_correlation
distance_correlation_sqr
Notes
-----
The algorithm uses the fast distance covariance algorithm proposed in
:cite:`b-fast_distance_correlation` when possible.
Examples
--------
>>> import numpy as np
>>> import dcor
>>> a = np.array([[1, 2, 3, 4],
... [5, 6, 7, 8],
... [9, 10, 11, 12],
... [13, 14, 15, 16]])
>>> b = np.array([[1], [0], [0], [1]])
>>> dcor.u_distance_correlation_sqr(a, a)
1.0
>>> dcor.u_distance_correlation_sqr(a, b)
-0.5
>>> dcor.u_distance_correlation_sqr(b, b)
1.0
>>> dcor.u_distance_correlation_sqr(a, b, exponent=0.5)
... # doctest: +ELLIPSIS
-0.4050479...
"""
if _can_use_fast_algorithm(x, y, **kwargs):
return _u_distance_correlation_sqr_fast(x, y)
else:
return _u_distance_correlation_sqr_naive(x, y, **kwargs)
def execute(self, timeSeries):
"""Creates a new TimeSeries containing the smoothed and forcasted values.
:return: TimeSeries object containing the smoothed TimeSeries,
including the forecasted values.
:rtype: TimeSeries
:note: The first normalized value is chosen as the starting point.
"""
# determine the number of values to forecast, if necessary
self._calculate_values_to_forecast(timeSeries)
# extract the required parameters, performance improvement
alpha = self._parameters["smoothingFactor"]
valuesToForecast = self._parameters["valuesToForecast"]
# initialize some variables
resultList = []
estimator = None
lastT = None
# "It's always about performance!"
append = resultList.append
# smooth the existing TimeSeries data
for idx in xrange(len(timeSeries)):
# get the current entry to increase performance
t = timeSeries[idx]
# get the initial estimate
if estimator is None:
estimator = t[1]
continue
# add the first value to the resultList without any correction
if 0 == len(resultList):
append([t[0], estimator])
lastT = t
continue
# calculate the error made during the last estimation
error = lastT[1] - estimator
# calculate the new estimator, based on the last occurred value, the error and the smoothingFactor
estimator = estimator + alpha * error
# save the current value for the next iteration
lastT = t
# add an entry to the result
append([t[0], estimator])
# forecast additional values if requested
if valuesToForecast > 0:
currentTime = resultList[-1][0]
normalizedTimeDiff = currentTime - resultList[-2][0]
for idx in xrange(valuesToForecast):
currentTime += normalizedTimeDiff
# reuse everything
error = lastT[1] - estimator
estimator = estimator + alpha * error
# add a forecasted value
append([currentTime, estimator])
# set variables for next iteration
lastT = resultList[-1]
# return a TimeSeries, containing the result
return TimeSeries.from_twodim_list(resultList)
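The update in the loop above is plain simple exponential smoothing, s_new = s + alpha * (x - s). A minimal standalone sketch of the same recurrence, outside the TimeSeries machinery (assumes a non-empty list of floats):

def simple_exponential_smoothing(values, alpha):
    """Smooth a list of floats; the first value seeds the estimator."""
    estimator = values[0]
    out = [estimator]
    for x in values[1:]:
        estimator = estimator + alpha * (x - estimator)
        out.append(estimator)
    return out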
def set_objective(self, measured_metabolites):
'''
Updates objective function for given measured metabolites.
:param dict measured_metabolites: dict in which keys are metabolite names
and values are floats representing fold changes in metabolites.
'''
self.clean_objective()
for k, v in measured_metabolites.items():
m = self.model.metabolites.get_by_id(k)
total_stoichiometry = m.total_stoichiometry(
self.without_transports)
for r in m.producers(self.without_transports):
update_rate = v * r.metabolites[m] / total_stoichiometry
r.objective_coefficient += update_rate
def pop(self, name: str, default: Any=_sentinel) -> Any:
"""Pop, get and remove the named attribute of this instance."""
if default is _sentinel:
return self.__dict__.pop(name)
else:
return self.__dict__.pop(name, default)
def p_scalar__doublequote(self, p):
"""
scalar : DOUBLEQUOTE_START SCALAR DOUBLEQUOTE_END
"""
scalar = re.sub(r'\n\s+', ' ', str(p[2]))
p[0] = Str(scalar.replace('\\"', '"'))
def smart_email_send(self, smart_email_id, to, consent_to_track, cc=None, bcc=None, attachments=None, data=None, add_recipients_to_list=None):
"""Sends the smart email."""
validate_consent_to_track(consent_to_track)
body = {
"To": to,
"CC": cc,
"BCC": bcc,
"Attachments": attachments,
"Data": data,
"AddRecipientsToList": add_recipients_to_list,
"ConsentToTrack": consent_to_track,
}
response = self._post("/transactional/smartEmail/%s/send" %
smart_email_id, json.dumps(body))
return json_to_py(response)
def manage(self):
"""
Manage the task to handle restarts, reconfiguration, etc.
Returns True to request a shorter period before the next call,
False if nothing special is needed.
"""
log = self._params.get('log', self._discard)
if self._stopping:
log.debug("Task '%s', stopping, retrying stop()", self._name)
return self.stop()
now = time.time()
if self._started and self._limit:
if now > self._limit:
log.debug("Task '%s', time limit exceeded by %s, stopping", self._name, deltafmt(now - self._limit))
return self.stop()
else:
log.debug("Task '%s', time limit remaining %s", self._name, deltafmt(self._limit - now))
if self._legion.is_exiting():
log.debug("Not managing '%s', legion is exiting", self._name)
return False
log.debug("managing '%s'", self._name)
return self._start()
def setup(self, reason, grr_server_url, grr_username, grr_password,
approvers=None, verify=True):
"""Initializes a GRR hunt result collector.
Args:
reason: justification for GRR access.
grr_server_url: GRR server URL.
grr_username: GRR username.
grr_password: GRR password.
approvers: list of GRR approval recipients.
verify: boolean, whether to verify the GRR server's x509 certificate.
"""
grr_auth = (grr_username, grr_password)
self.approvers = []
if approvers:
self.approvers = [item.strip() for item in approvers.strip().split(',')]
self.grr_api = grr_api.InitHttp(api_endpoint=grr_server_url,
auth=grr_auth,
verify=verify)
self.output_path = tempfile.mkdtemp()
self.reason = reason
def get_result(self, indices_or_msg_ids=None, block=None):
"""Retrieve a result by msg_id or history index, wrapped in an AsyncResult object.
If the client already has the results, no request to the Hub will be made.
This is a convenient way to construct AsyncResult objects, which are wrappers
that include metadata about execution, and allow for awaiting results that
were not submitted by this Client.
It can also be a convenient way to retrieve the metadata associated with
blocking execution, since it always retrieves the results' metadata.
Examples
--------
::
In [10]: r = client.apply()
Parameters
----------
indices_or_msg_ids : integer history index, str msg_id, or list of either
The indices or msg_ids of indices to be retrieved
block : bool
Whether to wait for the result to be done
Returns
-------
AsyncResult
A single AsyncResult object will always be returned.
AsyncHubResult
A subclass of AsyncResult that retrieves results from the Hub
"""
block = self.block if block is None else block
if indices_or_msg_ids is None:
indices_or_msg_ids = -1
if not isinstance(indices_or_msg_ids, (list,tuple)):
indices_or_msg_ids = [indices_or_msg_ids]
theids = []
for id in indices_or_msg_ids:
if isinstance(id, int):
id = self.history[id]
if not isinstance(id, basestring):
raise TypeError("indices must be str or int, not %r"%id)
theids.append(id)
local_ids = filter(lambda msg_id: msg_id in self.history or msg_id in self.results, theids)
remote_ids = filter(lambda msg_id: msg_id not in local_ids, theids)
if remote_ids:
ar = AsyncHubResult(self, msg_ids=theids)
else:
ar = AsyncResult(self, msg_ids=theids)
if block:
ar.wait()
return ar
def activate(self):
"""
Activate the scene.
"""
response = self.api_interface.set_device_state(self, None)
self._update_state_from_response(response)
def copy_previous_results(self):
"""Use the latest valid results_dir as the starting contents of the current results_dir.
Should be called after the cache is checked, since previous_results are not useful if there is
a cached artifact.
"""
# TODO(mateo): This should probably be managed by the task, which manages the rest of the
# incremental support.
if not self.previous_cache_key:
return None
previous_path = self._cache_manager._results_dir_path(self.previous_cache_key, stable=False)
if os.path.isdir(previous_path):
self.is_incremental = True
safe_rmtree(self._current_results_dir)
shutil.copytree(previous_path, self._current_results_dir)
safe_mkdir(self._current_results_dir)
relative_symlink(self._current_results_dir, self.results_dir)
# Set the self._previous last, so that it is only True after the copy completed.
self._previous_results_dir = previous_path
def on_add_cols(self, event):
"""
Show simple dialog that allows user to add a new column name
"""
col_labels = self.grid.col_labels
dia = pw.ChooseOne(self, yes="Add single columns", no="Add groups")
result1 = dia.ShowModal()
if result1 == wx.ID_CANCEL:
return
elif result1 == wx.ID_YES:
items = sorted([col_name for col_name in self.dm.index if col_name not in col_labels])
dia = pw.HeaderDialog(self, 'columns to add',
items1=list(items), groups=[])
dia.Centre()
result2 = dia.ShowModal()
else:
groups = self.dm['group'].unique()
dia = pw.HeaderDialog(self, 'groups to add',
items1=list(groups), groups=True)
dia.Centre()
result2 = dia.ShowModal()
new_headers = []
if result2 == 5100:  # 5100 is wx.ID_OK
new_headers = dia.text_list
# if there is nothing to add, quit
if not new_headers:
return
if result1 == wx.ID_YES:
# add individual headers
errors = self.add_new_grid_headers(new_headers)
else:
# add header groups
errors = self.add_new_header_groups(new_headers)
if errors:
errors_str = ', '.join(errors)
pw.simple_warning('You are already using the following headers: {}\nSo they will not be added'.format(errors_str))
# problem: if widgets above the grid are too wide,
# the grid does not re-size when adding columns
# awkward solution (causes flashing):
if self.grid.GetWindowStyle() != wx.DOUBLE_BORDER:
self.grid.SetWindowStyle(wx.DOUBLE_BORDER)
self.main_sizer.Fit(self)
self.grid.SetWindowStyle(wx.NO_BORDER)
self.Centre()
self.main_sizer.Fit(self)
#
self.grid.changes = set(range(self.grid.GetNumberRows()))
dia.Destroy()
def _fx_mapping(raw_rates):
''' Map raw output to clearer labels '''
return {pair[0].lower(): {
'timeStamp': pair[1],
'bid': float(pair[2] + pair[3]),
'ask': float(pair[4] + pair[5]),
'high': float(pair[6]),
'low': float(pair[7])
} for pair in map(lambda x: x.split(','), raw_rates)}
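The indexing above implies raw comma-separated quote lines whose bid and ask each arrive split across two fields. A hedged, illustrative input (the real wire format is not shown in this code):

raw = ['EURUSD,1546300800,1.14,56,1.14,59,1.1502,1.1389']
_fx_mapping(raw)
# -> {'eurusd': {'timeStamp': '1546300800', 'bid': 1.1456,
#                'ask': 1.1459, 'high': 1.1502, 'low': 1.1389}}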
def put_mapping(self, doc_type=None, mapping=None, indices=None, ignore_conflicts=None):
"""
Register specific mapping definition for a specific type against one or more indices.
(See :ref:`es-guide-reference-api-admin-indices-put-mapping`)
"""
if not isinstance(mapping, dict):
if mapping is None:
mapping = {}
if hasattr(mapping, "as_dict"):
mapping = mapping.as_dict()
if doc_type:
path = self.conn._make_path(indices, doc_type, "_mapping")
if doc_type not in mapping:
mapping = {doc_type: mapping}
else:
path = self.conn._make_path(indices, (), "_mapping")
parameters = {}
if ignore_conflicts is not None:
parameters['ignore_conflicts'] = ignore_conflicts
return self.conn._send_request('PUT', path, mapping, params=parameters)
def kill(self, typ=TaskExit, value=None, tb=None):
"""Terminates the current task by raising an exception into it.
Whatever that task might be doing; be it waiting for I/O or another
primitive, it sees an exception as soon as it yields control.
By default, this exception is TaskExit, but a specific exception
may be specified.
"""
if not self.is_alive():
return
if not value:
value = typ()
if not self._running:
# task hasn't started yet and therefore throw won't work
def just_raise():
six.reraise(typ, value, tb)
self.run = just_raise
return
evergreen.current.loop.call_soon(self.throw, typ, value, tb)
def can_vote_on_poll(self, request):
"""Based on jmbo.models.can_vote."""
# can't vote if liking is closed
if self.votes_closed:
return False, 'closed'
# can't vote if liking is disabled
if not self.votes_enabled:
return False, 'disabled'
# anonymous users can't vote if anonymous votes are disabled
if not request.user.is_authenticated() and not \
self.anonymous_votes:
return False, 'auth_required'
# return false if existing votes are found
votes = Vote.objects.filter(
object_id__in=[o.id for o in self.polloption_set.all()],
token=request.secretballot_token
)
if votes.exists():
return False, 'voted'
else:
return True, 'can_vote'
def _update_sid_to_last_existing_pid_map(pid):
"""Set chain head PID to the last existing object in the chain to which ``pid``
belongs. If SID has been set for chain, it resolves to chain head PID.
Intended to be called in MNStorage.delete() and other chain manipulation.
Preconditions:
- ``pid`` must exist and be verified to be a PID.
d1_gmn.app.views.asserts.is_existing_object()
"""
last_pid = _find_head_or_latest_connected(pid)
chain_model = _get_chain_by_pid(last_pid)
if not chain_model:
return
chain_model.head_pid = d1_gmn.app.did.get_or_create_did(last_pid)
chain_model.save()
def conference_hangup(self, call_params):
"""REST Conference Hangup helper
"""
path = '/' + self.api_version + '/ConferenceHangup/'
method = 'POST'
return self.request(path, method, call_params)
def apns_fetch_inactive_ids():
"""
Queries the APNS server for IDs that are no longer active since
the last fetch
"""
with closing(_apns_create_socket_to_feedback()) as socket:
inactive_ids = []
for _, registration_id in _apns_receive_feedback(socket):
inactive_ids.append(codecs.encode(registration_id, 'hex_codec'))
return inactive_ids
async def delay(self, duration_s: int):
""" Pause and sleep
"""
self.pause()
await asyncio.sleep(duration_s)
self.resume()
def raise_for_execution_errors(nb, output_path):
"""Assigned parameters into the appropriate place in the input notebook
Parameters
----------
nb : NotebookNode
Executable notebook object
output_path : str
Path to write executed notebook
"""
error = None
for cell in nb.cells:
if cell.get("outputs") is None:
continue
for output in cell.outputs:
if output.output_type == "error":
error = PapermillExecutionError(
exec_count=cell.execution_count,
source=cell.source,
ename=output.ename,
evalue=output.evalue,
traceback=output.traceback,
)
break
if error:
# Write notebook back out with the Error Message at the top of the Notebook.
error_msg = ERROR_MESSAGE_TEMPLATE % str(error.exec_count)
error_msg_cell = nbformat.v4.new_code_cell(
source="%%html\n" + error_msg,
outputs=[
nbformat.v4.new_output(output_type="display_data", data={"text/html": error_msg})
],
metadata={"inputHidden": True, "hide_input": True},
)
nb.cells = [error_msg_cell] + nb.cells
write_ipynb(nb, output_path)
raise error
def set(self, indexes, values=None):
"""
Given indexes will set a sub-set of the Series to the values provided. This method will direct to the below
methods based on what types are passed in for the indexes. If the indexes contains values not in the Series
then new rows or columns will be added.
:param indexes: indexes value, list of indexes values, or a list of booleans.
:param values: value or list of values to set. If a list then must be the same length as the indexes parameter.
:return: nothing
"""
if isinstance(indexes, (list, blist)):
self.set_rows(indexes, values)
else:
self.set_cell(indexes, values)
def value(self):
"""Set a calculated value for this Expression.
Used when writing formulas using XlsxWriter to give cells
an initial value when the sheet is loaded without being calculated.
"""
try:
if isinstance(self.__value, Expression):
return self.__value.value
return self.__value
except AttributeError:
return 0
def xy_reading_order(e1, e2):
"""
A comparator to sort bboxes from left to right, top to bottom
"""
b1 = e1.bbox
b2 = e2.bbox
if round(b1[x0]) == round(b2[x0]):
return float_cmp(b1[y0], b2[y0])
return float_cmp(b1[x0], b2[x0])
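Being a cmp-style comparator, it slots into sorting via functools.cmp_to_key (elements here is an assumed list of objects carrying a .bbox; x0/y0 are index constants from the surrounding module):

from functools import cmp_to_key
elements.sort(key=cmp_to_key(xy_reading_order))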
def monthly_cooling_design_days_100(self):
"""A list of 12 objects representing monthly 10.0% cooling design days."""
if self.monthly_found is False or self._monthly_db_100 == [] \
or self._monthly_wb_100 == []:
return []
else:
db_conds = [DryBulbCondition(x, y) for x, y in zip(
self._monthly_db_100, self._monthly_db_range_50)]
hu_conds = [HumidityCondition(
'Wetbulb', x, self._stand_press_at_elev) for x in self._monthly_wb_100]
ws_conds = self.monthly_wind_conditions
sky_conds = self.monthly_clear_sky_conditions
return [DesignDay(
'10% Cooling Design Day for {}'.format(self._months[i]),
'SummerDesignDay', self._location,
db_conds[i], hu_conds[i], ws_conds[i], sky_conds[i])
for i in xrange(12)]
def build_image(self, conf, pushing=False):
"""Build this image"""
with conf.make_context() as context:
try:
stream = BuildProgressStream(conf.harpoon.silent_build)
with self.remove_replaced_images(conf) as info:
cached = NormalBuilder().build(conf, context, stream)
info['cached'] = cached
except (KeyboardInterrupt, Exception) as error:
exc_info = sys.exc_info()
if stream.current_container:
Runner().stage_build_intervention(conf, stream.current_container)
if isinstance(error, KeyboardInterrupt):
raise UserQuit()
else:
six.reraise(*exc_info)
finally:
if stream and stream.intermediate_images and conf.cleanup_intermediate_images:
for image in stream.intermediate_images:
log.info("Deleting intermediate image\timage=%s", image)
try:
conf.harpoon.docker_api.remove_image(image)
except Exception as error:
log.error("Failed to remove intermediate image\timage=%s\terror=%s", image, error)
return cached
def setTableType( self, tableType ):
"""
Sets the table type for this record box to the inputed table type.
:param tableType | <orb.Table>
"""
self._tableType = tableType
if tableType:
self._tableTypeName = tableType.schema().name()
else:
self._tableTypeName = ''
def encode(self, word, max_length=5, zero_pad=True):
"""Return the Roger Root code for a word.
Parameters
----------
word : str
The word to transform
max_length : int
The maximum length (default 5) of the code to return
zero_pad : bool
Pad the end of the return value with 0s to achieve a max_length
string
Returns
-------
str
The Roger Root code
Examples
--------
>>> roger_root('Christopher')
'06401'
>>> roger_root('Niall')
'02500'
>>> roger_root('Smith')
'00310'
>>> roger_root('Schmidt')
'06310'
"""
# uppercase, normalize, decompose, and filter non-A-Z out
word = unicode_normalize('NFKD', text_type(word.upper()))
word = word.replace('ß', 'SS')
word = ''.join(c for c in word if c in self._uc_set)
code = ''
pos = 0
# Do first digit(s) first
for num in range(4, 0, -1):
if word[:num] in self._init_patterns[num]:
code = self._init_patterns[num][word[:num]]
pos += num
break
# Then code subsequent digits
while pos < len(word):
for num in range(4, 0, -1): # pragma: no branch
if word[pos : pos + num] in self._med_patterns[num]:
code += self._med_patterns[num][word[pos : pos + num]]
pos += num
break
code = self._delete_consecutive_repeats(code)
code = code.replace('*', '')
if zero_pad:
code += '0' * max_length
return code[:max_length]
def make_request_parser(model_or_inst, excludes=None, only=None, for_populate=False):
"""Pass a `model class` or `model instance` to this function,
then, it will generate a `RequestParser` that extract user request data from `request.json`
according to the model class's definition.
Parameters `excludes` and `only` can be `str` or a list of `str`;
they are used to specify which columns should be handled.
If you pass `excludes` and `only` at the same time, only `excludes` will be used.
Also, the primary key of the model will not be added to the `RequestParser`'s argument list,
unless you explicitly request it via the `only` parameter.
If you pass in a model class, rather than a model instance, the function will do `required` checking
for columns with nullable=False.
(If you pass in a model instance, the `required` checking is skipped, because in that situation
the user should be allowed to leave a field unassigned.)
"""
is_inst = _is_inst(model_or_inst)
if isinstance(excludes, six.string_types):
excludes = [excludes]
if excludes and only:
only = None
elif isinstance(only, six.string_types):
only = [only]
parser = RequestPopulator() if for_populate else reqparse.RequestParser()
for col in model_or_inst.__table__.columns:
if only:
if col.name not in only:
continue
elif (excludes and col.name in excludes) or col.primary_key:
continue
col_type = col.type.python_type
kwargs = {
"type": _type_dict.get(col_type.__name__, col_type) if hasattr(col_type, '__name__') else col_type
}
# When the context was to creating a new model instance, if a field has no default value, and is not nullable,
# mark it's corresponding argument as `required`.
# 创建新数据库实例时,若一个字段既没有默认值,又不允许 NULL,则把它对应 arg 设为 required
if not is_inst and col.default is None and col.server_default is None and not col.nullable:
kwargs["required"] = True
parser.add_argument(col.name, **kwargs)
return parser
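A hedged usage sketch inside a Flask-RESTful request handler (User is a stand-in SQLAlchemy model, not part of this code):

parser = make_request_parser(User, excludes='password')
args = parser.parse_args()   # extracts the remaining columns from request.json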
def id_lookup(paper_id, idtype):
"""Take an ID of type PMID, PMCID, or DOI and lookup the other IDs.
If the DOI is not found in Pubmed, try to obtain the DOI by doing a
reverse-lookup of the DOI in CrossRef using article metadata.
Parameters
----------
paper_id : str
ID of the article.
idtype : str
Type of the ID: 'pmid', 'pmcid', or 'doi'.
Returns
-------
ids : dict
A dictionary with the following keys: pmid, pmcid and doi.
"""
if idtype not in ('pmid', 'pmcid', 'doi'):
raise ValueError("Invalid idtype %s; must be 'pmid', 'pmcid', "
"or 'doi'." % idtype)
ids = {'doi': None, 'pmid': None, 'pmcid': None}
pmc_id_results = pmc_client.id_lookup(paper_id, idtype)
# Start with the results of the PMC lookup and then override with the
# provided ID
ids['pmid'] = pmc_id_results.get('pmid')
ids['pmcid'] = pmc_id_results.get('pmcid')
ids['doi'] = pmc_id_results.get('doi')
ids[idtype] = paper_id
# If we gave a DOI, then our work is done after looking for PMID and PMCID
if idtype == 'doi':
return ids
# If we gave a PMID or PMCID, we need to check to see if we got a DOI.
# If we got a DOI back, we're done.
elif ids.get('doi'):
return ids
# If we get here, then we've given PMID or PMCID and don't have a DOI yet.
# If we gave a PMCID and have neither a PMID nor a DOI, then we'll run
# into problems later on when we try to the reverse lookup using CrossRef.
# So we bail here and return what we have (PMCID only) with a warning.
if ids.get('pmcid') and ids.get('doi') is None and ids.get('pmid') is None:
logger.warning('%s: PMCID without PMID or DOI' % ids.get('pmcid'))
return ids
# To clarify the state of things at this point:
assert ids.get('pmid') is not None
assert ids.get('doi') is None
# As a last result, we try to get the DOI from CrossRef (which internally
# tries to get the DOI from Pubmed in the process of collecting the
# necessary metadata for the lookup):
ids['doi'] = crossref_client.doi_query(ids['pmid'])
# It may still be None, but at this point there's nothing we can do...
return ids
def _get_path_infomation(self):
"""Get useful infomation from the device path."""
long_identifier = self._device_path.split('/')[4]
protocol, remainder = long_identifier.split('-', 1)
identifier, _, device_type = remainder.rsplit('-', 2)
return (protocol, identifier, device_type)
def accepts_port(self, port):
"""
Query whether this Router will accept the given port.
"""
if self.rejected_ports is None and self.accepted_ports is None:
raise RuntimeError("policy hasn't been set yet")
if self.rejected_ports:
for x in self.rejected_ports:
if port == x:
return False
return True
for x in self.accepted_ports:
if port == x:
return True
return False
def child(self, **kwargs):
'''set childSelector.'''
return AutomatorDeviceObject(
self.device,
self.selector.clone().child(**kwargs)
)
def install_python(python, runas=None):
'''
Install a python implementation.
python
The version of python to install, should match one of the
versions listed by pyenv.list
CLI Example:
.. code-block:: bash
salt '*' pyenv.install_python 2.0.0-p0
'''
python = re.sub(r'^python-', '', python)
env = None
env_list = []
if __grains__['os'] in ('FreeBSD', 'NetBSD', 'OpenBSD'):
env_list.append('MAKE=gmake')
if __salt__['config.option']('pyenv.build_env'):
env_list.append(__salt__['config.option']('pyenv.build_env'))
if env_list:
env = ' '.join(env_list)
ret = {}
ret = _pyenv_exec('install', python, env=env, runas=runas, ret=ret)
if ret['retcode'] == 0:
rehash(runas=runas)
return ret['stderr']
else:
# Cleanup the failed installation so it doesn't list as installed
uninstall_python(python, runas=runas)
return False
def get_orders(self, address, chain_name='NEO', contract_version='V2', pair=None, from_epoch_time=None,
order_status=None, before_id=None, limit=50):
"""
Function to fetch the order history of the given address.
Execution of this function is as follows::
get_orders(address=neo_get_scripthash_from_address(address=address))
The expected return result for this function is as follows::
[{
'id': '7cbdf481-6acf-4bf3-a1ed-4773f31e6931',
'blockchain': 'neo',
'contract_hash': 'a195c1549e7da61b8da315765a790ac7e7633b82',
'address': 'fea2b883725ef2d194c9060f606cd0a0468a2c59',
'side': 'buy',
'offer_asset_id': 'c56f33fc6ecfcd0c225c4ab356fee59390af8560be0e930faebe74a6daff7c9b',
'want_asset_id': 'ab38352559b8b203bde5fddfa0b07d8b2525e132',
'offer_amount': '53718500',
'want_amount': '110000000000',
'transfer_amount': '0',
'priority_gas_amount': '0',
'use_native_token': True,
'native_fee_transfer_amount': 0,
'deposit_txn': None,
'created_at': '2018-08-03T02:44:47.692Z',
'status': 'processed',
'fills': [{
'id': 'b6f9e530-60ff-46ff-9a71-362097a2025e',
'offer_hash': '95b3b03be0bff8f58aa86a8dd599700bbaeaffc05078329d5b726b6b995f4cda',
'offer_asset_id': 'c56f33fc6ecfcd0c225c4ab356fee59390af8560be0e930faebe74a6daff7c9b',
'want_asset_id': 'ab38352559b8b203bde5fddfa0b07d8b2525e132',
'fill_amount': '47833882',
'want_amount': '97950000000',
'filled_amount': '',
'fee_asset_id': 'ab38352559b8b203bde5fddfa0b07d8b2525e132',
'fee_amount': '73462500',
'price': '0.00048835',
'txn': None,
'status': 'success',
'created_at': '2018-08-03T02:44:47.706Z',
'transaction_hash': '694745a09e33845ec008cfb79c73986a556e619799ec73274f82b30d85bda13a'
}],
'makes': [{
'id': '357088a0-cc80-49ab-acdd-980589c2d7d8',
'offer_hash': '420cc85abf02feaceb1bcd91489a0c1949c972d2a9a05ae922fa15d79de80c00',
'available_amount': '0',
'offer_asset_id': 'c56f33fc6ecfcd0c225c4ab356fee59390af8560be0e930faebe74a6daff7c9b',
'offer_amount': '5884618',
'want_asset_id': 'ab38352559b8b203bde5fddfa0b07d8b2525e132',
'want_amount': '12050000000',
'filled_amount': '0.0',
'txn': None,
'cancel_txn': None,
'price': '0.000488350041493775933609958506224066390041494',
'status': 'cancelled',
'created_at': '2018-08-03T02:44:47.708Z',
'transaction_hash': '1afa946546550151bbbd19f197a87cec92e9be58c44ec431cae42076298548b7',
'trades': []
}]
}, {
....
}]
:param address: The ScriptHash of the address to filter orders for.
:type address: str
:param pair: The trading pair to filter order requests on.
:type pair: str
:param chain_name: The name of the chain to find orders against.
:type chain_name: str
:param contract_version: The version of the contract to find orders against.
:type contract_version: str
:param from_epoch_time: Only return orders that are last updated at or after this time.
:type from_epoch_time: int
:param order_status: Only return orders have this status. Possible values are open, cancelled, completed.
:type order_status: str
:param before_id: Only return orders that are created before the order with this id.
:type before_id: str
:param limit: Only return up to this number of orders (min: 1, max: 200, default: 50).
:type limit: int
:return: List of dictionaries containing the orders for the given NEO address and (optional) trading pair.
"""
api_params = {
"address": address,
"contract_hash": self.get_contracts()[chain_name.upper()][contract_version.upper()],
"limit": limit
}
if pair is not None:
api_params['pair'] = pair
if from_epoch_time is not None:
api_params['from_epoch_time'] = from_epoch_time
if order_status is not None:
api_params['order_status'] = order_status
if before_id is not None:
api_params['before_id'] = before_id
return self.request.get(path='/orders', params=api_params)
def addfile(self, tarinfo, fileobj=None):
"""Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
given, tarinfo.size bytes are read from it and added to the archive.
You can create TarInfo objects using gettarinfo().
On Windows platforms, `fileobj' should always be opened with mode
'rb' to avoid irritation about the file size.
"""
self._check("aw")
tarinfo = copy.copy(tarinfo)
buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
self.fileobj.write(buf)
self.offset += len(buf)
# If there's data to follow, append it.
if fileobj is not None:
copyfileobj(fileobj, self.fileobj, tarinfo.size)
blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
if remainder > 0:
self.fileobj.write(NUL * (BLOCKSIZE - remainder))
blocks += 1
self.offset += blocks * BLOCKSIZE
self.members.append(tarinfo)
def safe_zip(*args):
"""like zip but with these properties:
- returns a list, rather than an iterator. This is the old Python2 zip behavior.
- a guarantee that all arguments are the same length.
(normal zip silently drops entries to make them the same length)
"""
length = len(args[0])
if not all(len(arg) == length for arg in args):
raise ValueError("Lengths of arguments do not match: "
+ str([len(arg) for arg in args]))
return list(zip(*args))
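Quick illustration of both guarantees:

safe_zip([1, 2, 3], ['a', 'b', 'c'])   # [(1, 'a'), (2, 'b'), (3, 'c')]
safe_zip([1, 2], ['a', 'b', 'c'])      # raises ValueError instead of silently truncating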
def get_properties_by_type(self, type, recursive=True, parent_path=""):
"""
Returns a sorted list of fields that match the type.
:param type: the type of the field ("string", "integer", ...) or a list of types
:param recursive: recurse into sub-objects
:returns: a sorted list of fields that match the type
"""
if parent_path:
parent_path += "."
if isinstance(type, str):
if type == "*":
type = set(MAPPING_NAME_TYPE.keys()) - set(["nested", "multi_field", "multifield"])
else:
type = [type]
properties = []
for prop in list(self.properties.values()):
if prop.type in type:
properties.append((parent_path + prop.name, prop))
continue
elif prop.type == "multi_field" and prop.name in prop.fields and prop.fields[prop.name].type in type:
properties.append((parent_path + prop.name, prop))
continue
if not recursive:
continue
if prop.type in ["nested", "object"]:
properties.extend(
prop.get_properties_by_type(type, recursive=recursive, parent_path=parent_path + prop.name))
return sorted(properties)
def get(cls, uni_char):
"""Return the general category code (as Unicode string) for the given Unicode character"""
uni_char = unicod(uni_char) # Force to Unicode
return unicod(unicodedata.category(uni_char))
def supports(cls, template_file=None):
"""
:return: Whether the engine can process given template file or not.
"""
if anytemplate.compat.IS_PYTHON_3:
    cls._priority = 99
    return False  # never supported: this engine is not ported to Python 3
return super(Engine, cls).supports(template_file=template_file)
def store_in_internal_db(args, hsm, modhex_id, public_id, kh, aead):
""" Store record (AEAD) in YubiHSM internal DB """
if args.verbose:
print " %i bytes (%s) -> internal db..." % \
(len(aead.data), shorten_aead(aead)),
try:
hsm.db_store_yubikey(public_id.decode('hex'), kh, aead)
if args.verbose:
print "OK"
except pyhsm.exception.YHSM_CommandFailed as e:
if args.verbose:
print "%s" % (pyhsm.defines.status2str(e.status))
else:
print "Storing ID %s FAILED: %s" % (modhex_id, pyhsm.defines.status2str(e.status))
return False
return True
def quote(cls, value):
"""Quote the value for the cookie. This can be any object supported
by :attr:`serialization_method`.
:param value: the value to quote.
"""
if cls.serialization_method is not None:
value = cls.serialization_method.dumps(value)
if cls.quote_base64:
value = b''.join(base64.b64encode(value).splitlines()).strip()
return value
def add_and_get(self, delta):
'''
Atomically adds `delta` to the current value.
:param delta: The delta to add.
'''
with self._lock.exclusive:
self._value += delta
return self._value
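The original relies on a lock object exposing an `exclusive` context manager. A minimal self-contained equivalent using only the standard library (the class name here is illustrative):

import threading

class AtomicLong(object):
    """A counter whose add_and_get is safe across threads."""
    def __init__(self, value=0):
        self._value = value
        self._lock = threading.Lock()

    def add_and_get(self, delta):
        with self._lock:
            self._value += delta
            return self._value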
def num_throats(self, labels='all', mode='union'):
r"""
Return the number of throats of the specified labels
Parameters
----------
labels : list of strings, optional
The throat labels that should be included in the count.
If not supplied, all throats are counted.
mode : string, optional
Specifies how the count should be performed. The options are:
**'or', 'union', 'any'** : (default) Throats with *one or more* of
the given labels are counted.
**'and', 'intersection', 'all'** : Throats with *all* of the given
labels are counted.
**'xor', 'exclusive_or'** : Throats with *only one* of the given
labels are counted.
**'nor', 'none', 'not'** : Throats with *none* of the given labels
are counted.
**'nand'** : Throats with *some but not all* of the given labels
are counted.
**'xnor'** : Throats with *more than one* of the given labels are
counted.
Returns
-------
Nt : int
Number of throats with the specified labels
See Also
--------
num_pores
count
Notes
-----
Technically, *'nand'* and *'xnor'* should also count throats with
*none* of the labels, however, to make the count more useful these are
not included.
"""
# Count number of pores of specified type
Ts = self._get_indices(labels=labels, mode=mode, element='throat')
Nt = sp.shape(Ts)[0]
return Nt
def valid_id(opts, id_):
'''
Returns if the passed id is valid
'''
try:
if any(x in id_ for x in ('/', '\\', str('\0'))):
return False
return bool(clean_path(opts['pki_dir'], id_))
except (AttributeError, KeyError, TypeError, UnicodeDecodeError):
return False
def update(self, friendly_name):
"""
Update the FunctionInstance
:param unicode friendly_name: The friendly_name
:returns: Updated FunctionInstance
:rtype: twilio.rest.serverless.v1.service.function.FunctionInstance
"""
data = values.of({'FriendlyName': friendly_name, })
payload = self._version.update(
'POST',
self._uri,
data=data,
)
return FunctionInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
sid=self._solution['sid'],
)
def _checker(keywords):
"""Generate a checker which tests a given value not starts with keywords."""
def _(v):
"""Check a given value matches to keywords."""
for k in keywords:
if k in v:
return False
return True
return _
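Usage sketch: build the predicate once, then apply it to many values.

no_reserved = _checker(['tmp', 'cache'])
no_reserved('srcdir')    # True  -- contains no keyword
no_reserved('tmpdir')    # False -- contains 'tmp'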
def set_basic_selection(self, selection, value, fields=None):
"""Modify data for an item or region of the array.
Parameters
----------
selection : tuple
An integer index or slice or tuple of int/slice specifying the requested
region for each dimension of the array.
value : scalar or array-like
Value to be stored into the array.
fields : str or sequence of str, optional
For arrays with a structured dtype, one or more fields can be specified to set
data for.
Examples
--------
Setup a 1-dimensional array::
>>> import zarr
>>> import numpy as np
>>> z = zarr.zeros(100, dtype=int)
Set all array elements to the same scalar value::
>>> z.set_basic_selection(..., 42)
>>> z[...]
array([42, 42, 42, ..., 42, 42, 42])
Set a portion of the array::
>>> z.set_basic_selection(slice(10), np.arange(10))
>>> z.set_basic_selection(slice(-10, None), np.arange(10)[::-1])
>>> z[...]
array([ 0, 1, 2, ..., 2, 1, 0])
Setup a 2-dimensional array::
>>> z = zarr.zeros((5, 5), dtype=int)
Set all array elements to the same scalar value::
>>> z.set_basic_selection(..., 42)
Set a portion of the array::
>>> z.set_basic_selection((0, slice(None)), np.arange(z.shape[1]))
>>> z.set_basic_selection((slice(None), 0), np.arange(z.shape[0]))
>>> z[...]
array([[ 0, 1, 2, 3, 4],
[ 1, 42, 42, 42, 42],
[ 2, 42, 42, 42, 42],
[ 3, 42, 42, 42, 42],
[ 4, 42, 42, 42, 42]])
For arrays with a structured dtype, the `fields` parameter can be used to set
data for a specific field, e.g.::
>>> a = np.array([(b'aaa', 1, 4.2),
... (b'bbb', 2, 8.4),
... (b'ccc', 3, 12.6)],
... dtype=[('foo', 'S3'), ('bar', 'i4'), ('baz', 'f8')])
>>> z = zarr.array(a)
>>> z.set_basic_selection(slice(0, 2), b'zzz', fields='foo')
>>> z[:]
array([(b'zzz', 1, 4.2), (b'zzz', 2, 8.4), (b'ccc', 3, 12.6)],
dtype=[('foo', 'S3'), ('bar', '<i4'), ('baz', '<f8')])
Notes
-----
This method provides the underlying implementation for modifying data via square
bracket notation, see :func:`__setitem__` for equivalent examples using the
alternative notation.
See Also
--------
get_basic_selection, get_mask_selection, set_mask_selection,
get_coordinate_selection, set_coordinate_selection, get_orthogonal_selection,
set_orthogonal_selection, vindex, oindex, __getitem__, __setitem__
"""
# guard conditions
if self._read_only:
err_read_only()
# refresh metadata
if not self._cache_metadata:
self._load_metadata_nosync()
# handle zero-dimensional arrays
if self._shape == ():
return self._set_basic_selection_zd(selection, value, fields=fields)
else:
return self._set_basic_selection_nd(selection, value, fields=fields)
def arc(self, x, y, radius, start_angle, end_angle):
"""draw arc going counter-clockwise from start_angle to end_angle"""
self._add_instruction("arc", x, y, radius, start_angle, end_angle) | 0.009804 |
def create_course_completion(self, user_id, payload): # pylint: disable=unused-argument
"""
Send a completion status payload to the Degreed Completion Status endpoint
Args:
user_id: Unused.
payload: JSON encoded object (serialized from DegreedLearnerDataTransmissionAudit)
containing completion status fields per Degreed documentation.
Returns:
A tuple containing the status code and the body of the response.
Raises:
HTTPError: if we received a failure response code from Degreed
"""
return self._post(
urljoin(
self.enterprise_configuration.degreed_base_url,
self.global_degreed_config.completion_status_api_path
),
payload,
self.COMPLETION_PROVIDER_SCOPE
)
def execute(self):
"""
Stops the cluster if it's running.
"""
cluster_name = self.params.cluster
creator = make_creator(self.params.config,
storage_path=self.params.storage)
try:
cluster = creator.load_cluster(cluster_name)
except (ClusterNotFound, ConfigurationError) as err:
log.error("Cannot stop cluster `%s`: %s", cluster_name, err)
return os.EX_NOINPUT
if not self.params.yes:
confirm_or_abort(
"Do you want really want to stop cluster `{cluster_name}`?"
.format(cluster_name=cluster_name),
msg="Aborting upon user request.")
print("Destroying cluster `%s` ..." % cluster_name)
cluster.stop(force=self.params.force, wait=self.params.wait)
def acquire(self):
"""
Locks the account. Returns True on success, False if the account
is thread-local and must not be locked.
"""
if self.host:
self.parent.send(('acquire-account-for-host', self.host))
elif self.account_hash:
self.parent.send(('acquire-account-from-hash', self.account_hash))
else:
            self.parent.send(('acquire-account',))
response = self.parent.recv()
if isinstance(response, Exception):
raise response
if response is None:
return False
self.account_hash, \
self.user, \
self.password, \
self.authorization_password, \
self.key = response
return True | 0.002587 |
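For context, a minimal sketch of the parent side of this pipe protocol; the pool accessors are hypothetical names, and the reply is either None or the 5-tuple unpacked above:
def serve_one_acquire(conn, pool):
    request = conn.recv()
    if isinstance(request, tuple) and request[0] == 'acquire-account-for-host':
        account = pool.get_account_for_host(request[1])   # hypothetical accessor
    elif isinstance(request, tuple) and request[0] == 'acquire-account-from-hash':
        account = pool.get_account_by_hash(request[1])    # hypothetical accessor
    else:
        account = pool.get_any_account()                  # hypothetical accessor
    # Reply: None (unavailable) or (account_hash, user, password,
    # authorization_password, key).
    conn.send(account)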
def to_array(self):
"""
Serializes this GameHighScore to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
"""
array = super(GameHighScore, self).to_array()
array['position'] = int(self.position) # type int
array['user'] = self.user.to_array() # type User
array['score'] = int(self.score) # type int
return array | 0.004717 |
def crud_mutation_name(action, model):
"""
This function returns the name of a mutation that performs the specified
crud action on the given model service
"""
model_string = get_model_string(model)
    # make sure the mutation name is correctly camelcased
model_string = model_string[0].upper() + model_string[1:]
# return the mutation name
return "{}{}".format(action, model_string) | 0.004717 |
def delete(self):
"""
delete() executes the query by delegating to delete_by_query()
"""
es = connections.get_connection(self._using)
return AttrDict(
es.delete_by_query(
index=self._index,
body=self.to_dict(),
**self._params
)
) | 0.005698 |
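Typical usage through elasticsearch-dsl's Search object; the index name and query are placeholders:
from elasticsearch_dsl import Search
s = Search(index='my-index').query('match', title='stale')
response = s.delete()       # issues a delete-by-query for all matching docs
print(response.deleted)     # number of documents removed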
def extract_from_zip(path, candidates):
"""
    Given a zip archive and a list of candidate filenames, unzip the
    archive into a temporary directory and return the full paths of the
    files whose basenames match one of the candidates. The returned list
    is empty if no candidate is found within the archive.
:param path: pathname of the archive
:param candidates: list of names to search for
"""
temp_dir = tempfile.mkdtemp()
with zipfile.ZipFile(path) as archive:
archive.extractall(temp_dir)
return [f for f in collect_files(temp_dir)
if os.path.basename(f) in candidates] | 0.001661 |
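A usage sketch (archive path and candidate names are placeholders); since the list may be empty, the caller should check it:
paths = extract_from_zip('/tmp/job.zip', ['job.ini', 'job_hazard.ini'])
if not paths:
    raise IOError('no job.ini found in /tmp/job.zip')
job_ini = paths[0]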
def previous(self):
"""
        Returns a new CharacterDataWrapper for the previous page. The offset
        is clamped at 0 so it never goes negative.
        """
        new_offset = int(self.params['offset']) - int(self.params['limit'])
        self.params['offset'] = str(max(0, new_offset))
return self.marvel.get_characters(self.marvel, (), **self.params) | 0.010309 |
def add_function_attribute(self, attr):
"""Only works on function value
Parameters
-----------
attr : str
attribute name
"""
if not self.is_function:
raise ValueError('expected function value, got %s' % (self._kind,))
attrname = str(attr)
attrval = ffi.lib.LLVMPY_GetEnumAttributeKindForName(
_encode_string(attrname), len(attrname))
if attrval == 0:
raise ValueError('no such attribute {!r}'.format(attrname))
ffi.lib.LLVMPY_AddFunctionAttr(self, attrval) | 0.003419 |
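A small sketch using llvmlite's binding layer; the IR string is illustrative, and the usual one-time LLVM initialization calls are included for completeness:
import llvmlite.binding as llvm
llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()
mod = llvm.parse_assembly("define i32 @answer() {\n  ret i32 42\n}\n")
fn = mod.get_function('answer')
fn.add_function_attribute('nounwind')  # unknown attribute names raise ValueError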
def _infer_stmts(stmts, context, frame=None):
"""Return an iterator on statements inferred by each statement in *stmts*."""
inferred = False
if context is not None:
name = context.lookupname
context = context.clone()
else:
name = None
context = contextmod.InferenceContext()
for stmt in stmts:
if stmt is util.Uninferable:
yield stmt
inferred = True
continue
context.lookupname = stmt._infer_name(frame, name)
try:
for inferred in stmt.infer(context=context):
yield inferred
inferred = True
except exceptions.NameInferenceError:
continue
except exceptions.InferenceError:
yield util.Uninferable
inferred = True
if not inferred:
raise exceptions.InferenceError(
"Inference failed for all members of {stmts!r}.",
stmts=stmts,
frame=frame,
context=context,
) | 0.001938 |
def tracer_config(__init__, app, args, kwargs):
"""
Wraps the Tornado web application initialization so that the
TornadoTracing instance is created around an OpenTracing-compatible tracer.
"""
__init__(*args, **kwargs)
tracing = app.settings.get('opentracing_tracing')
tracer_callable = app.settings.get('opentracing_tracer_callable')
tracer_parameters = app.settings.get('opentracing_tracer_parameters', {})
if tracer_callable is not None:
if not callable(tracer_callable):
tracer_callable = _get_callable_from_name(tracer_callable)
tracer = tracer_callable(**tracer_parameters)
tracing = TornadoTracing(tracer)
if tracing is None:
tracing = TornadoTracing() # fallback to the global tracer
app.settings['opentracing_tracing'] = tracing
tracing._trace_all = app.settings.get('opentracing_trace_all',
DEFAULT_TRACE_ALL)
tracing._trace_client = app.settings.get('opentracing_trace_client',
DEFAULT_TRACE_CLIENT)
tracing._start_span_cb = app.settings.get('opentracing_start_span_cb',
None)
httpclient._set_tracing_enabled(tracing._trace_client)
if tracing._trace_client:
httpclient._set_tracing_info(tracing._tracer_obj,
tracing._start_span_cb) | 0.000696 |
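These options live in the Application settings dict; a hedged configuration example (the handler list and tracer choice are placeholders):
import tornado.web
from opentracing.mocktracer import MockTracer
app = tornado.web.Application(
    [],  # handlers go here
    opentracing_tracer_callable=MockTracer,  # or a dotted-path string
    opentracing_tracer_parameters={},
    opentracing_trace_all=True,
    opentracing_trace_client=True,
)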
def clear_trace_filter_cache():
'''
Clear the trace filter cache.
Call this after reloading.
'''
global should_trace_hook
try:
# Need to temporarily disable a hook because otherwise
# _filename_to_ignored_lines.clear() will never complete.
old_hook = should_trace_hook
should_trace_hook = None
# Clear the linecache
linecache.clearcache()
_filename_to_ignored_lines.clear()
finally:
should_trace_hook = old_hook | 0.00198 |
def get_property(self, key):
"""Expect Django Conf property"""
_key = DJANGO_CONF[key]
return getattr(self, _key, CONF_SPEC[_key]) | 0.012987 |
def opt(self, *, exception=None, record=False, lazy=False, ansi=False, raw=False, depth=0):
r"""Parametrize a logging call to slightly change generated log message.
Parameters
----------
exception : |bool|, |tuple| or |Exception|, optional
If it does not evaluate as ``False``, the passed exception is formatted and added to the
log message. It could be an |Exception| object or a ``(type, value, traceback)`` tuple,
otherwise the exception information is retrieved from |sys.exc_info|.
record : |bool|, optional
If ``True``, the record dict contextualizing the logging call can be used to format the
message by using ``{record[key]}`` in the log message.
lazy : |bool|, optional
If ``True``, the logging call attribute to format the message should be functions which
will be called only if the level is high enough. This can be used to avoid expensive
functions if not necessary.
ansi : |bool|, optional
If ``True``, logged message will be colorized according to the markups it possibly
contains.
raw : |bool|, optional
If ``True``, the formatting of each sink will be bypassed and the message will be send
as is.
depth : |int|, optional
Specify which stacktrace should be used to contextualize the logged message. This is
useful while using the logger from inside a wrapped function to retrieve worthwhile
information.
Returns
-------
:class:`~Logger`
A logger wrapping the core logger, but transforming logged message adequately before
sending.
Examples
--------
>>> try:
... 1 / 0
... except ZeroDivisionError:
... logger.opt(exception=True).debug("Exception logged with debug level:")
...
[18:10:02] DEBUG in '<module>' - Exception logged with debug level:
Traceback (most recent call last, catch point marked):
> File "<stdin>", line 2, in <module>
ZeroDivisionError: division by zero
>>> logger.opt(record=True).info("Current line is: {record[line]}")
[18:10:33] INFO in '<module>' - Current line is: 1
>>> logger.opt(lazy=True).debug("If sink <= DEBUG: {x}", x=lambda: math.factorial(2**5))
[18:11:19] DEBUG in '<module>' - If sink <= DEBUG: 263130836933693530167218012160000000
>>> logger.opt(ansi=True).warning("We got a <red>BIG</red> problem")
[18:11:30] WARNING in '<module>' - We got a BIG problem
>>> logger.opt(raw=True).debug("No formatting\n")
No formatting
>>> def wrapped():
... logger.opt(depth=1).info("Get parent context")
...
>>> def func():
... wrapped()
...
>>> func()
        [18:11:54] INFO in 'func' - Get parent context
"""
return Logger(self._extra, exception, record, lazy, ansi, raw, depth) | 0.005861 |
def get_historical_output(self, assessment, options):
"""
        Get the output of a historical assessment.
:param assessment: string
:param options: dict
"""
        responseFormat = None
if options and 'format' in options and options['format'] is not None:
responseFormat = options['format']
options['format'] = None
url = '/assessment/' + str(assessment) + '/output?' + urllib.parse.urlencode(options)
response = self.http.downstream(url, responseFormat)
return response | 0.007156 |
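A hypothetical call; the assessment id and option keys are placeholders rather than documented API parameters:
output = client.get_historical_output('assessment-123', {'format': 'csv'})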
def warsaw_up_to_warsaw(C, parameters=None, sectors=None):
"""Translate from the 'Warsaw up' basis to the Warsaw basis.
Parameters used:
    - `Vus`, `Vub`, `Vcb`, `delta`: elements of the unitary CKM matrix (defined
as the mismatch between left-handed quark mass matrix diagonalization
matrices).
"""
C_in = smeftutil.wcxf2arrays_symmetrized(C)
p = default_parameters.copy()
if parameters is not None:
# if parameters are passed in, overwrite the default values
p.update(parameters)
Uu = Ud = Ul = Ue = np.eye(3)
V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"])
Uq = V
C_out = smeftutil.flavor_rotation(C_in, Uq, Uu, Ud, Ul, Ue)
C_out = smeftutil.arrays2wcxf_nonred(C_out)
warsaw = wcxf.Basis['SMEFT', 'Warsaw']
all_wcs = set(warsaw.all_wcs) # to speed up lookup
return {k: v for k, v in C_out.items() if k in all_wcs} | 0.001083 |
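A usage sketch with WCxf-style Wilson coefficients; the coefficient key and CKM values are illustrative only:
C = {'lq1_1111': 1e-8}  # coefficients in the 'Warsaw up' basis
C_warsaw = warsaw_up_to_warsaw(C, parameters={
    'Vus': 0.2243, 'Vub': 3.62e-3, 'Vcb': 4.221e-2, 'delta': 1.27,
})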
def insert_picture(self, image_file):
"""
Return a |PlaceholderPicture| object depicting the image in
*image_file*, which may be either a path (string) or a file-like
object. The image is cropped to fill the entire space of the
placeholder. A |PlaceholderPicture| object has all the properties and
methods of a |Picture| shape except that the value of its
:attr:`~._BaseSlidePlaceholder.shape_type` property is
`MSO_SHAPE_TYPE.PLACEHOLDER` instead of `MSO_SHAPE_TYPE.PICTURE`.
"""
pic = self._new_placeholder_pic(image_file)
self._replace_placeholder_with(pic)
return PlaceholderPicture(pic, self._parent) | 0.002853 |
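Typical use with python-pptx; the file names and placeholder index are placeholders and assume the slide layout provides a picture placeholder:
from pptx import Presentation
prs = Presentation('template_with_picture_placeholder.pptx')
placeholder = prs.slides[0].placeholders[1]  # must be a picture placeholder
picture = placeholder.insert_picture('photo.jpg')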
def visit_Cond(self, node):
'''
        generic expression splitting algorithm. It works for both IfExp and
        If nodes, using W(rap) and U(n)W(rap) to bridge the difference
        between expressions and statements.
        The idea is to split a BinOp into three expressions:
        1. a (possibly empty) non-static expr
        2. an expr containing a static expr
        3. a (possibly empty) non-static expr
        Once split, the if body is refactored to preserve the semantics,
        and then recursively split again, until each static expr stands
        alone in a test condition
'''
NodeTy = type(node)
if NodeTy is ast.IfExp:
def W(x):
return x
def UW(x):
return x
else:
def W(x):
return [x]
def UW(x):
return x[0]
has_static_expr = self.gather(HasStaticExpression, node.test)
if not has_static_expr:
return node
if node.test in self.static_expressions:
return node
if not isinstance(node.test, ast.BinOp):
return node
before, static = [], []
values = [node.test.right, node.test.left]
def has_static_expression(n):
return self.gather(HasStaticExpression, n)
while values and not has_static_expression(values[-1]):
before.append(values.pop())
while values and has_static_expression(values[-1]):
static.append(values.pop())
after = list(reversed(values))
test_before = NodeTy(None, None, None)
if before:
assert len(before) == 1
test_before.test = before[0]
test_static = NodeTy(None, None, None)
if static:
test_static.test = static[0]
if len(static) > 1:
if after:
assert len(after) == 1
after = [ast.BinOp(static[1], node.test.op, after[0])]
else:
after = static[1:]
test_after = NodeTy(None, None, None)
if after:
assert len(after) == 1
test_after.test = after[0]
if isinstance(node.test.op, ast.BitAnd):
if after:
test_after.body = deepcopy(node.body)
test_after.orelse = deepcopy(node.orelse)
test_after = W(test_after)
else:
test_after = deepcopy(node.body)
if static:
test_static.body = test_after
test_static.orelse = deepcopy(node.orelse)
test_static = W(test_static)
else:
test_static = test_after
if before:
test_before.body = test_static
test_before.orelse = node.orelse
node = test_before
else:
node = UW(test_static)
elif isinstance(node.test.op, ast.BitOr):
if after:
test_after.body = deepcopy(node.body)
test_after.orelse = deepcopy(node.orelse)
test_after = W(test_after)
else:
test_after = deepcopy(node.orelse)
if static:
test_static.body = deepcopy(node.body)
test_static.orelse = test_after
test_static = W(test_static)
else:
test_static = test_after
if before:
test_before.body = deepcopy(node.body)
test_before.orelse = test_static
node = test_before
else:
node = UW(test_static)
else:
raise PythranSyntaxError("operator not supported in a static if",
node)
self.update = True
return self.visit(node) | 0.000776 |
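To illustrate the BitAnd case on plain boolean values: splitting `a & static & b` into nested tests isolates the static expression while preserving the semantics (a hand-written sketch, not the transformer's actual output):
def split_example(a, static, b):
    # Original: return 'body' if (a & static & b) else 'orelse'
    if a:              # 1. non-static prefix
        if static:     # 2. the static expression, now alone in a test
            if b:      # 3. non-static suffix
                return 'body'
            return 'orelse'
        return 'orelse'
    return 'orelse'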
def acs2d(input, exec_path='', time_stamps=False, verbose=False, quiet=False,
exe_args=None):
r"""
Run the acs2d.e executable as from the shell.
Output is automatically named based on input suffix:
+--------------------+----------------+------------------------------+
| INPUT | OUTPUT | EXPECTED DATA |
+====================+================+==============================+
| ``*_raw.fits`` | ``*_flt.fits`` | SBC image. |
+--------------------+----------------+------------------------------+
| ``*_blv_tmp.fits`` | ``*_flt.fits`` | ACSCCD output. |
+--------------------+----------------+------------------------------+
| ``*_blc_tmp.fits`` | ``*_flc.fits`` | ACSCCD output with PCTECORR. |
+--------------------+----------------+------------------------------+
| ``*_crj_tmp.fits`` | ``*_crj.fits`` | ACSREJ output. |
+--------------------+----------------+------------------------------+
| ``*_crc_tmp.fits`` | ``*_crc.fits`` | ACSREJ output with PCTECORR. |
+--------------------+----------------+------------------------------+
Parameters
----------
input : str or list of str
Input filenames in one of these formats:
* a single filename ('j1234567q_blv_tmp.fits')
* a Python list of filenames
* a partial filename with wildcards ('\*blv_tmp.fits')
* filename of an ASN table ('j12345670_asn.fits')
* an at-file (``@input``)
exec_path : str, optional
The complete path to ACS2D executable.
If not given, run ACS2D given by 'acs2d.e'.
time_stamps : bool, optional
Set to True to turn on the printing of time stamps.
verbose : bool, optional
Set to True for verbose output.
quiet : bool, optional
Set to True for quiet output.
exe_args : list, optional
Arbitrary arguments passed to underlying executable call.
        Note: Implementation uses subprocess.check_call and whitespace is
        not permitted. E.g. use exe_args=['--nThreads', '1']
"""
from stsci.tools import parseinput # Optional package dependency
if exec_path:
if not os.path.exists(exec_path):
raise OSError('Executable not found: ' + exec_path)
call_list = [exec_path]
else:
call_list = ['acs2d.e']
# Parse input to get list of filenames to process.
# acs2d.e only takes 'file1,file2,...'
infiles, dummy_out = parseinput.parseinput(input)
call_list.append(','.join(infiles))
if time_stamps:
call_list.append('-t')
if verbose:
call_list.append('-v')
if quiet:
call_list.append('-q')
if exe_args:
call_list.extend(exe_args)
subprocess.check_call(call_list) | 0.000342 |
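Typical invocations (the filenames are placeholders):
acs2d('*_blv_tmp.fits', time_stamps=True, verbose=True)
acs2d(['j1234567q_blv_tmp.fits', 'j1234568q_blv_tmp.fits'], quiet=True)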
def replace(self, text, pattern=None):
"""Replace selected text by *text*
If *pattern* is not None, replacing selected text using regular
expression text substitution"""
cursor = self.textCursor()
cursor.beginEditBlock()
        if pattern is not None:
            seltxt = to_text_string(cursor.selectedText())
        cursor.removeSelectedText()
if pattern is not None:
text = re.sub(to_text_string(pattern),
to_text_string(text), to_text_string(seltxt))
cursor.insertText(text)
cursor.endEditBlock() | 0.003241 |
def DeregisterHelper(cls, helper_class):
"""Deregisters a helper class.
The helper classes are identified based on their lower case name.
Args:
helper_class (type): class object of the argument helper.
Raises:
KeyError: if helper class is not set for the corresponding name.
"""
helper_name = helper_class.NAME.lower()
if helper_name not in cls._helper_classes:
raise KeyError('Helper class not set for name: {0:s}.'.format(
helper_class.NAME))
del cls._helper_classes[helper_name] | 0.00367 |
def spa_tmplt(**kwds):
""" Generate a minimal TaylorF2 approximant with optimations for the sin/cos
"""
# Pull out the input arguments
f_lower = kwds['f_lower']
delta_f = kwds['delta_f']
distance = kwds['distance']
mass1 = kwds['mass1']
mass2 = kwds['mass2']
s1z = kwds['spin1z']
s2z = kwds['spin2z']
phase_order = int(kwds['phase_order'])
#amplitude_order = int(kwds['amplitude_order'])
spin_order = int(kwds['spin_order'])
if 'out' in kwds:
out = kwds['out']
else:
out = None
amp_factor = spa_amplitude_factor(mass1=mass1, mass2=mass2) / distance
lal_pars = lal.CreateDict()
if phase_order != -1:
lalsimulation.SimInspiralWaveformParamsInsertPNPhaseOrder(
lal_pars, phase_order)
if spin_order != -1:
lalsimulation.SimInspiralWaveformParamsInsertPNSpinOrder(
lal_pars, spin_order)
#Calculate the PN terms
phasing = lalsimulation.SimInspiralTaylorF2AlignedPhasing(
float(mass1), float(mass2),
float(s1z), float(s2z),
lal_pars)
pfaN = phasing.v[0]
pfa2 = phasing.v[2] / pfaN
pfa3 = phasing.v[3] / pfaN
pfa4 = phasing.v[4] / pfaN
pfa5 = phasing.v[5] / pfaN
pfa6 = (phasing.v[6] - phasing.vlogv[6] * log(4)) / pfaN
pfa7 = phasing.v[7] / pfaN
pfl5 = phasing.vlogv[5] / pfaN
pfl6 = phasing.vlogv[6] / pfaN
piM = lal.PI * (mass1 + mass2) * lal.MTSUN_SI
kmin = int(f_lower / float(delta_f))
vISCO = 1. / sqrt(6.)
fISCO = vISCO * vISCO * vISCO / piM
kmax = int(fISCO / delta_f)
f_max = ceilpow2(fISCO)
n = int(f_max / delta_f) + 1
if not out:
htilde = FrequencySeries(zeros(n, dtype=numpy.complex64), delta_f=delta_f, copy=False)
else:
if type(out) is not Array:
raise TypeError("Output must be an instance of Array")
if len(out) < kmax:
kmax = len(out)
if out.dtype != complex64:
raise TypeError("Output array is the wrong dtype")
htilde = FrequencySeries(out, delta_f=delta_f, copy=False)
spa_tmplt_engine(htilde[kmin:kmax], kmin, phase_order, delta_f, piM, pfaN,
pfa2, pfa3, pfa4, pfa5, pfl5,
pfa6, pfl6, pfa7, amp_factor)
return htilde | 0.002093 |
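A usage sketch with representative binary-neutron-star parameters; the values are illustrative and units follow the usual PyCBC conventions (solar masses, Hz, Mpc):
htilde = spa_tmplt(mass1=1.4, mass2=1.4,
                   spin1z=0.0, spin2z=0.0,
                   f_lower=30.0, delta_f=1.0 / 256,
                   distance=100.0,
                   phase_order=7, spin_order=7)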
def connect_async(self, connection_id, connection_string, callback):
"""Asynchronously connect to a device
Args:
connection_id (int): A unique identifier that will refer to this connection
connection_string (string): A DeviceAdapter specific string that can be used to connect to
a device using this DeviceAdapter.
callback (callable): A function that will be called when the connection attempt finishes as
callback(connection_id, adapter_id, success: bool, failure_reason: string or None)
"""
if callback is not None:
callback(connection_id, self.id, False, "connect command is not supported in device adapter") | 0.009642 |
def _call(self, vf, out):
"""Implement ``self(vf, out)``."""
if self.domain.field == ComplexNumbers():
vf[0].multiply(self._vecfield[0].conj(), out=out)
else:
vf[0].multiply(self._vecfield[0], out=out)
if self.is_weighted:
out *= self.weights[0]
if len(self.domain) == 1:
return
tmp = self.range.element()
for vfi, gi, wi in zip(vf[1:], self.vecfield[1:],
self.weights[1:]):
if self.domain.field == ComplexNumbers():
vfi.multiply(gi.conj(), out=tmp)
else:
vfi.multiply(gi, out=tmp)
if self.is_weighted:
tmp *= wi
out += tmp | 0.002635 |
def parse_text(self, formatted_text):
"""
        Returns a list of operations (draw, cup, ed, ...).
        Each operation consists of a command and its associated data.
        :param formatted_text: text to parse, with the default char format to apply.
        :return: list of Operation
"""
assert isinstance(formatted_text, FormattedText)
ret_val = []
fmt = formatted_text.fmt if self._prev_fmt_closed else self._prev_fmt
fmt = QtGui.QTextCharFormat(fmt)
if not self._pending_text:
stripped_text = formatted_text.txt
else:
stripped_text = self._pending_text + formatted_text.txt
self._pending_text = ''
while stripped_text:
try:
escape_pos = stripped_text.index(self._escape[0])
except ValueError:
ret_val.append(Operation('draw', FormattedText(stripped_text, fmt)))
break
else:
if escape_pos != 0:
ret_val.append(Operation('draw', FormattedText(stripped_text[:escape_pos], fmt)))
stripped_text = stripped_text[escape_pos:]
fmt = QtGui.QTextCharFormat(fmt)
assert stripped_text[0] == self._escape[0]
while stripped_text and stripped_text[0] == self._escape[0]:
if self._escape.startswith(stripped_text):
# control sequence not complete
self._pending_text += stripped_text
stripped_text = ''
break
if not stripped_text.startswith(self._escape):
# check vt100 escape sequences
ctrl_seq = False
for alt_seq in self._escape_alts:
if stripped_text.startswith(alt_seq):
ctrl_seq = True
break
if not ctrl_seq:
# not a control sequence
self._pending_text = ''
ret_val.append(Operation('draw', FormattedText(stripped_text[:1], fmt)))
fmt = QtGui.QTextCharFormat(fmt)
stripped_text = stripped_text[1:]
continue
self._pending_text += _mid(stripped_text, 0, self._escape_len)
stripped_text = stripped_text[self._escape_len:]
# Non draw related command (cursor/erase)
if self._pending_text in [self._escape] + self._escape_alts:
m = self._supported_commands.match(stripped_text)
if m and self._pending_text == self._escape:
_, e = m.span()
n = m.group('n')
cmd = m.group('cmd')
if not n:
n = 0
ret_val.append(Operation(self._commands[cmd], n))
self._pending_text = ''
stripped_text = stripped_text[e:]
continue
else:
m = self._unsupported_command.match(stripped_text)
if m:
self._pending_text = ''
stripped_text = stripped_text[m.span()[1]:]
continue
elif self._pending_text in ['\x1b=', '\x1b>']:
self._pending_text = ''
continue
# Handle Select Graphic Rendition commands
# get the number
str_nbr = ''
numbers = []
while stripped_text:
if stripped_text[0].isdigit():
str_nbr += stripped_text[0]
else:
if str_nbr:
numbers.append(str_nbr)
if not str_nbr or stripped_text[0] != self._semicolon:
break
str_nbr = ''
self._pending_text += _mid(stripped_text, 0, 1)
stripped_text = stripped_text[1:]
if not stripped_text:
break
# remove terminating char
if not stripped_text.startswith(self._color_terminator):
# _logger().warn('removing %s', repr(self._pending_text + stripped_text[0]))
self._pending_text = ''
stripped_text = stripped_text[1:]
break
# got consistent control sequence, ok to clear pending text
self._pending_text = ''
stripped_text = stripped_text[1:]
if not numbers:
fmt = QtGui.QTextCharFormat(formatted_text.fmt)
self.end_format_scope()
i_offset = 0
n = len(numbers)
for i in range(n):
i += i_offset
code = int(numbers[i])
if self._TextColorStart <= code <= self._TextColorEnd:
fmt.setForeground(_ansi_color(code - self._TextColorStart, self.color_scheme))
self._set_format_scope(fmt)
elif self._BackgroundColorStart <= code <= self._BackgroundColorEnd:
fmt.setBackground(_ansi_color(code - self._BackgroundColorStart, self.color_scheme))
self._set_format_scope(fmt)
else:
if code == self._ResetFormat:
fmt = QtGui.QTextCharFormat(formatted_text.fmt)
self.end_format_scope()
elif code == self._BoldText:
fmt.setFontWeight(QtGui.QFont.Bold)
self._set_format_scope(fmt)
elif code == self._NotBold:
fmt.setFontWeight(QtGui.QFont.Normal)
self._set_format_scope(fmt)
elif code == self._ItalicText:
fmt.setFontItalic(True)
self._set_format_scope(fmt)
elif code == self._NotItalicNotFraktur:
fmt.setFontItalic(False)
self._set_format_scope(fmt)
elif code == self._UnderlinedText:
fmt.setUnderlineStyle(fmt.SingleUnderline)
fmt.setUnderlineColor(fmt.foreground().color())
self._set_format_scope(fmt)
elif code == self._NotUnderlined:
fmt.setUnderlineStyle(fmt.NoUnderline)
self._set_format_scope(fmt)
elif code == self._DefaultTextColor:
fmt.setForeground(formatted_text.fmt.foreground())
self._set_format_scope(fmt)
elif code == self._DefaultBackgroundColor:
fmt.setBackground(formatted_text.fmt.background())
self._set_format_scope(fmt)
elif code == self._Dim:
fmt = QtGui.QTextCharFormat(fmt)
fmt.setForeground(fmt.foreground().color().darker(self.DIM_FACTOR))
elif code == self._Negative:
normal_fmt = fmt
fmt = QtGui.QTextCharFormat(fmt)
fmt.setForeground(normal_fmt.background())
fmt.setBackground(normal_fmt.foreground())
elif code == self._Positive:
fmt = QtGui.QTextCharFormat(formatted_text.fmt)
elif code in [self._RgbBackgroundColor, self._RgbTextColor]:
# See http://en.wikipedia.org/wiki/ANSI_escape_code#Colors
i += 1
if i == n:
break
next_code = int(numbers[i])
if next_code == 2:
# RGB set with format: 38;2;<r>;<g>;<b>
if i + 3 < n:
method = fmt.setForeground if code == self._RgbTextColor else fmt.setBackground
method(QtGui.QColor(int(numbers[i + 1]), int(numbers[i + 2]), int(numbers[i + 3])))
self._set_format_scope(fmt)
i_offset = 3
elif next_code == 5:
# 256 color mode with format: 38;5;<i>
index = int(numbers[i + 1])
if index < 8:
# The first 8 colors are standard low-intensity ANSI colors.
color = _ansi_color(index, self.color_scheme)
elif index < 16:
# The next 8 colors are standard high-intensity ANSI colors.
color = _ansi_color(index - 8, self.color_scheme).lighter(150)
elif index < 232:
# The next 216 colors are a 6x6x6 RGB cube.
o = index - 16
                                color = QtGui.QColor((o // 36) * 51, ((o // 6) % 6) * 51, (o % 6) * 51)
else:
# The last 24 colors are a greyscale gradient.
grey = (index - 232) * 11
color = QtGui.QColor(grey, grey, grey)
if code == self._RgbTextColor:
fmt.setForeground(color)
else:
fmt.setBackground(color)
self._set_format_scope(fmt)
else:
                            _logger().warning('unsupported SGR code: %r', code)
return ret_val | 0.002008 |
def _install_directory_structure_file(cls):
"""
Download the latest version of `dir_structure_production.json`.
"""
        # We retrieve the link to the directory structure file.
        # It is not hard-coded because this method is called only if we
        # are sure that the configuration file exists.
dir_structure_link = PyFunceble.CONFIGURATION["links"]["dir_structure"]
# We update the link according to our current version.
dir_structure_link = Version(True).right_url_from_version(dir_structure_link)
# We set the destination of the downloaded file.
destination = (
PyFunceble.CURRENT_DIRECTORY
+ PyFunceble.CONFIGURATION["outputs"]["default_files"]["dir_structure"]
)
if not Version(True).is_cloned() or not PyFunceble.path.isfile(destination):
# The current version is not the cloned version.
# We Download the link content and return the download status.
data = Download(dir_structure_link, destination, return_data=True).text()
File(destination).write(data, overwrite=True)
return True
# We are in the cloned version.
# We do not need to download the file, so we are returning None.
return None | 0.004591 |
def show(self, m_a):
"""
Display (with a pretty print) this object
:param m_a: :class:`MethodAnalysis` object
"""
bytecode.PrettyShow(m_a, m_a.basic_blocks.gets(), self.notes)
bytecode.PrettyShowEx(m_a.exceptions.gets()) | 0.00722 |
def parse_string_factory(
alg, sep, splitter, input_transform, component_transform, final_transform
):
"""
Create a function that will split and format a *str* into a tuple.
Parameters
----------
alg : ns enum
Indicate how to format and split the *str*.
sep : str
The string character to be inserted between adjacent numeric
objects in the returned tuple.
splitter : callable
A function the will accept a string and returns an iterable
of strings where the numbers are separated from the non-numbers.
input_transform : callable
A function to apply to the string input *before* applying
the *splitter* function. Must return a string.
component_transform : callable
A function that is operated elementwise on the output of
*splitter*. It must accept a single string and return either
a string or a number.
final_transform : callable
A function to operate on the return value as a whole. It
must accept a tuple and a string argument - the tuple
should be the result of applying the above functions, and the
string is the original input value. It must return a tuple.
Returns
-------
func : callable
A function that accepts string input and returns a tuple
containing the string split into numeric and non-numeric
components, where the numeric components are converted into
numeric objects. The first element is *always* a string,
and then alternates number then string. Intended to be
used as the *string_func* argument to *natsort_key*.
See Also
--------
natsort_key
input_string_transform_factory
string_component_transform_factory
final_data_transform_factory
"""
# Sometimes we store the "original" input before transformation,
# sometimes after.
orig_after_xfrm = not (alg & NS_DUMB and alg & ns.LOCALEALPHA)
original_func = input_transform if orig_after_xfrm else _no_op
normalize_input = _normalize_input_factory(alg)
def func(x):
# Apply string input transformation function and return to x.
# Original function is usually a no-op, but some algorithms require it
# to also be the transformation function.
x = normalize_input(x)
x, original = input_transform(x), original_func(x)
x = splitter(x) # Split string into components.
x = py23_filter(None, x) # Remove empty strings.
x = py23_map(component_transform, x) # Apply transform on components.
x = sep_inserter(x, sep) # Insert '' between numbers.
return final_transform(x, original) # Apply the final transform.
return func | 0.000364 |
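For orientation, the public behavior this factory ultimately drives, via natsort's real top-level API:
from natsort import natsorted
natsorted(['version-2.txt', 'version-10.txt', 'version-1.txt'])
# -> ['version-1.txt', 'version-2.txt', 'version-10.txt']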
def connect(self):
"""connect to the database
**Return:**
- ``dbConn`` -- the database connection
See the class docstring for usage
"""
        self.log.debug('starting the ``connect`` method')
dbSettings = self.dbSettings
port = False
if "tunnel" in dbSettings and dbSettings["tunnel"]:
port = self._setup_tunnel(
tunnelParameters=dbSettings["tunnel"]
)
# SETUP A DATABASE CONNECTION
host = dbSettings["host"]
user = dbSettings["user"]
passwd = dbSettings["password"]
dbName = dbSettings["db"]
dbConn = ms.connect(
host=host,
user=user,
passwd=passwd,
db=dbName,
port=port,
use_unicode=True,
charset='utf8',
local_infile=1,
client_flag=ms.constants.CLIENT.MULTI_STATEMENTS,
connect_timeout=36000,
max_allowed_packet=51200000
)
if self.autocommit:
dbConn.autocommit(True)
        self.log.debug('completed the ``connect`` method')
return dbConn | 0.001715 |
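The expected shape of dbSettings, with key names taken from the code above and placeholder values:
dbSettings = {
    "host": "localhost",
    "user": "reader",
    "password": "secret",
    "db": "catalogues",
    # optional: "tunnel": {...}  # SSH tunnel parameters consumed by _setup_tunnel
}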
def _MaybePurgeOrphanedData(self, event):
"""Maybe purge orphaned data due to a TensorFlow crash.
When TensorFlow crashes at step T+O and restarts at step T, any events
written after step T are now "orphaned" and will be at best misleading if
they are included in TensorBoard.
This logic attempts to determine if there is orphaned data, and purge it
if it is found.
Args:
event: The event to use as a reference, to determine if a purge is needed.
"""
if not self.purge_orphaned_data:
return
## Check if the event happened after a crash, and purge expired tags.
if self.file_version and self.file_version >= 2:
## If the file_version is recent enough, use the SessionLog enum
## to check for restarts.
self._CheckForRestartAndMaybePurge(event)
else:
## If there is no file version, default to old logic of checking for
## out of order steps.
self._CheckForOutOfOrderStepAndMaybePurge(event) | 0.01417 |
def _render_our_module_flags(self, module, output_lines, prefix=''):
"""Returns a help string for a given module."""
flags = self._get_flags_defined_by_module(module)
if flags:
self._render_module_flags(module, flags, output_lines, prefix) | 0.007782 |