def complete_rule(rule, cmd):
    '''complete using one rule'''
    global rline_mpstate
    rule_components = rule.split(' ')
    # complete the empty string (e.g "graph <TAB><TAB>")
    if len(cmd) == 0:
        return rule_expand(rule_components[0], "")
    # check it matches so far
    for i in range(len(cmd) - 1):
        if not rule_match(rule_components[i], cmd[i]):
            return []
    # expand the next rule component
    expanded = rule_expand(rule_components[len(cmd) - 1], cmd[-1])
    return expanded
def fetch(self, category=CATEGORY_EVENT, from_date=DEFAULT_DATETIME, to_date=None,
          filter_classified=False):
    """Fetch the events from the server.

    This method fetches those events of a group stored on the server
    that were updated since the given date. Data comments and rsvps
    are included within each event.

    :param category: the category of items to fetch
    :param from_date: obtain events updated since this date
    :param to_date: obtain events updated before this date
    :param filter_classified: remove classified fields from the resulting items

    :returns: a generator of events
    """
    if not from_date:
        from_date = DEFAULT_DATETIME

    from_date = datetime_to_utc(from_date)

    kwargs = {"from_date": from_date, "to_date": to_date}
    items = super().fetch(category,
                          filter_classified=filter_classified,
                          **kwargs)
    return items
def show_error_dialog(message: str, details: str = None):
    """
    Convenience method for showing an error dialog.
    """
    # TODO: i18n
    message_box = QMessageBox(
        QMessageBox.Critical,
        "Error",
        message,
        QMessageBox.Ok,
        None
    )
    if details:
        message_box.setDetailedText(details)
    message_box.exec_()
def from_pycode(cls, co):
    """Create a Code object from a python code object.

    Parameters
    ----------
    co : CodeType
        The python code object.

    Returns
    -------
    code : Code
        The codetransformer Code object.
    """
    # Make it sparse so that instrs[n] is the instruction at bytecode[n]
    sparse_instrs = tuple(
        _sparse_args(
            Instruction.from_opcode(
                b.opcode,
                Instruction._no_arg if b.arg is None else _RawArg(b.arg),
            ) for b in Bytecode(co)
        ),
    )
    for idx, instr in enumerate(sparse_instrs):
        if instr is None:
            # The sparse value
            continue
        if instr.absjmp:
            instr.arg = sparse_instrs[instr.arg]
        elif instr.reljmp:
            instr.arg = sparse_instrs[instr.arg + idx + argsize + 1]
        elif isinstance(instr, LOAD_CONST):
            instr.arg = co.co_consts[instr.arg]
        elif instr.uses_name:
            instr.arg = co.co_names[instr.arg]
        elif instr.uses_varname:
            instr.arg = co.co_varnames[instr.arg]
        elif instr.uses_free:
            instr.arg = _freevar_argname(
                instr.arg,
                co.co_freevars,
                co.co_cellvars,
            )
        elif instr.have_arg and isinstance(instr.arg, _RawArg):
            instr.arg = int(instr.arg)

    flags = Flag.unpack(co.co_flags)
    has_vargs = flags['CO_VARARGS']
    has_kwargs = flags['CO_VARKEYWORDS']

    # Here we convert the varnames format into our argnames format.
    paramnames = co.co_varnames[
        :(co.co_argcount +
          co.co_kwonlyargcount +
          has_vargs +
          has_kwargs)
    ]
    # We start with the positional arguments.
    new_paramnames = list(paramnames[:co.co_argcount])
    # Add *args next.
    if has_vargs:
        new_paramnames.append('*' + paramnames[-1 - has_kwargs])
    # Add keyword-only arguments next.
    new_paramnames.extend(paramnames[
        co.co_argcount:co.co_argcount + co.co_kwonlyargcount
    ])
    # Add **kwargs last.
    if has_kwargs:
        new_paramnames.append('**' + paramnames[-1])

    return cls(
        filter(bool, sparse_instrs),
        argnames=new_paramnames,
        cellvars=co.co_cellvars,
        freevars=co.co_freevars,
        name=co.co_name,
        filename=co.co_filename,
        firstlineno=co.co_firstlineno,
        lnotab={
            lno: sparse_instrs[off] for off, lno in findlinestarts(co)
        },
        flags=flags,
    )
def add_new_reset_method(obj):
    """
    Attach a new `reset()` method to `obj` which resets the internal
    seed generator of `obj` and then resets each of its constituent
    field generators found in `obj.field_gens`.
    """
    #
    # Create and assign automatically generated reset() method
    #
    def new_reset(self, seed=None):
        logger.debug(f'[EEE] Inside automatically generated reset() method for {self} (seed={seed})')

        if seed is not None:
            self.seed_generator.reset(seed)
            for name, gen in self.field_gens.items():
                next_seed = next(self.seed_generator)
                gen.reset(next_seed)

            # TODO: the following should be covered by the newly added
            # reset() method in IndependentGeneratorMeta. However, for
            # some reason we can't call this via the usual `orig_reset()`
            # pattern, so we have to duplicate this here. Not ideal...
            for c in self._dependent_generators:
                c.reset_dependent_generator(seed)

        return self

    obj.reset = new_reset
def get_identities(self, identity=None, attrs=None):
    """ Get identities matching name and attrs
    of the user, as a list

    :param identity: zobjects.Identity or identity name (string)
    :param attrs: dict of attributes; return only identities matching
    :returns: list of zobjects.Identity
    """
    resp = self.request('GetIdentities')

    if 'identity' in resp:
        identities = resp['identity']
        if not isinstance(identities, list):
            identities = [identities]

        if identity or attrs:
            wanted_identities = []

            for u_identity in [
                    zobjects.Identity.from_dict(i) for i in identities]:
                if identity:
                    if isinstance(identity, zobjects.Identity):
                        if u_identity.name == identity.name:
                            return [u_identity]
                    else:
                        if u_identity.name == identity:
                            return [u_identity]

                elif attrs:
                    for attr, value in attrs.items():
                        if (attr in u_identity._a_tags and
                                u_identity._a_tags[attr] == value):
                            wanted_identities.append(u_identity)
            return wanted_identities
        else:
            return [zobjects.Identity.from_dict(i) for i in identities]
    else:
        return []
def disable(self):
    """
    Disable the button, if in non-expert mode.
    """
    w.ActButton.disable(self)
    g = get_root(self).globals
    if self._expert:
        self.config(bg=g.COL['start'])
    else:
        self.config(bg=g.COL['startD'])
def qos_map_cos_traffic_class_cos5(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    qos = ET.SubElement(config, "qos", xmlns="urn:brocade.com:mgmt:brocade-qos")
    map = ET.SubElement(qos, "map")
    cos_traffic_class = ET.SubElement(map, "cos-traffic-class")
    name_key = ET.SubElement(cos_traffic_class, "name")
    name_key.text = kwargs.pop('name')
    cos5 = ET.SubElement(cos_traffic_class, "cos5")
    cos5.text = kwargs.pop('cos5')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def descape(self, string, defs=None):
    """Decodes html entities from a given string"""
    if defs is None:
        defs = html_entities.entitydefs
    f = lambda m: defs[m.group(1)] if len(m.groups()) > 0 else m.group(0)
    return self.html_entity_re.sub(f, string)
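
# Illustrative only: a usage sketch for descape(), assuming the instance's
# html_entity_re is a hypothetical pattern like re.compile(r'&(\w+);') and
# entitydefs maps entity names to characters (e.g. 'amp' -> '&'):
#
#   parser.descape('Tom &amp; Jerry')   # -> 'Tom & Jerry'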
def all_label_values(self, label_list_ids=None):
    """
    Return a set of all label-values occurring in this corpus.

    Args:
        label_list_ids (list): If not None, only labels from label-lists with
                               an id contained in this list are considered.

    Returns:
        :class:`set`: A set of distinct label-values.
    """
    values = set()

    for utterance in self.utterances.values():
        values = values.union(utterance.all_label_values(label_list_ids=label_list_ids))

    return values
def configure_modrpaf(self):
    """
    Installs the mod-rpaf Apache module.

    https://github.com/gnif/mod_rpaf
    """
    r = self.local_renderer
    if r.env.modrpaf_enabled:
        self.install_packages()
        self.enable_mod('rpaf')
    else:
        if self.last_manifest.modrpaf_enabled:
            self.disable_mod('mod_rpaf')
def _get_new_msgstrs(po_file_path, msgids):
    """
    Return a dict mapping msgids to their msgstr values for all entries in
    the po file whose msgid does not appear in the msgids list provided as
    an argument, i.e. the entries that are new relative to that list.
    """
    po_file = polib.pofile(po_file_path)
    msgstrs = {}
    for entry in po_file:
        if entry.msgid not in msgids:
            msgstrs[entry.msgid] = entry.msgstr
    return msgstrs
def drop_reserved_params(params):
    """ Drops reserved params """
    from nefertari import RESERVED_PARAMS
    params = params.copy()
    for reserved_param in RESERVED_PARAMS:
        if reserved_param in params:
            params.pop(reserved_param)
    return params
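
# Illustrative only: assuming RESERVED_PARAMS contains a hypothetical
# '_limit' key, the function returns a copy with reserved keys removed and
# leaves the input dict untouched:
#
#   drop_reserved_params({'_limit': 10, 'name': 'x'})   # -> {'name': 'x'}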
def verify(self, windowSize=None):
    """Verify that this protocol model is valid. Return 0 if successful,
    a failure message otherwise

    :param windowSize: acquisition window size (seconds), to check against duration; the check is not performed if None is provided
    :type windowSize: float
    :returns: 0 (int) for success, fail message (str) otherwise
    """
    if self.rowCount() == 0:
        return "Protocol must have at least one test"
    if self.caldb is None or self.calv is None:
        return "Protocol reference voltage not set"
    for test in self._tests:
        msg = test.verify(windowSize)
        if msg:
            return msg
    return 0
def Validate(self, value, **_):
    """Check that value is a valid enum."""
    if value is None:
        return
    return rdfvalue.RDFBool(super(ProtoBoolean, self).Validate(value))
def get_repo_info(repo_name, profile='github', ignore_cache=False):
    '''
    Return information for a given repo.

    .. versionadded:: 2016.11.0

    repo_name
        The name of the repository.

    profile
        The name of the profile configuration to use. Defaults to ``github``.

    CLI Example:

    .. code-block:: bash

        salt myminion github.get_repo_info salt
        salt myminion github.get_repo_info salt profile='my-github-profile'
    '''
    org_name = _get_config_value(profile, 'org_name')
    key = "github.{0}:{1}:repo_info".format(
        _get_config_value(profile, 'org_name'),
        repo_name.lower()
    )

    if key not in __context__ or ignore_cache:
        client = _get_client(profile)
        try:
            repo = client.get_repo('/'.join([org_name, repo_name]))
            if not repo:
                return {}

            # client.get_repo can return a github.Repository.Repository object,
            # even if the repo is invalid. We need to catch the exception when
            # we try to perform actions on the repo object, rather than above
            # the if statement.
            ret = _repo_to_dict(repo)

            __context__[key] = ret
        except github.UnknownObjectException:
            raise CommandExecutionError(
                'The \'{0}\' repository under the \'{1}\' organization could not '
                'be found.'.format(
                    repo_name,
                    org_name
                )
            )
    return __context__[key]
def _instruction_to_superop(cls, instruction):
    """Convert a QuantumCircuit or Instruction to a SuperOp."""
    # Convert circuit to an instruction
    if isinstance(instruction, QuantumCircuit):
        instruction = instruction.to_instruction()
    # Initialize an identity superoperator of the correct size
    # of the circuit
    op = SuperOp(np.eye(4 ** instruction.num_qubits))
    op._append_instruction(instruction)
    return op
def make_func_declaration(func_name, lineno, type_=None):
    """ This will return a node with the symbol as a function.
    """
    return symbols.FUNCDECL.make_node(func_name, lineno, type_=type_)
def account_unpin(self, id):
    """
    Unpin / un-endorse a user.

    Returns a `relationship dict`_ containing the updated relationship to the user.
    """
    id = self.__unpack_id(id)
    url = '/api/v1/accounts/{0}/unpin'.format(str(id))
    return self.__api_request('POST', url)
def _get_merge_rules(properties, path=None):
    """
    Yields merge rules as key-value pairs, in which the first element is a
    JSON path as a tuple, and the second element is a list of merge
    properties whose values are `true`.
    """
    if path is None:
        path = ()

    for key, value in properties.items():
        new_path = path + (key,)
        types = _get_types(value)

        # `omitWhenMerged` supersedes all other rules.
        # See http://standard.open-contracting.org/1.1-dev/en/schema/merging/#omit-when-merged
        if value.get('omitWhenMerged') or value.get('mergeStrategy') == 'ocdsOmit':
            yield (new_path, {'omitWhenMerged'})
        # `wholeListMerge` supersedes any nested rules.
        # See http://standard.open-contracting.org/1.1-dev/en/schema/merging/#whole-list-merge
        elif 'array' in types and (value.get('wholeListMerge') or value.get('mergeStrategy') == 'ocdsVersion'):
            yield (new_path, {'wholeListMerge'})
        elif 'object' in types and 'properties' in value:
            yield from _get_merge_rules(value['properties'], path=new_path)
        elif 'array' in types and 'items' in value:
            item_types = _get_types(value['items'])
            # See http://standard.open-contracting.org/1.1-dev/en/schema/merging/#objects
            if any(item_type != 'object' for item_type in item_types):
                yield (new_path, {'wholeListMerge'})
            elif 'object' in item_types and 'properties' in value['items']:
                # See http://standard.open-contracting.org/1.1-dev/en/schema/merging/#whole-list-merge
                if 'id' not in value['items']['properties']:
                    yield (new_path, {'wholeListMerge'})
                else:
                    yield from _get_merge_rules(value['items']['properties'], path=new_path)
def convert_random_normal(node, **kwargs):
    """Map MXNet's random_normal operator attributes to onnx's RandomNormal
    operator and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    # Converting to float32
    mean = float(attrs.get("loc", 0))
    scale = float(attrs.get("scale", 1.0))
    shape = convert_string_to_list(attrs.get('shape', '[]'))
    dtype = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[np.dtype(attrs.get('dtype', 'float32'))]

    node = onnx.helper.make_node(
        'RandomNormal',
        input_nodes,
        [name],
        mean=mean,
        scale=scale,
        dtype=dtype,
        shape=shape,
        name=name
    )
    return [node]
def decode(data):
    "Decodes a syncsafe integer"
    value = 0
    for b in data:
        if b > 127:  # iTunes bug
            raise ValueError("Invalid syncsafe integer")
        value <<= 7
        value += b
    return value
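
# Worked example: syncsafe integers carry 7 bits per byte, so
#
#   decode(b'\x01\x7f')   # -> (1 << 7) + 127 == 255
#
# and any byte above 127 raises ValueError.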
def before_request(self, request, method, url, headers):
    """Performs credential-specific before request logic.

    Args:
        request (Any): Unused. JWT credentials do not need to make an
            HTTP request to refresh.
        method (str): The request's HTTP method.
        url (str): The request's URI. This is used as the audience claim
            when generating the JWT.
        headers (Mapping): The request's headers.
    """
    # pylint: disable=unused-argument
    # (pylint doesn't correctly recognize overridden methods.)
    parts = urllib.parse.urlsplit(url)
    # Strip query string and fragment
    audience = urllib.parse.urlunsplit(
        (parts.scheme, parts.netloc, parts.path, "", ""))
    token = self._get_jwt_for_audience(audience)
    self.apply(headers, token=token)
def default(self, line):
    """Overriding default to get access to any argparse commands we have specified."""
    if any((line.startswith(x) for x in self.argparse_names())):
        try:
            args = self.argparser.parse_args(shlex.split(line))
        except Exception:  # intentionally catches also other errors in argparser
            pass
        else:
            args.func(args)
    else:
        cmd.Cmd.default(self, line)
def _match_by_norm_func(l1, l2, norm_fn, dist_fn, thresh):
    """Matches elements in l1 and l2 using normalization functions.

    Splits the elements in each list into buckets given by the normalization
    function. If the same normalization value points to a bucket from the
    first list and a bucket from the second list, both with a single element,
    we consider the elements in the list as matching if the distance between
    them is less than (or equal to) the threshold.

    e.g. l1 = ['X1', 'Y1', 'Y2', 'Z5'], l2 = ['X1', 'Y3', 'Z1']
         norm_fn = lambda x: x[0]
         dist_fn = lambda e1, e2: 0 if e1 == e2 else 1
         thresh = 0

    The buckets will then be:
        l1_bucket = {'X': ['X1'], 'Y': ['Y1', 'Y2'], 'Z': ['Z5']}
        l2_bucket = {'X': ['X1'], 'Y': ['Y3'], 'Z': ['Z1']}

    For each normalized value:
        'X' -> consider 'X1' equal with 'X1' since the distance is equal to
               the threshold
        'Y' -> skip the lists since we have multiple possible matches
        'Z' -> consider 'Z1' and 'Z5' as different since the distance is
               greater than the threshold.

    Return:
        ([('X1', 'X1')], ['Y1', 'Y2', 'Z5'], ['Y3', 'Z1'])
    """
    common = []
    l1_only_idx = set(range(len(l1)))
    l2_only_idx = set(range(len(l2)))

    buckets_l1 = _group_by_fn(enumerate(l1), lambda x: norm_fn(x[1]))
    buckets_l2 = _group_by_fn(enumerate(l2), lambda x: norm_fn(x[1]))

    for normed, l1_elements in buckets_l1.items():
        l2_elements = buckets_l2.get(normed, [])

        if not l1_elements or not l2_elements:
            continue

        _, (_, e1_first) = l1_elements[0]
        _, (_, e2_first) = l2_elements[0]

        match_is_ambiguous = not (
            len(l1_elements) == len(l2_elements) and (
                all(e2 == e2_first for (_, (_, e2)) in l2_elements) or
                all(e1 == e1_first for (_, (_, e1)) in l1_elements)
            )
        )
        if match_is_ambiguous:
            continue

        for (e1_idx, e1), (e2_idx, e2) in zip(l1_elements, l2_elements):
            if dist_fn(e1, e2) > thresh:
                continue

            l1_only_idx.remove(e1_idx)
            l2_only_idx.remove(e2_idx)
            common.append((e1, e2))

    l1_only = [l1[i] for i in l1_only_idx]
    l2_only = [l2[i] for i in l2_only_idx]

    return common, l1_only, l2_only
def whitespace_before_comment(logical_line, tokens):
    r"""Separate inline comments by at least two spaces.

    An inline comment is a comment on the same line as a statement. Inline
    comments should be separated by at least two spaces from the statement.
    They should start with a # and a single space.

    Each line of a block comment starts with a # and a single space
    (unless it is indented text inside the comment).

    Okay: x = x + 1  # Increment x
    Okay: x = x + 1    # Increment x
    Okay: # Block comment
    E261: x = x + 1 # Increment x
    E262: x = x + 1  #Increment x
    E262: x = x + 1  #  Increment x
    E265: #Block comment
    E266: ### Block comment
    """
    prev_end = (0, 0)
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.COMMENT:
            inline_comment = line[:start[1]].strip()
            if inline_comment:
                if prev_end[0] == start[0] and start[1] < prev_end[1] + 2:
                    yield (prev_end,
                           "E261 at least two spaces before inline comment")
            symbol, sp, comment = text.partition(' ')
            bad_prefix = symbol not in '#:' and (symbol.lstrip('#')[:1] or '#')
            if inline_comment:
                if bad_prefix or comment[:1] in WHITESPACE:
                    yield start, "E262 inline comment should start with '# '"
            elif bad_prefix and (bad_prefix != '!' or start[0] > 1):
                if bad_prefix != '#':
                    yield start, "E265 block comment should start with '# '"
                elif comment:
                    yield start, "E266 too many leading '#' for block comment"
        elif token_type != tokenize.NL:
            prev_end = end
def getGroup(self, networkId, groupNodeId, verbose=None):
    """
    Returns the group specified by the `groupNodeId` and `networkId` parameters.

    :param networkId: SUID of the Network
    :param groupNodeId: SUID of the Node representing the Group
    :param verbose: print more
    :returns: 200: successful operation
    """
    response = api(url=self.___url + 'networks/' + str(networkId) + '/groups/' + str(groupNodeId) + '',
                   method="GET", verbose=verbose, parse_params=False)
    return response
def generate_sha1(string, salt=None):
    """
    Generates a sha1 hash for supplied string. Doesn't need to be very secure
    because it's not used for password checking. We got Django for that.

    :param string:
        The string that needs to be encrypted.

    :param salt:
        Optionally define your own salt. If none is supplied, will use a
        random string of 5 characters.

    :return: Tuple containing the salt and hash.
    """
    if not salt:
        salt = hashlib.sha1(str(random.random()).encode('utf-8')).hexdigest()[:5]

    hash = hashlib.sha1((salt + str(string)).encode('utf-8')).hexdigest()

    return (salt, hash)
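
# Illustrative only: the same salt always reproduces the same hash, so the
# pair can be stored and verified later:
#
#   salt, digest = generate_sha1('user@example.com')
#   generate_sha1('user@example.com', salt)[1] == digest   # -> True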
def get_current_client(self):
    """Return the currently selected notebook."""
    try:
        client = self.tabwidget.currentWidget()
    except AttributeError:
        client = None
    if client is not None:
        return client
def _transpile_circuit(circuit_config_tuple):
    """Select a PassManager and run a single circuit through it.

    Args:
        circuit_config_tuple (tuple):
            circuit (QuantumCircuit): circuit to transpile
            transpile_config (TranspileConfig): configuration dictating how to transpile

    Returns:
        QuantumCircuit: transpiled circuit
    """
    circuit, transpile_config = circuit_config_tuple

    # if the pass manager is not already selected, choose an appropriate one.
    if transpile_config.pass_manager:
        pass_manager = transpile_config.pass_manager
    elif transpile_config.coupling_map:
        pass_manager = default_pass_manager(transpile_config.basis_gates,
                                            transpile_config.coupling_map,
                                            transpile_config.initial_layout,
                                            transpile_config.seed_transpiler)
    else:
        pass_manager = default_pass_manager_simulator(transpile_config.basis_gates)

    return pass_manager.run(circuit)
def boxplot(neurons, feature, new_fig=True, subplot=False):
    '''
    Plot a boxplot of the selected feature for the population of neurons.

    Parameters
    ----------
    neurons : list
        List of Neurons. Single neurons must be encapsulated in a list.
    feature : str
        The feature of interest.

    Options
    -------
    subplot : bool
        Default is False, which returns a matplotlib figure object. If True,
        returns a matplotlib axis object, for use as a subplot.
    '''
    feature_values = [getattr(neu, 'get_' + feature)() for neu in neurons]
    _, ax = common.get_figure(new_fig=new_fig, subplot=subplot)
    ax.boxplot(feature_values)
    x_labels = ['neuron_id' for _ in neurons]
    ax.set_xticklabels(x_labels)
def double_centre(matrix, square_input=True):
    """ Double-centres the input matrix: from each element, subtract the row
    mean, subtract the column mean, add the grand mean and divide by -2.

    Method from: Torgerson, W S (1952). Multidimensional scaling: I. Theory
    and method.

    Equivalently, M = -0.5 * (I - (1/n) 11') D^2 (I - (1/n) 11'), where 11'
    is the n x n matrix of ones. """
    m = matrix.copy()
    if square_input:
        m **= 2

    (rows, cols) = m.shape
    cm = np.mean(m, axis=0)  # column means
    rm = np.mean(m, axis=1).reshape((rows, 1))  # row means
    gm = np.mean(cm)  # grand mean
    m -= rm + cm - gm
    m /= -2

    return m
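
# Illustrative only: double-centring a 2x2 distance matrix with numpy.
# Starting from D = [[0, 2], [2, 0]], squaring gives [[0, 4], [4, 0]], and
# subtracting row/column means, adding the grand mean and dividing by -2
# yields the centred Gram-style matrix:
#
#   D = np.array([[0., 2.], [2., 0.]])
#   double_centre(D)   # -> array([[ 1., -1.], [-1.,  1.]])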
def clearView(self, fillColor=0):
    """!
    Clear up canvas with view size
    @param fillColor: a color value
    @note
        The fillColor value range depends on the setting of _buffer_color_mode.
        * If it is SS_COLOR_MODE_MONO ("1") monochrome mode, it can only select 0: black and 1: white
        * If it is SS_COLOR_MODE_RGB ("RGB") color mode, RGB color values can be used
    """
    self.Canvas.rectangle(self.View.rectToArray(), outline=0, fill=fillColor)
def jhk_to_sdssr(jmag, hmag, kmag):
    '''Converts given J, H, Ks mags to an SDSS r magnitude value.

    Parameters
    ----------
    jmag,hmag,kmag : float
        2MASS J, H, Ks mags of the object.

    Returns
    -------
    float
        The converted SDSS r band magnitude.
    '''
    return convert_constants(jmag, hmag, kmag,
                             SDSSR_JHK,
                             SDSSR_JH, SDSSR_JK, SDSSR_HK,
                             SDSSR_J, SDSSR_H, SDSSR_K)
def locateChild(self, context, segments):
    """
    Return a statically defined child or a child defined by a site root
    plugin or an avatar from guard.
    """
    request = IRequest(context)
    webViewer = IWebViewer(self.store, None)
    childAndSegments = self.siteProduceResource(request, segments, webViewer)
    if childAndSegments is not None:
        return childAndSegments
    return NotFound
def UNEXPOSED(self, _cursor_type):
    """
    Handles unexposed types.
    Returns the canonical type instead.
    """
    _decl = _cursor_type.get_declaration()
    name = self.get_unique_name(_decl)
    if self.is_registered(name):
        obj = self.get_registered(name)
    else:
        obj = self.parse_cursor(_decl)
    return obj
def getCurrentStrDatetime():
    """
    Generate a string representation of the current datetime.

    Returns:
    --------
    string: The string of a date.
    """
    # Generating current time
    i = datetime.datetime.now()
    strTime = "%s-%s-%s_%sh%sm" % (i.year, i.month, i.day, i.hour, i.minute)
    return strTime
def terminate(self, force=False):
    """This forces a child process to terminate."""
    if not self.isalive():
        return True

    self.kill(signal.SIGINT)
    time.sleep(self.delayafterterminate)
    if not self.isalive():
        return True

    if force:
        self.kill(signal.SIGKILL)
        time.sleep(self.delayafterterminate)
        if not self.isalive():
            return True
        else:
            return False
def set_priority(self, name, vrid, value=None, disable=False,
                 default=False, run=True):
    """Set the priority property of the vrrp

    Args:
        name (string): The interface to configure.
        vrid (integer): The vrid number for the vrrp to be managed.
        value (integer): Priority to assign to the vrrp.
        disable (boolean): Unset priority if True.
        default (boolean): Set priority to default if True.
        run (boolean): Set to True to execute the command, False to
            return a string with the formatted command.

    Returns:
        If run is True, returns True if the command executed successfully,
        error if failure.

        If run is False, returns the formatted command string which can
        be passed to the node
    """
    if not default and not disable:
        if not str(value).isdigit() or value < 1 or value > 254:
            raise ValueError("vrrp property 'priority' must be "
                             "an integer in the range 1-254")

    cmd = self.command_builder('vrrp %d priority' % vrid, value=value,
                               default=default, disable=disable)

    # Run the command if requested
    if run:
        result = self.configure_interface(name, cmd)
        # And verify the command succeeded
        if result is False:
            return self.error
        return result

    # Otherwise return the formatted command
    return cmd
def _extrapolate_cols(self, data, first=True, last=True):
    """Extrapolate the column of data, to get the first and last together
    with the data.
    """
    if first:
        pos = self.col_indices[:2]
        first_column = _linear_extrapolate(pos,
                                           (data[:, 0], data[:, 1]),
                                           self.hcol_indices[0])
    if last:
        pos = self.col_indices[-2:]
        last_column = _linear_extrapolate(pos,
                                          (data[:, -2], data[:, -1]),
                                          self.hcol_indices[-1])

    if first and last:
        return np.hstack((np.expand_dims(first_column, 1),
                          data,
                          np.expand_dims(last_column, 1)))
    elif first:
        return np.hstack((np.expand_dims(first_column, 1),
                          data))
    elif last:
        return np.hstack((data,
                          np.expand_dims(last_column, 1)))
    else:
        return data
def get(self, columns=None):
    """
    Execute the query as a "select" statement.

    :type columns: list

    :rtype: orator.Collection
    """
    if columns is None:
        columns = ["*"]

    if self._query.get_query().columns:
        columns = []

    select = self._get_select_columns(columns)
    models = self._query.add_select(*select).get_models()

    self._hydrate_pivot_relation(models)

    if len(models) > 0:
        models = self._query.eager_load_relations(models)

    return self._related.new_collection(models)
def _set_slm(self, v, load=False):
    """
    Setter method for slm, mapped from YANG variable /cfm_state/slm (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_slm is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_slm() directly.

    YANG Description: CFM Y1731 SLM Details
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=slm.slm, is_container='container', presence=False, yang_name="slm", rest_name="slm", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'dot1ag-slm', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag-operational', defining_module='brocade-dot1ag-operational', yang_type='container', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """slm must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=slm.slm, is_container='container', presence=False, yang_name="slm", rest_name="slm", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'dot1ag-slm', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag-operational', defining_module='brocade-dot1ag-operational', yang_type='container', is_config=False)""",
        })

    self.__slm = t
    if hasattr(self, '_set'):
        self._set()
def write_ndef(self, ndef, slot=1):
    """
    Write an NDEF tag configuration to the YubiKey NEO.
    """
    if not self.capabilities.have_nfc_ndef(slot):
        raise yubikey_base.YubiKeyVersionError("NDEF slot %i unsupported in %s" % (slot, self))
    return self._device._write_config(ndef, _NDEF_SLOTS[slot])
def c_transform_entropic(b, M, reg, beta):
    r'''
    The goal is to recover u from the c-transform.

    The function computes the c-transform of a dual variable from the other
    dual variable:

    .. math::
        u_i = v_i^{c,reg} = -reg \log \sum_j \exp\left(\frac{v_j - M_{ij}}{reg}\right) b_j

    Where :

    - M is the (ns, nt) metric cost matrix
    - u, v are dual variables in R^I x R^J
    - reg is the regularization term

    It is used to recover an optimal u from optimal v solving the semi dual
    problem, see Proposition 2.1 of [18]_

    Parameters
    ----------
    b : np.ndarray(nt,)
        target measure
    M : np.ndarray(ns, nt)
        cost matrix
    reg : float
        regularization term > 0
    beta : np.ndarray(nt,)
        dual variable

    Returns
    -------
    alpha : np.ndarray(ns,)
        dual variable

    Examples
    --------
    >>> n_source = 7
    >>> n_target = 4
    >>> reg = 1
    >>> numItermax = 300000
    >>> a = ot.utils.unif(n_source)
    >>> b = ot.utils.unif(n_target)
    >>> rng = np.random.RandomState(0)
    >>> X_source = rng.randn(n_source, 2)
    >>> Y_target = rng.randn(n_target, 2)
    >>> M = ot.dist(X_source, Y_target)
    >>> method = "ASGD"
    >>> asgd_pi = stochastic.solve_semi_dual_entropic(a, b, M, reg,
    ...                                               method, numItermax)
    >>> print(asgd_pi)

    References
    ----------
    [Genevay et al., 2016] :
        Stochastic Optimization for Large-scale Optimal Transport,
        Advances in Neural Information Processing Systems (2016),
        arXiv preprint arxiv:1605.08527.
    '''
    n_source = np.shape(M)[0]
    alpha = np.zeros(n_source)
    for i in range(n_source):
        r = M[i, :] - beta
        min_r = np.min(r)
        exp_beta = np.exp(-(r - min_r) / reg) * b
        alpha[i] = min_r - reg * np.log(np.sum(exp_beta))
    return alpha
def load(self, context):
    """Returns the debugger plugin, if possible.

    Args:
        context: The TBContext flags including `add_arguments`.

    Returns:
        A DebuggerPlugin instance or None if it couldn't be loaded.
    """
    if not (context.flags.debugger_data_server_grpc_port > 0 or
            context.flags.debugger_port > 0):
        return None
    flags = context.flags
    try:
        # pylint: disable=g-import-not-at-top,unused-import
        import tensorflow
    except ImportError:
        raise ImportError(
            'To use the debugger plugin, you need to have TensorFlow installed:\n'
            '  pip install tensorflow')
    try:
        # pylint: disable=line-too-long,g-import-not-at-top
        from tensorboard.plugins.debugger import debugger_plugin as debugger_plugin_lib
        from tensorboard.plugins.debugger import interactive_debugger_plugin as interactive_debugger_plugin_lib
        # pylint: enable=line-too-long,g-import-not-at-top
    except ImportError as e:
        e_type, e_value, e_traceback = sys.exc_info()
        message = e.msg if hasattr(e, 'msg') else e.message  # Handle py2 vs py3
        if 'grpc' in message:
            e_value = ImportError(
                message +
                '\n\nTo use the debugger plugin, you need to have '
                'gRPC installed:\n  pip install grpcio')
        six.reraise(e_type, e_value, e_traceback)

    if flags.debugger_port > 0:
        interactive_plugin = (
            interactive_debugger_plugin_lib.InteractiveDebuggerPlugin(context))
        logger.info('Starting Interactive Debugger Plugin at gRPC port %d',
                    flags.debugger_data_server_grpc_port)
        interactive_plugin.listen(flags.debugger_port)
        return interactive_plugin
    elif flags.debugger_data_server_grpc_port > 0:
        noninteractive_plugin = debugger_plugin_lib.DebuggerPlugin(context)
        logger.info('Starting Non-interactive Debugger Plugin at gRPC port %d',
                    flags.debugger_data_server_grpc_port)
        noninteractive_plugin.listen(flags.debugger_data_server_grpc_port)
        return noninteractive_plugin
    raise AssertionError()
def findLastCharIndexMatching(text, func):
    """ Return index of last character in string for which func(char) evaluates to True. """
    for i in range(len(text) - 1, -1, -1):
        if func(text[i]):
            return i
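
# Worked examples (the function implicitly returns None when no character
# matches):
#
#   findLastCharIndexMatching('ab1c2d', str.isdigit)   # -> 4
#   findLastCharIndexMatching('abcd', str.isdigit)     # -> None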
def get_string_from_data(self, offset, data):
    """Get an ASCII string from within the data."""

    # OC Patch
    b = None

    try:
        b = data[offset]
    except IndexError:
        return ''

    s = ''
    while ord(b):
        s += b
        offset += 1
        try:
            b = data[offset]
        except IndexError:
            break

    return s
def _pop_entities(self, limit=50):
    """
    returns up to limit entities and pops them off the list
    """
    pop = self.data['entities'][:limit]
    del self.data['entities'][:limit]
    return pop
def check_if_alive(self):
    """Check if the content is available on the host server. Returns `True` if available, else `False`.

    This method is `lazy`-evaluated or only executes when called.

    :rtype: bool
    """
    try:
        from urllib2 import urlopen, URLError, HTTPError
    except ImportError:
        from urllib.request import urlopen, URLError, HTTPError

    if len(self.instance.STATUS_LINK):
        check_url = self.instance.STATUS_LINK % ({'content_uid': self.get_content_uid()})
    else:
        # fallback
        check_url = self.instance.url

    try:
        response = urlopen(check_url)
    except (HTTPError, URLError):
        return False
    except ValueError:
        raise URLError('Invalid URL: {}'.format(check_url))
    else:
        return True if response.code == 200 else False
def mk_pools(things, keyfnc=lambda x: x):
    "Indexes a thing by the keyfnc to construct pools of things."
    pools = {}
    sthings = sorted(things, key=keyfnc)
    for key, thingz in groupby(sthings, key=keyfnc):
        pools.setdefault(key, []).extend(list(thingz))
    return pools
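
# Illustrative only (mk_pools relies on itertools.groupby over the sorted
# input, so equal keys are grouped regardless of their original order):
#
#   mk_pools([1, 2, 3, 4], keyfnc=lambda x: x % 2)   # -> {0: [2, 4], 1: [1, 3]}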
def simple_predictive_sample(self, M_c, X_L, X_D, Y, Q, seed, n=1):
    """Sample values from predictive distribution of the given latent state.

    :param Y: A list of constraints to apply when sampling. Each constraint
        is a triplet of (r, d, v): r is the row index, d is the column
        index and v is the value of the constraint
    :type Y: list of lists
    :param Q: A list of values to sample. Each value is doublet of (r, d):
        r is the row index, d is the column index
    :type Q: list of lists
    :param n: the number of samples to draw
    :type n: int
    :returns: list of floats. Samples in the same order specified by Q
    """
    get_next_seed = make_get_next_seed(seed)
    samples = _do_simple_predictive_sample(
        M_c, X_L, X_D, Y, Q, n, get_next_seed)
    return samples
def predict_mhci_binding(job, peptfile, allele, peplen, univ_options,
                         mhci_options):
    """
    This module will predict MHC:peptide binding for peptides in the files created in node XX to
    ALLELE. ALLELE represents an MHCI allele.

    This module corresponds to node 18 on the tree
    """
    job.fileStore.logToMaster('Running mhci on %s:%s:%s' % (univ_options['patient'], allele,
                                                            peplen))
    work_dir = job.fileStore.getLocalTempDir()
    input_files = {
        'peptfile.faa': peptfile}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=True)
    parameters = [mhci_options['pred'],
                  allele,
                  peplen,
                  input_files['peptfile.faa']]
    with open('/'.join([work_dir, 'predictions.tsv']), 'w') as predfile:
        docker_call(tool='mhci', tool_parameters=parameters, work_dir=work_dir,
                    dockerhub=univ_options['dockerhub'], outfile=predfile, interactive=True)
    output_file = job.fileStore.writeGlobalFile(predfile.name)
    return output_file
def post(self):
    """Create a new role"""
    self.reqparse.add_argument('name', type=str, required=True)
    self.reqparse.add_argument('color', type=str, required=True)
    args = self.reqparse.parse_args()

    role = Role()
    role.name = args['name']
    role.color = args['color']
    db.session.add(role)
    db.session.commit()
    auditlog(event='role.create', actor=session['user'].username, data=args)

    return self.make_response('Role {} has been created'.format(role.role_id), HTTP.CREATED)
def event_listen(self, timeout=None, raise_on_disconnect=True):
    '''Does not return until PulseLoopStop
    gets raised in event callback or timeout passes.

    timeout should be in seconds (float),
    0 for non-blocking poll and None (default) for no timeout.

    raise_on_disconnect causes PulseDisconnected exceptions by default.
    Do not run any pulse operations from these callbacks.'''
    assert self.event_callback
    try:
        self._pulse_poll(timeout)
    except c.pa.CallError:
        pass  # e.g. from mainloop_dispatch() on disconnect
    if raise_on_disconnect and not self.connected:
        raise PulseDisconnected()
def broadcast(*sinks_):
    """The |broadcast| decorator creates a |push| object that receives a
    message by ``yield`` and then sends this message on to all the given sinks.

    .. |broadcast| replace:: :py:func:`broadcast`
    """
    @push
    def bc():
        sinks = [s() for s in sinks_]
        while True:
            msg = yield
            for s in sinks:
                s.send(msg)
    return bc
def rebuild(self):
    """
    Clears out all the child widgets from this widget and creates the
    widget that best matches the column properties for this edit.
    """
    plugins.init()

    self.blockSignals(True)
    self.setUpdatesEnabled(False)

    # clear the old editor
    if self._editor:
        self._editor.close()
        self._editor.setParent(None)
        self._editor.deleteLater()
        self._editor = None

    # create a new widget
    plugin_class = plugins.widgets.get(self._columnType)
    if plugin_class:
        self._editor = plugin_class(self)
        self.layout().addWidget(self._editor)

    self.blockSignals(False)
    self.setUpdatesEnabled(True)
def sun_events(latitude, longitude, date, timezone=0, zenith=None):
    """Convenience function for calculating sunrise and sunset.

    Civil twilight starts/ends when the Sun's centre is 6 degrees below
    the horizon.

    Nautical twilight starts/ends when the Sun's centre is 12 degrees
    below the horizon.

    Astronomical twilight starts/ends when the Sun's centre is 18 degrees
    below the horizon.

    Args:
        latitude (float): Location's latitude
        longitude (float): Location's longitude
        date (datetime.date): Calculate rise or set for given date
        timezone (int): Offset from UTC in minutes
        zenith (str): Calculate rise/set events, or twilight times

    Returns:
        tuple of datetime.time: The time for the given events in the specified
            timezone
    """
    return (sun_rise_set(latitude, longitude, date, 'rise', timezone, zenith),
            sun_rise_set(latitude, longitude, date, 'set', timezone, zenith))
def lookup_explicit(self, args, kwargs):
    '''
    Lookup the function that will be called with a given set of arguments,
    or raise DispatchError. Requires explicit tuple/dict grouping of
    arguments (see DispatchGroup.lookup for a function-like interface).
    '''
    for bind_args, callee in self.callees:
        try:
            # bind to the signature and types. Raises TypeError on failure
            bind_args(args, kwargs)
        except TypeError:
            # TypeError: failed to bind arguments. Try the next dispatch
            continue

        # All the parameters matched. Return the function and args
        return callee
    else:
        # Nothing was able to bind. Error.
        raise DispatchError(args, kwargs, self)
def _parse_config(self, ssh_config):
    '''
    This lame parser does not parse the full grammar of an ssh config
    file. It makes assumptions that are (hopefully) correct for the output
    of `vagrant ssh-config [vm-name]`. Specifically it assumes that there
    is only one Host section, the default vagrant host. It assumes that
    the parameters of the ssh config are not changing.

    Every line is of the form 'key  value', where key is a single token
    without any whitespace and value is the remaining part of the line.
    Value may optionally be surrounded in double quotes. All leading and
    trailing whitespace is removed from key and value. Example lines:

        '    User vagrant\n'
        '    IdentityFile "/home/robert/.vagrant.d/insecure_private_key"\n'

    Lines with '#' as the first non-whitespace character are considered
    comments and ignored. Whitespace-only lines are ignored. This parser
    does NOT handle using an '=' in options. Values surrounded in double
    quotes will have the double quotes removed.

    See https://github.com/bitprophet/ssh/blob/master/ssh/config.py for a
    more compliant ssh config file parser.
    '''
    conf = dict()
    started_parsing = False
    for line in ssh_config.splitlines():
        if line.strip().startswith('Host ') and not started_parsing:
            started_parsing = True
        if not started_parsing or not line.strip() or line.strip().startswith('#'):
            continue
        key, value = line.strip().split(None, 1)
        # Remove leading and trailing " from the values
        conf[key] = value.strip('"')
    return conf
def virtual(opts, virtualname, filename):
    '''
    Returns the __virtual__.
    '''
    if ((HAS_NAPALM and NAPALM_MAJOR >= 2) or HAS_NAPALM_BASE) and (is_proxy(opts) or is_minion(opts)):
        return virtualname
    else:
        return (
            False,
            (
                '"{vname}" {filename} cannot be loaded: '
                'NAPALM is not installed: ``pip install napalm``'
            ).format(
                vname=virtualname,
                filename='({filename})'.format(filename=filename)
            )
        )
def from_tibiadata(cls, content):
    """
    Parses a TibiaData response into a House object.

    Parameters
    ----------
    content: :class:`str`
        The JSON content of the TibiaData response.

    Returns
    -------
    :class:`House`
        The house contained in the response, if found.

    Raises
    ------
    InvalidContent
        If the content is not a house JSON response from TibiaData
    """
    json_content = parse_json(content)
    try:
        house_json = json_content["house"]
        if not house_json["name"]:
            return None
        house = cls(house_json["name"], house_json["world"])

        house.type = try_enum(HouseType, house_json["type"])
        house.id = house_json["houseid"]
        house.beds = house_json["beds"]
        house.size = house_json["size"]
        house.rent = house_json["rent"]
        house.image_url = house_json["img"]

        # Parsing the original status string is easier than dealing with TibiaData fields
        house._parse_status(house_json["status"]["original"])
    except KeyError:
        raise InvalidContent("content is not a TibiaData house response.")
    return house
def run_flag_hw(in_prefix, in_type, out_prefix, base_dir, options):
    """Runs step12 (flag HW).

    :param in_prefix: the prefix of the input files.
    :param in_type: the type of the input files.
    :param out_prefix: the output prefix.
    :param base_dir: the output directory.
    :param options: the options needed.

    :type in_prefix: str
    :type in_type: str
    :type out_prefix: str
    :type base_dir: str
    :type options: list

    :returns: a tuple containing the prefix of the output files (the input
              prefix for the next script) and the type of the output files
              (``bfile``).

    This function calls the :py:mod:`pyGenClean.FlagHW.flag_hw` module. The
    required file type for this module is ``bfile``, hence the need to use
    :py:func:`check_input_files` to check if the input file type is the
    good one, or to create it if needed.

    .. note::
        The :py:mod:`pyGenClean.FlagHW.flag_hw` module doesn't return usable
        output files. Hence, this function returns the input file prefix and
        its type.
    """
    # Creating the output directory
    os.mkdir(out_prefix)

    # We know we need bfile
    required_type = "bfile"
    check_input_files(in_prefix, in_type, required_type)

    # We need to inject the name of the input file and the name of the output
    # prefix
    script_prefix = os.path.join(out_prefix, "flag_hw")
    options += ["--{}".format(required_type), in_prefix,
                "--out", script_prefix]

    # We run the script
    try:
        flag_hw.main(options)
    except flag_hw.ProgramError as e:
        msg = "flag_hw: {}".format(e)
        raise ProgramError(msg)

    # Finding the two files containing the list of flagged markers
    filenames = glob(script_prefix + ".snp_flag_threshold_[0-9]*")
    thresholds = {}
    for filename in filenames:
        # Finding the threshold of the file
        threshold = re.sub(
            r"^flag_hw.snp_flag_threshold_",
            "",
            os.path.basename(filename),
        )

        # Counting the number of markers in the file
        nb_markers = None
        with open(filename, "r") as i_file:
            nb_markers = len(i_file.read().splitlines())

        # Saving the values
        thresholds[threshold] = (nb_markers, filename)

    # We create the LaTeX summary
    latex_file = os.path.join(script_prefix + ".summary.tex")
    try:
        with open(latex_file, "w") as o_file:
            print >>o_file, latex_template.subsection(
                flag_hw.pretty_name
            )

            # Data to write
            sorted_keys = sorted(thresholds.keys(), key=float)
            text = (
                "Markers which failed Hardy-Weinberg equilibrium test (using "
                "Plink) were flagged. A total of {:,d} marker{} failed with a "
                "threshold of {}. A total of {:,d} marker{} failed with a "
                "threshold of {}. For a total list, check the files {} and "
                "{}, respectively.".format(
                    thresholds[sorted_keys[0]][0],
                    "s" if thresholds[sorted_keys[0]][0] - 1 > 1 else "",
                    latex_template.format_numbers(sorted_keys[0]),
                    thresholds[sorted_keys[1]][0],
                    "s" if thresholds[sorted_keys[1]][0] - 1 > 1 else "",
                    latex_template.format_numbers(sorted_keys[1]),
                    latex_template.texttt(
                        latex_template.sanitize_tex(os.path.basename(
                            thresholds[sorted_keys[0]][1],
                        )),
                    ),
                    latex_template.texttt(
                        latex_template.sanitize_tex(os.path.basename(
                            thresholds[sorted_keys[1]][1],
                        )),
                    ),
                )
            )
            print >>o_file, latex_template.wrap_lines(text)
    except IOError:
        msg = "{}: cannot write LaTeX summary".format(latex_file)
        raise ProgramError(msg)

    # Writing the summary results
    with open(os.path.join(base_dir, "results_summary.txt"), "a") as o_file:
        print >>o_file, "# {}".format(script_prefix)
        print >>o_file, "Number of markers flagged for HW"
        print >>o_file, "  - {}\t{:,d}".format(
            sorted_keys[0],
            thresholds[sorted_keys[0]][0],
        )
        print >>o_file, "  - {}\t{:,d}".format(
            sorted_keys[1],
            thresholds[sorted_keys[1]][0],
        )
        print >>o_file, "---"

    # We know this step doesn't produce a new data set, so we return the old
    # prefix and the old in_type
    return _StepResult(
        next_file=in_prefix,
        next_file_type=required_type,
        latex_summary=latex_file,
        description=flag_hw.desc,
        long_description=flag_hw.long_desc,
        graph_path=None,
    )
def follow_shortlinks(shortlinks):
    """Follow redirects in list of shortlinks, return dict of resulting URLs"""
    links_followed = {}
    for shortlink in shortlinks:
        url = shortlink
        request_result = requests.get(url)
        redirect_history = request_result.history
        # history might look like:
        # (<Response [301]>, <Response [301]>)
        # where each response object has a URL
        all_urls = []
        for redirect in redirect_history:
            all_urls.append(redirect.url)
        # append the final URL that we finish with
        all_urls.append(request_result.url)
        links_followed[shortlink] = all_urls
    return links_followed
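
# Illustrative only: with a hypothetical shortlink that redirects once, the
# result maps the shortlink to the full redirect chain (requires the
# requests package and network access):
#
#   follow_shortlinks(['https://sho.rt/abc'])
#   # -> {'https://sho.rt/abc': ['https://sho.rt/abc', 'https://example.com/']}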
def watch(self):
    """True if the MessageHandler is being watched."""
    return bool(lib.EnvGetDefmessageHandlerWatch(
        self._env, self._cls, self._idx))
def _GetTimeElementsTuple(self, timestamp):
    """Retrieves a time elements tuple from the timestamp.

    A Symantec log timestamp consists of six hexadecimal octets, that represent:
        First octet: Number of years since 1970
        Second octet: Month, where January is represented by 0
        Third octet: Day of the month
        Fourth octet: Number of hours
        Fifth octet: Number of minutes
        Sixth octet: Number of seconds

    For example, 200A13080122 represents November 19, 2002, 8:01:34 AM.

    Args:
        timestamp (str): hexadecimal encoded date and time values.

    Returns:
        tuple: containing:
            year (int): year.
            month (int): month, where 1 represents January.
            day_of_month (int): day of month, where 1 is the first day of the month.
            hours (int): hours.
            minutes (int): minutes.
            seconds (int): seconds.
    """
    year, month, day_of_month, hours, minutes, seconds = (
        int(hexdigit[0] + hexdigit[1], 16) for hexdigit in zip(
            timestamp[::2], timestamp[1::2]))

    return (year + 1970, month + 1, day_of_month, hours, minutes, seconds)
def get_service_certificate(self, service_name, thumbalgorithm, thumbprint):
    '''
    Returns the public data for the specified X.509 certificate associated
    with a hosted service.

    service_name:
        Name of the hosted service.
    thumbalgorithm:
        The algorithm for the certificate's thumbprint.
    thumbprint:
        The hexadecimal representation of the thumbprint.
    '''
    _validate_not_none('service_name', service_name)
    _validate_not_none('thumbalgorithm', thumbalgorithm)
    _validate_not_none('thumbprint', thumbprint)
    return self._perform_get(
        '/' + self.subscription_id + '/services/hostedservices/' +
        _str(service_name) + '/certificates/' +
        _str(thumbalgorithm) + '-' + _str(thumbprint) + '',
        Certificate)
def equipped(self):
    """ Returns a dict of classes that have the item equipped and in what slot """
    equipped = self._item.get("equipped", [])

    # WORKAROUND: 0 is probably an off-by-one error
    # WORKAROUND: 65535 actually serves a purpose (according to Valve)
    return dict([(eq["class"], eq["slot"]) for eq in equipped if eq["class"] != 0 and eq["slot"] != 65535])
def calcparams_cec(self, effective_irradiance, temp_cell, **kwargs):
    """
    Use the :py:func:`calcparams_cec` function, the input
    parameters and ``self.module_parameters`` to calculate the
    module currents and resistances.

    Parameters
    ----------
    effective_irradiance : numeric
        The irradiance (W/m2) that is converted to photocurrent.

    temp_cell : float or Series
        The average cell temperature of cells within a module in C.

    **kwargs
        See pvsystem.calcparams_cec for details

    Returns
    -------
    See pvsystem.calcparams_cec for details
    """
    kwargs = _build_kwargs(['a_ref', 'I_L_ref', 'I_o_ref', 'R_sh_ref',
                            'R_s', 'alpha_sc', 'Adjust', 'EgRef', 'dEgdT',
                            'irrad_ref', 'temp_ref'],
                           self.module_parameters)

    return calcparams_cec(effective_irradiance, temp_cell, **kwargs)
def signal_wrapper(f):
    """Decorator converts function's arguments from dbus types to python."""
    @wraps(f)
    def wrapper(*args, **kwds):
        args = map(convert, args)
        kwds = {convert(k): convert(v) for k, v in kwds.items()}
        return f(*args, **kwds)
    return wrapper
def variational_expectations(self, Fmu, Fvar, Y, epsilon=None):
    r"""
    Compute the expected log density of the data, given a Gaussian
    distribution for the function values.

    if
        q(f) = N(Fmu, Fvar)  -  Fmu: N x D,  Fvar: N x D
    and this object represents
        p(y|f)  -  Y: N x 1
    then this method computes
        \int (\log p(y|f)) q(f) df.

    Here, we implement a default Monte Carlo quadrature routine.
    """
    return self._mc_quadrature(self.logp, Fmu, Fvar, Y=Y, epsilon=epsilon)
def ubridge_delete_bridge(self, name):
    """
    :param name: Delete the bridge with this name
    """
    if self.ubridge:
        yield from self._ubridge_send("bridge delete {name}".format(name=name))
def parse(self, data):
    # type: (bytes) -> None
    '''
    Parse the passed in data into a UDF Partition Header Descriptor.

    Parameters:
     data - The data to parse.
    Returns:
     Nothing.
    '''
    if self._initialized:
        raise pycdlibexception.PyCdlibInternalError('UDF Partition Header Descriptor already initialized')

    (unalloc_table_length, unalloc_table_pos, unalloc_bitmap_length,
     unalloc_bitmap_pos, part_integrity_table_length,
     part_integrity_table_pos, freed_table_length, freed_table_pos,
     freed_bitmap_length, freed_bitmap_pos,
     reserved_unused) = struct.unpack_from(self.FMT, data, 0)

    if unalloc_table_length != 0:
        raise pycdlibexception.PyCdlibInvalidISO('Partition Header unallocated table length not 0')
    if unalloc_table_pos != 0:
        raise pycdlibexception.PyCdlibInvalidISO('Partition Header unallocated table position not 0')
    if unalloc_bitmap_length != 0:
        raise pycdlibexception.PyCdlibInvalidISO('Partition Header unallocated bitmap length not 0')
    if unalloc_bitmap_pos != 0:
        raise pycdlibexception.PyCdlibInvalidISO('Partition Header unallocated bitmap position not 0')
    if part_integrity_table_length != 0:
        raise pycdlibexception.PyCdlibInvalidISO('Partition Header partition integrity length not 0')
    if part_integrity_table_pos != 0:
        raise pycdlibexception.PyCdlibInvalidISO('Partition Header partition integrity position not 0')
    if freed_table_length != 0:
        raise pycdlibexception.PyCdlibInvalidISO('Partition Header freed table length not 0')
    if freed_table_pos != 0:
        raise pycdlibexception.PyCdlibInvalidISO('Partition Header freed table position not 0')
    if freed_bitmap_length != 0:
        raise pycdlibexception.PyCdlibInvalidISO('Partition Header freed bitmap length not 0')
    if freed_bitmap_pos != 0:
        raise pycdlibexception.PyCdlibInvalidISO('Partition Header freed bitmap position not 0')

    self._initialized = True
async def revoke_cred(self, rr_id: str, cr_id) -> int:
    """
    Revoke credential that input revocation registry identifier and
    credential revocation identifier specify.

    Return (epoch seconds) time of revocation.

    Raise AbsentTails if no tails file is available for input
    revocation registry identifier. Raise BadRevocation if issuer cannot
    revoke specified credential for any other reason (e.g., did not issue it,
    already revoked it).

    :param rr_id: revocation registry identifier
    :param cr_id: credential revocation identifier
    :return: time of revocation, in epoch seconds
    """
    LOGGER.debug('Issuer.revoke_cred >>> rr_id: %s, cr_id: %s', rr_id, cr_id)

    tails_reader_handle = (await Tails(
        self._dir_tails,
        *rev_reg_id2cred_def_id__tag(rr_id)).open()).reader_handle
    try:
        rrd_json = await anoncreds.issuer_revoke_credential(
            self.wallet.handle,
            tails_reader_handle,
            rr_id,
            cr_id)
    except IndyError as x_indy:
        LOGGER.debug(
            'Issuer.revoke_cred: <!< Could not revoke revoc reg id %s, cred rev id %s: indy error code %s',
            rr_id,
            cr_id,
            x_indy.error_code)
        raise BadRevocation(
            'Could not revoke revoc reg id {}, cred rev id {}: indy error code {}'.format(
                rr_id,
                cr_id,
                x_indy.error_code))

    rre_req_json = await ledger.build_revoc_reg_entry_request(self.did, rr_id, 'CL_ACCUM', rrd_json)
    resp_json = await self._sign_submit(rre_req_json)
    resp = json.loads(resp_json)

    rv = resp['result']['txnMetadata']['txnTime']
    LOGGER.debug('Issuer.revoke_cred <<< %s', rv)
    return rv
def setattr(self, name, val):
    """
    Change the attribute value of the UI element. Not all attributes can be cast to text. If changing the
    immutable attributes or attributes which do not exist, the InvalidOperationException exception is raised.

    Args:
        name: attribute name
        val: new attribute value to cast

    Raises:
        InvalidOperationException: when it fails to set the attribute on UI element
    """
    nodes = self._do_query(multiple=False)
    try:
        return self.poco.agent.hierarchy.setAttr(nodes, name, val)
    except UnableToSetAttributeException as e:
        raise InvalidOperationException('"{}" of "{}"'.format(str(e), self))
def add_to_tor(self, protocol):
    '''
    Returns a Deferred which fires with 'self' after at least one
    descriptor has been uploaded. Errback if no descriptor upload
    succeeds.
    '''
    upload_d = _await_descriptor_upload(protocol, self, progress=None, await_all_uploads=False)

    # _add_ephemeral_service takes a TorConfig but we don't have
    # that here .. and also we're just keeping this for
    # backwards-compatibility anyway so instead of trying to
    # re-use that helper I'm leaving this original code here. So
    # this is what it supports and that's that:
    ports = ' '.join(map(lambda x: 'Port=' + x.strip(), self._ports))
    cmd = 'ADD_ONION %s %s' % (self._key_blob, ports)

    ans = yield protocol.queue_command(cmd)
    ans = find_keywords(ans.split('\n'))
    self.hostname = ans['ServiceID'] + '.onion'
    if self._key_blob.startswith('NEW:'):
        self.private_key = ans['PrivateKey']
    else:
        self.private_key = self._key_blob

    log.msg('Created hidden-service at', self.hostname)
    log.msg("Created '{}', waiting for descriptor uploads.".format(self.hostname))
    yield upload_d
def _delete(self, pos, idx):
    """Delete the item at the given (pos, idx).

    Combines lists that are less than half the load level.

    Updates the index when the sublist length is more than half the load
    level. This requires decrementing the nodes in a traversal from the leaf
    node to the root. For an example traversal see self._loc.
    """
    _maxes, _lists, _index = self._maxes, self._lists, self._index

    lists_pos = _lists[pos]

    del lists_pos[idx]
    self._len -= 1

    len_lists_pos = len(lists_pos)

    if len_lists_pos > self._half:
        _maxes[pos] = lists_pos[-1]

        if _index:
            child = self._offset + pos
            while child > 0:
                _index[child] -= 1
                child = (child - 1) >> 1
            _index[0] -= 1
    elif len(_lists) > 1:
        if not pos:
            pos += 1

        prev = pos - 1
        _lists[prev].extend(_lists[pos])
        _maxes[prev] = _lists[prev][-1]

        del _maxes[pos]
        del _lists[pos]
        del _index[:]

        self._expand(prev)
    elif len_lists_pos:
        _maxes[pos] = lists_pos[-1]
    else:
        del _maxes[pos]
        del _lists[pos]
        del _index[:]
def unmangle_code_names(self, co, classname):
    """Remove __ from the end of _name_ if it starts with __classname__
    return the "unmangled" name.
    """
    if classname:
        classname = '_' + classname.lstrip('_') + '__'

        free = [self.unmangle_name(name, classname)
                for name in (co.co_cellvars + co.co_freevars)]
        names = [self.unmangle_name(name, classname)
                 for name in co.co_names]
        varnames = [self.unmangle_name(name, classname)
                    for name in co.co_varnames]
    else:
        free = co.co_cellvars + co.co_freevars
        names = co.co_names
        varnames = co.co_varnames

    return free, names, varnames
def factor_aug(z, DPhival, G, A):
    M, N = G.shape
    P, N = A.shape

    """Multiplier for inequality constraints"""
    l = z[N+P:N+P+M]

    """Slacks"""
    s = z[N+P+M:]

    """Sigma matrix"""
    SIG = diags(l/s, 0)

    """Condensed system"""
    if issparse(DPhival):
        if not issparse(A):
            A = csr_matrix(A)
        H = DPhival + mydot(G.T, mydot(SIG, G))
        J = bmat([[H, A.T], [A, None]])
    else:
        if issparse(A):
            A = A.toarray()
        J = np.zeros((N+P, N+P))
        J[0:N, 0:N] = DPhival + mydot(G.T, mydot(SIG, G))
        J[0:N, N:] = A.T
        J[N:, 0:N] = A

    LU = myfactor(J)
    return LU
def register_layer(self, layer):
    """
    Register the layer so that its params will be trained.
    But the output of the layer will not be stacked.
    """
    if type(layer) == Block:
        layer.fix()
    self.parameter_count += layer.parameter_count
    self.parameters.extend(layer.parameters)
    self.free_parameters.extend(layer.free_parameters)
    self.training_monitors.extend(layer.training_monitors)
    self.testing_monitors.extend(layer.testing_monitors)
    self.updates.extend(layer.updates)
    self.training_updates.extend(layer.training_updates)
    self.input_variables.extend(layer.external_inputs)
    self.target_variables.extend(layer.external_targets)
    self.training_callbacks.extend(layer.training_callbacks)
    self.testing_callbacks.extend(layer.testing_callbacks)
    self.epoch_callbacks.extend(layer.epoch_callbacks)
def hsll(wnd, res=20, neighbors=2):
"""
Highest Side Lobe Level (dB).
Parameters
----------
res :
Zero-padding factor. 1 for no zero-padding, 2 for twice the length, etc..
neighbors :
Number of neighbors needed by ``get_peaks`` to define a peak.
"""
spectrum = dB20(rfft(wnd, res * len(wnd)))
first_peak = next(get_peaks(spectrum, neighbors=neighbors))
return max(spectrum[first_peak:]) - spectrum[0] | 0.011655 |
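A self-contained numpy-only approximation of the same measurement (hypothetical reimplementation; `dB20`, `rfft` and `get_peaks` belong to the library this snippet comes from):
import numpy as np

def hsll_np(wnd, res=20):
    # dB spectrum, then the maximum past the first null, i.e. past the main lobe
    spec = 20 * np.log10(np.abs(np.fft.rfft(wnd, res * len(wnd))) + 1e-300)
    first_null = int(np.argmax(np.diff(spec) > 0))  # first rising sample
    return spec[first_null:].max() - spec[0]

print(round(hsll_np(np.hamming(51)), 1))  # roughly -43 dB for a Hamming window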
def number_check(check, return_number=True):
"""
    Verify that the item entered is a number.
    Args:
        check: The value to check.
        return_number: If True, keep prompting until the input parses as a
            number and return that input; if False, return True or False.
    Returns: See return_number for the return options.
"""
try:
int(check)
good = True
except ValueError:
LOGGER.critical('Function number_check ValueError {item}'.format(item=check))
good = False
if return_number:
while not good:
print("That is not a number.")
print("Please try again.")
check = input("Please enter a number?: ")
try:
int(check)
good = True
except ValueError:
LOGGER.critical('Function number_check ValueError {item}'.format(item=check))
good = False
return check
else:
return good | 0.004215 |
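Usage, assuming `number_check` is importable and that `LOGGER` is the module-level logger the function expects:
import logging
LOGGER = logging.getLogger(__name__)  # assumed module-level logger

print(number_check("42", return_number=False))   # True
print(number_check("abc", return_number=False))  # False, after a critical log entry
# with return_number=True, invalid input starts an interactive re-prompt loop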
def connect(
self,
funds: typing.TokenAmount,
initial_channel_target: int = 3,
joinable_funds_target: float = 0.4,
):
"""Connect to the network.
Subsequent calls to `connect` are allowed, but will only affect the spendable
funds and the connection strategy parameters for the future. `connect` will not
close any channels.
Note: the ConnectionManager does not discriminate manually opened channels from
automatically opened ones. If the user manually opened channels, those deposit
amounts will affect the funding per channel and the number of new channels opened.
Args:
funds: Target amount of tokens spendable to join the network.
initial_channel_target: Target number of channels to open.
joinable_funds_target: Amount of funds not initially assigned.
"""
token = self.raiden.chain.token(self.token_address)
token_balance = token.balance_of(self.raiden.address)
if token_balance < funds:
raise InvalidAmount(
f'Insufficient balance for token {pex(self.token_address)}',
)
if funds <= 0:
raise InvalidAmount(
'The funds to use in the connection need to be a positive integer',
)
if joinable_funds_target < 0 or joinable_funds_target > 1:
raise InvalidAmount(
f'joinable_funds_target should be between 0 and 1. Given: {joinable_funds_target}',
)
with self.lock:
self.funds = funds
self.initial_channel_target = initial_channel_target
self.joinable_funds_target = joinable_funds_target
log_open_channels(self.raiden, self.registry_address, self.token_address, funds)
qty_network_channels = views.count_token_network_channels(
views.state_from_raiden(self.raiden),
self.registry_address,
self.token_address,
)
if not qty_network_channels:
log.info(
'Bootstrapping token network.',
node=pex(self.raiden.address),
network_id=pex(self.registry_address),
token_id=pex(self.token_address),
)
self.api.channel_open(
self.registry_address,
self.token_address,
self.BOOTSTRAP_ADDR,
)
else:
self._open_channels() | 0.004224 |
def search(ctx, tags, prefix=None):
'''
List all archives matching tag search criteria
'''
_generate_api(ctx)
    for match in ctx.obj.api.search(*tags, prefix=prefix):
        click.echo(match) | 0.003922 |
def remove_zero_normals(self):
"""Removes normal vectors with a zero magnitude.
Note
----
This returns nothing and updates the NormalCloud in-place.
"""
points_of_interest = np.where(np.linalg.norm(self._data, axis=0) != 0.0)[0]
self._data = self._data[:, points_of_interest] | 0.009036 |
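The same column-filtering idea in isolation, on a plain array (illustrative data only):
import numpy as np

data = np.array([[1.0, 0.0, 0.0],
                 [0.0, 0.0, 2.0],
                 [0.0, 0.0, 0.0]])  # columns are normals; the middle one is zero
keep = np.where(np.linalg.norm(data, axis=0) != 0.0)[0]
print(data[:, keep])  # the all-zero column is dropped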
def on_separate_dimensions(self):
"""
        Checks whether the kernels in the combination act on disjoint subsets
        of dimensions. It is hard to assess whether two slice objects overlap,
        so this conservatively returns False whenever any kernel's active_dims
        is a slice.
        :return: Boolean indicator.
"""
if np.any([isinstance(k.active_dims, slice) for k in self.kernels]):
# Be conservative in the case of a slice object
return False
else:
dimlist = [k.active_dims for k in self.kernels]
overlapping = False
for i, dims_i in enumerate(dimlist):
for dims_j in dimlist[i + 1:]:
if np.any(dims_i.reshape(-1, 1) == dims_j.reshape(1, -1)):
overlapping = True
return not overlapping | 0.002415 |
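The all-pairs overlap test used in the inner loop, shown in isolation (made-up dimension lists):
import numpy as np

dims_i = np.array([0, 1])  # hypothetical active dimensions of one kernel
dims_j = np.array([2, 3])  # hypothetical active dimensions of another
print(np.any(dims_i.reshape(-1, 1) == dims_j.reshape(1, -1)))  # False -> disjoint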
def write(self, addr, data):
'''Write to dummy memory
Parameters
----------
addr : int
The register address.
data : list, tuple
Data (byte array) to be written.
Returns
-------
        None
'''
logger.debug(
"Dummy SiTransferLayer.write addr: %s data: %s" % (hex(addr), data))
for curr_addr, d in enumerate(data, start=addr):
self.mem[curr_addr] = array.array('B', [d])[0] | 0.005941 |
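The `array.array('B', ...)` round-trip doubles as a byte-range check; a quick demonstration:
import array

print(array.array('B', [200])[0])   # 200
try:
    array.array('B', [300])         # unsigned bytes must be in 0..255
except OverflowError as err:
    print("rejected:", err)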
def read_text_file(filename):
# type: (str) -> str
"""Return the contents of *filename*.
Try to decode the file contents with utf-8, the preferred system encoding
(e.g., cp1252 on some Windows machines), and latin1, in that order.
Decoding a byte string with latin1 will never raise an error. In the worst
case, the returned string will contain some garbage characters.
"""
with open(filename, 'rb') as fp:
data = fp.read()
encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
for enc in encodings:
try:
# https://github.com/python/mypy/issues/1174
data = data.decode(enc) # type: ignore
except UnicodeDecodeError:
continue
break
assert not isinstance(data, bytes) # Latin1 should have worked.
return data | 0.001188 |
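The fallback strategy in isolation (the sample bytes are deliberately invalid UTF-8):
data = "café".encode("latin1")   # b'caf\xe9', not valid UTF-8
for enc in ["utf-8", "latin1"]:
    try:
        text = data.decode(enc)
    except UnicodeDecodeError:
        continue
    break
print(text)  # 'café', recovered by the latin1 fallback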
def _parseParams(self):
"""
Parse parameters from their string HTML representation to dictionary.
Result is saved to the :attr:`params` property.
"""
# check if there are any parameters
if " " not in self._element or "=" not in self._element:
return
# remove '<' & '>'
params = self._element.strip()[1:-1].strip()
# remove tagname
offset = params.find(self.getTagName()) + len(self.getTagName())
params = params[offset:].strip()
        # parser state machine
        next_state = 0
        key = ""
        value = ""
        end_quote = ""
        buff = ["", ""]
        for c in params:
            if next_state == 0:  # key
                if c.strip() != "":  # skip whitespace (safer than listing space, tab, etc.)
                    if c == "=":
                        next_state = 1
                    else:
                        key += c
elif next_state == 1: # value decisioner
if c.strip() != "": # skip whitespaces
if c == "'" or c == '"':
next_state = 3
end_quote = c
else:
next_state = 2
value += c
elif next_state == 2: # one word parameter without quotes
if c.strip() == "":
next_state = 0
self.params[key] = value
key = ""
value = ""
else:
value += c
elif next_state == 3: # quoted string
                if c == end_quote and (buff[0] != "\\" or (buff[0] == "\\" and buff[1] == "\\")):
next_state = 0
self.params[key] = unescape(value, end_quote)
key = ""
value = ""
end_quote = ""
else:
value += c
buff = _rotate_buff(buff)
buff[0] = c
if key:
if end_quote and value.strip():
self.params[key] = unescape(value, end_quote)
else:
self.params[key] = value
if "/" in self.params.keys():
del self.params["/"]
self._isnonpairtag = True | 0.001269 |
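For well-formed input, the hand-rolled state machine above should closely match the standard library's attribute parsing, which makes a handy reference:
from html.parser import HTMLParser

class AttrDump(HTMLParser):
    def handle_starttag(self, tag, attrs):
        print(dict(attrs))

AttrDump().feed("<a href=\"http://x\" class='y' checked>")
# {'href': 'http://x', 'class': 'y', 'checked': None}
# (the parser above stores "" rather than None for valueless parameters)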
def get_corrected_commands(command):
"""Returns generator with sorted and unique corrected commands.
:type command: thefuck.types.Command
:rtype: Iterable[thefuck.types.CorrectedCommand]
"""
corrected_commands = (
corrected for rule in get_rules()
if rule.is_match(command)
for corrected in rule.get_corrected_commands(command))
return organize_commands(corrected_commands) | 0.002364 |
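`organize_commands` is not shown; a plausible sketch of its contract (deduplicate while respecting rule priority - hypothetical, not the library's actual implementation):
def organize_commands(corrected_commands):
    # hypothetical sketch: drop duplicate scripts, yield in priority order
    seen = set()
    for command in sorted(corrected_commands, key=lambda c: c.priority):
        if command.script not in seen:
            seen.add(command.script)
            yield command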
def build_chain(self, **kwargs):
"""
        Builds a new patterns chain.
        :param kwargs: chain options, merged with the chain and global defaults
        :return: the new chain
        :rtype: Chain
"""
set_defaults(self._chain_defaults, kwargs)
set_defaults(self._defaults, kwargs)
return Chain(self, **kwargs) | 0.005698 |
def read(cls, source, *args, **kwargs):
"""Read data into a `TimeSeries`
Arguments and keywords depend on the output format, see the
online documentation for full details for each format, the parameters
below are common to most formats.
Parameters
----------
source : `str`, `list`
Source of data, any of the following:
- `str` path of single data file,
- `str` path of LAL-format cache file,
- `list` of paths.
name : `str`, `~gwpy.detector.Channel`
the name of the channel to read, or a `Channel` object.
start : `~gwpy.time.LIGOTimeGPS`, `float`, `str`, optional
GPS start time of required data, defaults to start of data found;
any input parseable by `~gwpy.time.to_gps` is fine
end : `~gwpy.time.LIGOTimeGPS`, `float`, `str`, optional
GPS end time of required data, defaults to end of data found;
any input parseable by `~gwpy.time.to_gps` is fine
format : `str`, optional
source format identifier. If not given, the format will be
detected if possible. See below for list of acceptable
formats.
nproc : `int`, optional
number of parallel processes to use, serial process by
default.
pad : `float`, optional
value with which to fill gaps in the source data,
by default gaps will result in a `ValueError`.
Notes
-----"""
from .io.core import read as timeseries_reader
return timeseries_reader(cls, source, *args, **kwargs) | 0.001202 |
def _set_guard(self, v, load=False):
"""
Setter method for guard, mapped from YANG variable /interface/port_channel/spanning_tree/guard (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_guard is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_guard() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=guard.guard, is_container='container', presence=False, yang_name="guard", rest_name="guard", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u"Change an interface's spanning tree guard mode", u'display-when': u'((/protocol/spanning-tree/stp) or(/protocol/spanning-tree/rstp))', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """guard must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=guard.guard, is_container='container', presence=False, yang_name="guard", rest_name="guard", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u"Change an interface's spanning tree guard mode", u'display-when': u'((/protocol/spanning-tree/stp) or(/protocol/spanning-tree/rstp))', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-xstp', defining_module='brocade-xstp', yang_type='container', is_config=True)""",
})
self.__guard = t
if hasattr(self, '_set'):
self._set() | 0.005467 |
def get_plain_image_as_widget(self):
"""Used for generating thumbnails. Does not include overlaid
graphics.
"""
arr = self.getwin_array(order=self.rgb_order)
# convert numpy array to native image widget
image_w = self._get_wimage(arr)
return image_w | 0.006515 |
def status_mute(self, id):
"""
Mute notifications for a status.
Returns a `toot dict`_ with the now muted status
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/mute'.format(str(id))
return self.__api_request('POST', url) | 0.006944 |
def SetCursorPos(x: int, y: int) -> bool:
"""
SetCursorPos from Win32.
Set mouse cursor to point x, y.
x: int.
y: int.
Return bool, True if succeed otherwise False.
"""
return bool(ctypes.windll.user32.SetCursorPos(x, y)) | 0.003953 |
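A guarded usage example; `ctypes.windll` exists only on Windows, so the call is skipped elsewhere:
import ctypes
import sys

if sys.platform == "win32":
    moved = bool(ctypes.windll.user32.SetCursorPos(200, 200))
    print("cursor moved:", moved)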
def extend_selection():
"""Checks is the selection is to be extended
The selection is to be extended, if a special modifier key (typically <Ctrl>) is being pressed.
:return: If to extend the selection
:rtype: True
"""
from rafcon.gui.singleton import main_window_controller
currently_pressed_keys = main_window_controller.currently_pressed_keys if main_window_controller else set()
if any(key in currently_pressed_keys for key in [constants.EXTEND_SELECTION_KEY,
constants.EXTEND_SELECTION_KEY_ALT]):
return True
return False | 0.008013 |
def decompose_code(code):
"""
Decomposes a MARC "code" into tag, ind1, ind2, subcode
"""
code = "%-6s" % code
    ind1 = code[3:4]
    if ind1 == " ":
        ind1 = "_"
    ind2 = code[4:5]
    if ind2 == " ":
        ind2 = "_"
    subcode = code[5:6]
    if subcode == " ":
        subcode = None
return (code[0:3], ind1, ind2, subcode) | 0.01194 |
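Two illustrative calls, assuming `decompose_code` is in scope:
print(decompose_code("100C5a"))  # ('100', 'C', '5', 'a')
print(decompose_code("041"))     # ('041', '_', '_', None), after padding to 6 chars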
def cache_meta(request, cache_key, start_index=0):
"""Inspect request for objects in _ultracache and set appropriate entries
in Django's cache."""
path = request.get_full_path()
# todo: cache headers on the request since they never change during the
# request.
# Reduce headers to the subset as defined by the settings
headers = OrderedDict()
for k, v in sorted(request.META.items()):
if (k == "HTTP_COOKIE") and CONSIDER_COOKIES:
cookie = SimpleCookie()
cookie.load(v)
headers["cookie"] = "; ".join([
"%s=%s" % (k, morsel.value) for k, morsel \
in sorted(cookie.items()) if k in CONSIDER_COOKIES
])
elif k.startswith("HTTP_"):
k = k[5:].replace("_", "-").lower()
if k in CONSIDER_HEADERS:
headers[k] = v
# Lists needed for cache.get_many
to_set_get_keys = []
to_set_paths_get_keys = []
to_set_content_types_get_keys = []
to_set_content_types_paths_get_keys = []
# Dictionaries needed for cache.set_many
to_set = {}
to_set_paths = {}
to_set_content_types = {}
to_set_content_types_paths = {}
to_delete = []
to_set_objects = []
for ctid, obj_pk in request._ultracache[start_index:]:
# The object appears in these cache entries. If the object is modified
# then these cache entries are deleted.
key = "ucache-%s-%s" % (ctid, obj_pk)
if key not in to_set_get_keys:
to_set_get_keys.append(key)
# The object appears in these paths. If the object is modified then any
# caches that are read from when browsing to this path are cleared.
key = "ucache-pth-%s-%s" % (ctid, obj_pk)
if key not in to_set_paths_get_keys:
to_set_paths_get_keys.append(key)
# The content type appears in these cache entries. If an object of this
# content type is created then these cache entries are cleared.
key = "ucache-ct-%s" % ctid
if key not in to_set_content_types_get_keys:
to_set_content_types_get_keys.append(key)
# The content type appears in these paths. If an object of this content
# type is created then any caches that are read from when browsing to
# this path are cleared.
key = "ucache-ct-pth-%s" % ctid
if key not in to_set_content_types_paths_get_keys:
to_set_content_types_paths_get_keys.append(key)
# A list of objects that contribute to a cache entry
tu = (ctid, obj_pk)
if tu not in to_set_objects:
to_set_objects.append(tu)
# todo: rewrite to handle absence of get_many
di = cache.get_many(to_set_get_keys)
for key in to_set_get_keys:
v = di.get(key, None)
keep = []
if v is not None:
keep, toss = reduce_list_size(v)
if toss:
to_set[key] = keep
to_delete.extend(toss)
if cache_key not in keep:
if key not in to_set:
to_set[key] = keep
to_set[key] = to_set[key] + [cache_key]
if to_set == di:
to_set = {}
di = cache.get_many(to_set_paths_get_keys)
for key in to_set_paths_get_keys:
v = di.get(key, None)
keep = []
if v is not None:
keep, toss = reduce_list_size(v)
if toss:
to_set_paths[key] = keep
if [path, headers] not in keep:
if key not in to_set_paths:
to_set_paths[key] = keep
to_set_paths[key] = to_set_paths[key] + [[path, headers]]
if to_set_paths == di:
to_set_paths = {}
di = cache.get_many(to_set_content_types_get_keys)
for key in to_set_content_types_get_keys:
v = di.get(key, None)
keep = []
if v is not None:
keep, toss = reduce_list_size(v)
if toss:
to_set_content_types[key] = keep
to_delete.extend(toss)
if cache_key not in keep:
if key not in to_set_content_types:
to_set_content_types[key] = keep
to_set_content_types[key] = to_set_content_types[key] + [cache_key]
if to_set_content_types == di:
to_set_content_types = {}
di = cache.get_many(to_set_content_types_paths_get_keys)
for key in to_set_content_types_paths_get_keys:
v = di.get(key, None)
keep = []
if v is not None:
keep, toss = reduce_list_size(v)
if toss:
to_set_content_types_paths[key] = keep
if [path, headers] not in keep:
if key not in to_set_content_types_paths:
to_set_content_types_paths[key] = keep
to_set_content_types_paths[key] = to_set_content_types_paths[key] \
+ [[path, headers]]
if to_set_content_types_paths == di:
to_set_content_types_paths = {}
# Deletion must happen first because set may set some of these keys
if to_delete:
try:
cache.delete_many(to_delete)
except NotImplementedError:
for k in to_delete:
cache.delete(k)
# Do one set_many
di = {}
di.update(to_set)
del to_set
di.update(to_set_paths)
del to_set_paths
di.update(to_set_content_types)
del to_set_content_types
di.update(to_set_content_types_paths)
del to_set_content_types_paths
if to_set_objects:
di[cache_key + "-objs"] = to_set_objects
if di:
try:
cache.set_many(di, 86400)
except NotImplementedError:
for k, v in di.items():
cache.set(k, v, 86400) | 0.000523 |
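`reduce_list_size` is assumed above but not shown; a minimal hypothetical sketch of what it must return (the kept entries plus a "toss" list the caller can purge):
def reduce_list_size(lst, max_size=25):
    # hypothetical: keep the newest max_size entries, report the rest as toss
    if len(lst) <= max_size:
        return lst, []
    return lst[-max_size:], lst[:-max_size]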
def modify(self, request, nodes, namespace, root_id, post_cut, breadcrumb):
"""
Actual modifier function
:param request: request
:param nodes: complete list of nodes
:param namespace: Menu namespace
:param root_id: eventual root_id
:param post_cut: flag for modifier stage
:param breadcrumb: flag for modifier stage
        :return: list of nodes
"""
app = None
config = None
if getattr(request, 'current_page', None) and request.current_page.application_urls:
app = apphook_pool.get_apphook(request.current_page.application_urls)
if app and app.app_config:
namespace = resolve(request.path).namespace
if not self._config.get(namespace, False):
self._config[namespace] = app.get_config(namespace)
config = self._config[namespace]
try:
if config and (
not isinstance(config, BlogConfig) or
config.menu_structure != MENU_TYPE_CATEGORIES
):
return nodes
except AttributeError: # pragma: no cover
# in case `menu_structure` is not present in config
return nodes
if post_cut:
return nodes
current_post = getattr(request, get_setting('CURRENT_POST_IDENTIFIER'), None)
category = None
if current_post and current_post.__class__ == Post:
category = current_post.categories.first()
if not category:
return nodes
for node in nodes:
if '{0}-{1}'.format(category.__class__.__name__, category.pk) == node.id:
node.selected = True
return nodes | 0.003456 |
def create_event(self, institute, case, user, link, category, verb,
subject, level='specific', variant=None, content=None,
panel=None):
"""Create a Event with the parameters given.
Arguments:
institute (dict): A institute
case (dict): A case
user (dict): A User
link (str): The url to be used in the event
category (str): case or variant
verb (str): What type of event
subject (str): What is operated on
level (str): 'specific' or 'global'. Default is 'specific'
variant (dict): A variant
content (str): The content of the comment
Returns:
event(dict): The inserted event
"""
variant = variant or {}
event = dict(
institute=institute['_id'],
case=case['_id'],
user_id=user['_id'],
user_name=user['name'],
link=link,
category=category,
verb=verb,
subject=subject,
level=level,
variant_id=variant.get('variant_id'),
content=content,
panel=panel,
created_at=datetime.now(),
updated_at=datetime.now(),
)
LOG.debug("Saving Event")
self.event_collection.insert_one(event)
LOG.debug("Event Saved")
return event | 0.002786 |