def match(self, key=None, year=None, event=None, type='qm', number=None, round=None, simple=False):
"""
Get data on a match.
    You may either pass the match's key directly, or pass `year`, `event`, `type`, `number` (the match number), and `round` if applicable (playoffs only). The event year may be specified as part of the event key or in the `year` parameter.
:param key: Key of match to get data on. First option for specifying a match (see above).
:param year: Year in which match took place. Optional; if excluded then must be included in event key.
:param event: Key of event in which match took place. Including year is optional; if excluded then must be specified in `year` parameter.
:param type: One of 'qm' (qualifier match), 'qf' (quarterfinal), 'sf' (semifinal), 'f' (final). If unspecified, 'qm' will be assumed.
:param number: Match number. For example, for qualifier 32, you'd pass 32. For Semifinal 2 round 3, you'd pass 2.
:param round: For playoff matches, you will need to specify a round.
:param simple: Get only vital data.
:return: A single Match object.
"""
if key:
return Match(self._get('match/%s%s' % (key, '/simple' if simple else '')))
else:
return Match(self._get('match/{year}{event}_{type}{number}{round}{simple}'.format(year=year if not event[0].isdigit() else '',
event=event,
type=type,
number=number,
round=('m%s' % round) if not type == 'qm' else '',
                                                                          simple='/simple' if simple else '')))
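A hedged usage sketch of the two calling styles described in the docstring; the client object and its constructor are illustrative placeholders, not part of the source.
# Hypothetical usage (the client class and auth argument are illustrative only).
client = SomeApiClient(auth_key='...')
m1 = client.match(key='2017nyny_qm1')                      # direct lookup by match key
m2 = client.match(year=2017, event='nyny', type='sf',
                  number=2, round=3, simple=True)          # builds the key '2017nyny_sf2m3'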
def GetPatternIdTripDict(self):
"""Return a dictionary that maps pattern_id to a list of Trip objects."""
d = {}
for t in self._trips:
d.setdefault(t.pattern_id, []).append(t)
    return d
def orbit(self, orbit):
"""Initialize the propagator
Args:
orbit (Orbit)
"""
self._orbit = orbit
tle = Tle.from_orbit(orbit)
lines = tle.text.splitlines()
if len(lines) == 3:
_, line1, line2 = lines
else:
line1, line2 = lines
        self.tle = twoline2rv(line1, line2, wgs72)
def _load_resources(self):
""" Load all the native goldman resources.
The route or API endpoint will be automatically determined
based on the resource object instance passed in.
INFO: Only our Model based resources are supported when
auto-generating API endpoints.
"""
for resource in self.RESOURCES:
if isinstance(resource, goldman.ModelsResource):
route = '/%s' % resource.rtype
elif isinstance(resource, goldman.ModelResource):
route = '/%s/{rid}' % resource.rtype
elif isinstance(resource, goldman.RelatedResource):
route = '/%s/{rid}/{related}' % resource.rtype
else:
raise TypeError('unsupported resource type')
            self.add_route(*(route, resource))
def _string_like(self, patterns):
"""
Wildcard fuzzy matching function equivalent to the SQL LIKE directive. Use
% as a multiple-character wildcard or _ (underscore) as a single-character
wildcard.
Use re_search or rlike for regex-based matching.
Parameters
----------
    patterns : str or List[str]
      A pattern or list of patterns to match. If `patterns` is a list, then if
      **any** pattern matches the input, the corresponding row in the
      output is ``True``.
Returns
-------
matched : ir.BooleanColumn
"""
return functools.reduce(
operator.or_,
(
ops.StringSQLLike(self, pattern).to_expr()
for pattern in util.promote_list(patterns)
),
    )
def _parse(cls, xml_path):
"""Parse .xml file and return parsed text as a DOM Document.
:param string xml_path: File path of xml file to be parsed.
:returns xml.dom.minidom.Document parsed_xml: Document instance containing parsed xml.
"""
try:
parsed_xml = parse(xml_path)
# Minidom is a frontend for various parsers, only Exception covers ill-formed .xml for them all.
except Exception as e:
raise cls.XmlError('Error parsing xml file at {0}: {1}'.format(xml_path, e))
    return parsed_xml
def angular_errors(hyp_axes):
"""
Minimum and maximum angular errors
corresponding to 1st and 2nd axes
of PCA distribution.
Ordered as [minimum, maximum] angular error.
"""
# Not quite sure why this is sqrt but it is empirically correct
ax = N.sqrt(hyp_axes)
    return tuple(N.arctan2(ax[-1], ax[:-1]))
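A small worked example of the calculation above, assuming the same numpy-as-N alias used in the snippet.
# Minimal sketch of the calculation, using the numpy-as-N alias from the snippet.
import numpy as N
hyp_axes = N.array([4.0, 1.0, 0.25])            # illustrative hyperbolic axis lengths
ax = N.sqrt(hyp_axes)                           # -> [2.0, 1.0, 0.5]
errors = N.arctan2(ax[-1], ax[:-1])             # angle of the last axis against the others
print(N.degrees(errors))                        # ~[14.0, 26.6] degrees, [min, max] order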
def get_cpu_info(self) -> str:
'''Show device CPU information.'''
output, _ = self._execute(
'-s', self.device_sn, 'shell', 'cat', '/proc/cpuinfo')
        return output
def call(self):
'''show a file dialog'''
from MAVProxy.modules.lib.wx_loader import wx
# remap flags to wx descriptors
flag_map = {
'open': wx.FD_OPEN,
'save': wx.FD_SAVE,
'overwrite_prompt': wx.FD_OVERWRITE_PROMPT,
}
        flagsMapped = [flag_map[x] for x in self.flags]  # a list, so len() and indexing also work on Python 3
#need to OR together the elements of the flagsMapped tuple
if len(flagsMapped) == 1:
dlg = wx.FileDialog(None, self.title, '', "", self.wildcard, flagsMapped[0])
else:
dlg = wx.FileDialog(None, self.title, '', "", self.wildcard, flagsMapped[0]|flagsMapped[1])
if dlg.ShowModal() != wx.ID_OK:
return None
return "\"" + dlg.GetPath().encode('utf8') + "\"" | 0.007634 |
def validate_deprecation_semver(version_string, version_description):
"""Validates that version_string is a valid semver.
If so, returns that semver. Raises an error otherwise.
:param str version_string: A pantsbuild.pants version which affects some deprecated entity.
:param str version_description: A string used in exception messages to describe what the
`version_string` represents.
:rtype: `packaging.version.Version`
:raises DeprecationApplicationError: if the version_string parameter is invalid.
"""
if version_string is None:
raise MissingSemanticVersionError('The {} must be provided.'.format(version_description))
if not isinstance(version_string, six.string_types):
raise BadSemanticVersionError('The {} must be a version string.'.format(version_description))
try:
# NB: packaging will see versions like 1.a.0 as 1a0, and are "valid"
# We explicitly want our versions to be of the form x.y.z.
v = Version(version_string)
if len(v.base_version.split('.')) != 3:
raise BadSemanticVersionError('The given {} is not a valid version: '
'{}'.format(version_description, version_string))
if not v.is_prerelease:
raise NonDevSemanticVersionError('The given {} is not a dev version: {}\n'
'Features should generally be removed in the first `dev` release '
'of a release cycle.'.format(version_description, version_string))
return v
except InvalidVersion as e:
raise BadSemanticVersionError('The given {} {} is not a valid version: '
                                  '{}'.format(version_description, version_string, e))
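For reference, a quick illustration of the packaging.version behaviour the checks above rely on.
# Illustration of the packaging.version behaviour used by the checks above.
from packaging.version import Version
v = Version('1.13.0.dev2')
print(v.base_version)                     # '1.13.0' -> three dotted components, passes the x.y.z check
print(v.is_prerelease)                    # True    -> counts as a dev version
print(Version('1.13.0').is_prerelease)    # False   -> would raise NonDevSemanticVersionError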
def compile_fund(workbook, sheet, row, col):
"""
Compile funding entries. Iter both rows at the same time. Keep adding entries until both cells are empty.
:param obj workbook:
:param str sheet:
:param int row:
:param int col:
:return list of dict: l
"""
logger_excel.info("enter compile_fund")
l = []
temp_sheet = workbook.sheet_by_name(sheet)
while col < temp_sheet.ncols:
col += 1
try:
# Make a dictionary for this funding entry.
_curr = {
'agency': temp_sheet.cell_value(row, col),
'grant': temp_sheet.cell_value(row+1, col),
"principalInvestigator": temp_sheet.cell_value(row+2, col),
"country": temp_sheet.cell_value(row + 3, col)
}
# Make a list for all
_exist = [temp_sheet.cell_value(row, col), temp_sheet.cell_value(row+1, col),
temp_sheet.cell_value(row+2, col), temp_sheet.cell_value(row+3, col)]
# Remove all empty items from the list
_exist = [i for i in _exist if i]
# If we have all empty entries, then don't continue. Quit funding and return what we have.
if not _exist:
return l
# We have funding data. Add this funding block to the growing list.
l.append(_curr)
except IndexError as e:
logger_excel.debug("compile_fund: IndexError: sheet:{} row:{} col:{}, {}".format(sheet, row, col, e))
logger_excel.info("exit compile_fund")
    return l
def make_certifier():
"""
Decorator that can wrap raw functions to create a certifier function.
Certifier functions support partial application. If a function wrapped by
`make_certifier` is called with a value as its first argument it will be
certified immediately. If no value is passed, then it will return a
function that can be called at a later time.
Assuming that `certify_something` has been decorated by `make_certifier`:
>>> certify_something(value, foo=1, bar=2)
Is equivalent to:
>>> certifier = certify_something(foo=1, bar=2)
>>> certifier(value)
"""
def decorator(func):
@six.wraps(func)
def wrapper(value=_undefined, **kwargs):
def certify(val):
if is_enabled():
exec_func(func, val, **kwargs)
return val
if value is not _undefined:
return certify(value)
else:
return certify
return wrapper
    return decorator
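A hedged usage sketch of the partial-application behaviour described in the docstring; certify_positive and its validation rule are made up for illustration.
# Hypothetical usage (certify_positive is illustrative, not from the source).
@make_certifier()
def certify_positive(value, strict=True):
    if value < 0 or (strict and value == 0):
        raise ValueError('value must be positive')

certify_positive(3)                       # certified immediately, returns 3
check = certify_positive(strict=False)    # partial application: no value passed yet
check(0)                                  # called later with the value, returns 0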
def keep_entry_range(entry, lows, highs, converter, regex):
"""
Check if an entry falls into a desired range.
Every number in the entry will be extracted using *regex*,
if any are within a given low to high range the entry will
be kept.
Parameters
----------
entry : str
lows : iterable
Collection of low values against which to compare the entry.
highs : iterable
Collection of high values against which to compare the entry.
converter : callable
Function to convert a string to a number.
regex : regex object
Regular expression to locate numbers in a string.
Returns
-------
True if the entry should be kept, False otherwise.
"""
return any(
low <= converter(num) <= high
for num in regex.findall(entry)
for low, high in zip(lows, highs)
    )
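A minimal usage sketch, assuming keep_entry_range from above is importable.
# Minimal usage sketch, assuming keep_entry_range from above is in scope.
import re
num_regex = re.compile(r'\d+')
keep_entry_range('sample_12.txt', lows=[10], highs=[20], converter=float, regex=num_regex)
# -> True, because 12 falls inside [10, 20]
keep_entry_range('sample_42.txt', lows=[10], highs=[20], converter=float, regex=num_regex)
# -> False, no extracted number is in range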
def append(self, value):
""" Adds certificate to stack """
if not self.need_free:
raise ValueError("Stack is read-only")
if not isinstance(value, X509):
raise TypeError('StackOfX509 can contain only X509 objects')
        sk_push(self.ptr, libcrypto.X509_dup(value.cert))
def read(self, path, params=None):
"""Read the result at the given path (GET) from the CRUD API, using the optional params dictionary
as url parameters."""
return self.handleresult(self.r.get(urljoin(self.url + CRUD_PATH,
path),
                                            params=params))
def _cf_string_to_unicode(value):
"""
Creates a Unicode string from a CFString object. Used entirely for error
reporting.
Yes, it annoys me quite a lot that this function is this complex.
"""
value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
string = CoreFoundation.CFStringGetCStringPtr(
value_as_void_p,
CFConst.kCFStringEncodingUTF8
)
if string is None:
buffer = ctypes.create_string_buffer(1024)
result = CoreFoundation.CFStringGetCString(
value_as_void_p,
buffer,
1024,
CFConst.kCFStringEncodingUTF8
)
if not result:
raise OSError('Error copying C string from CFStringRef')
string = buffer.value
if string is not None:
string = string.decode('utf-8')
    return string
def as_dict(self):
"""
Returns the CTRL as a dictionary. "SITE" and "CLASS" are of
the form {'CATEGORY': {'TOKEN': value}}, the rest is of the
form 'TOKEN'/'CATEGORY': value. It gets the conventional standard
structure because primitive cells use the conventional
a-lattice parameter as the scaling factor and not the a-lattice
parameter of the primitive cell.
"""
ctrl_dict = {"@module": self.__class__.__module__,
"@class": self.__class__.__name__}
if self.header is not None:
ctrl_dict["HEADER"] = self.header
if self.version is not None:
ctrl_dict["VERS"] = self.version
sga = SpacegroupAnalyzer(self.structure)
alat = sga.get_conventional_standard_structure().lattice.a
plat = self.structure.lattice.matrix/alat
"""
The following is to find the classes (atoms that are not symmetry
equivalent, and create labels. Note that LMTO only attaches
numbers with the second atom of the same species, e.g. "Bi", "Bi1",
"Bi2", etc.
"""
eq_atoms = sga.get_symmetry_dataset()['equivalent_atoms']
ineq_sites_index = list(set(eq_atoms))
sites = []
classes = []
num_atoms = {}
for s, site in enumerate(self.structure.sites):
atom = site.specie
label_index = ineq_sites_index.index(eq_atoms[s])
if atom.symbol in num_atoms:
if label_index + 1 > sum(num_atoms.values()):
num_atoms[atom.symbol] += 1
atom_label = atom.symbol + str(num_atoms[atom.symbol] - 1)
classes.append({"ATOM": atom_label, "Z": atom.Z})
else:
num_atoms[atom.symbol] = 1
classes.append({"ATOM": atom.symbol, "Z": atom.Z})
sites.append({"ATOM": classes[label_index]["ATOM"],
"POS": site.coords/alat})
ctrl_dict.update({"ALAT": alat/bohr_to_angstrom,
"PLAT": plat,
"CLASS": classes,
"SITE": sites})
        return ctrl_dict
def beacon(config):
'''
Check if installed packages are the latest versions
and fire an event for those that have upgrades.
.. code-block:: yaml
beacons:
pkg:
- pkgs:
- zsh
- apache2
- refresh: True
'''
ret = []
_refresh = False
pkgs = []
for config_item in config:
if 'pkgs' in config_item:
pkgs += config_item['pkgs']
        if 'refresh' in config_item and config_item['refresh']:
_refresh = True
for pkg in pkgs:
_installed = __salt__['pkg.version'](pkg)
_latest = __salt__['pkg.latest_version'](pkg, refresh=_refresh)
if _installed and _latest:
_pkg = {'pkg': pkg,
'version': _latest
}
ret.append(_pkg)
    return ret
def _loadlib(lib='standard'):
"""Load rabit library."""
global _LIB
if _LIB is not None:
        warnings.warn('rabit.init call was ignored because it has'
                      ' already been initialized', stacklevel=2)
return
if lib == 'standard':
_LIB = ctypes.cdll.LoadLibrary(WRAPPER_PATH % '')
elif lib == 'mock':
_LIB = ctypes.cdll.LoadLibrary(WRAPPER_PATH % '_mock')
elif lib == 'mpi':
_LIB = ctypes.cdll.LoadLibrary(WRAPPER_PATH % '_mpi')
else:
raise Exception('unknown rabit lib %s, can be standard, mock, mpi' % lib)
_LIB.RabitGetRank.restype = ctypes.c_int
_LIB.RabitGetWorldSize.restype = ctypes.c_int
    _LIB.RabitVersionNumber.restype = ctypes.c_int
def convert(self, chain_id, residue_id, from_scheme, to_scheme):
'''The API conversion function. This converts between the different residue ID schemes.'''
# At the cost of three function calls, we ignore the case of the scheme parameters to be more user-friendly.
from_scheme = from_scheme.lower()
to_scheme = to_scheme.lower()
assert(from_scheme in ResidueRelatrix.schemes)
assert(to_scheme in ResidueRelatrix.schemes)
        return self._convert(chain_id, residue_id, from_scheme, to_scheme)
def _ip_is_usable(self, current_ip):
"""
Check if the current Tor's IP is usable.
:argument current_ip: current Tor IP
:type current_ip: str
:returns bool
"""
# Consider IP addresses only.
try:
ipaddress.ip_address(current_ip)
except ValueError:
return False
# Never use real IP.
if current_ip == self.real_ip:
return False
        # Do not allow IP reuse.
if not self._ip_is_safe(current_ip):
return False
        return True
def pad(self, esp):
"""
Add the correct amount of padding so that the data to encrypt is
exactly a multiple of the algorithm's block size.
Also, make sure that the total ESP packet length is a multiple of 4
bytes.
@param esp: an unencrypted _ESPPlain packet
@return: an unencrypted _ESPPlain packet with valid padding
"""
# 2 extra bytes for padlen and nh
data_len = len(esp.data) + 2
# according to the RFC4303, section 2.4. Padding (for Encryption)
# the size of the ESP payload must be a multiple of 32 bits
align = _lcm(self.block_size, 4)
# pad for block size
esp.padlen = -data_len % align
# Still according to the RFC, the default value for padding *MUST* be an # noqa: E501
# array of bytes starting from 1 to padlen
# TODO: Handle padding function according to the encryption algo
esp.padding = struct.pack("B" * esp.padlen, *range(1, esp.padlen + 1))
# If the following test fails, it means that this algo does not comply
# with the RFC
payload_len = len(esp.iv) + len(esp.data) + len(esp.padding) + 2
if payload_len % 4 != 0:
raise ValueError('The size of the ESP data is not aligned to 32 bits after padding.') # noqa: E501
        return esp
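A standalone sketch of the padding-length arithmetic described above; the block size of 16 is an assumption (e.g. AES-CBC), and _lcm is re-stated here only to keep the sketch self-contained.
# Sketch of the RFC 4303 padding arithmetic (block size 16 is an assumption, e.g. AES-CBC).
from math import gcd

def _lcm(a, b):
    # least common multiple, mirroring the helper the method relies on
    return a * b // gcd(a, b)

block_size = 16
data_len = 13 + 2                      # payload bytes + 2 bytes for padlen and next-header
align = _lcm(block_size, 4)            # -> 16
padlen = -data_len % align             # -> 1 byte of padding
padding = bytes(range(1, padlen + 1))  # default padding bytes: 0x01, 0x02, ...
print(align, padlen, padding)          # 16 1 b'\x01'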
def require_app(app_name, api_style=False):
"""
Request the application to be automatically loaded.
If this is used for "api" style modules, which is imported by a client
application, set api_style=True.
If this is used for client application module, set api_style=False.
"""
iterable = (inspect.getmodule(frame[0]) for frame in inspect.stack())
modules = [module for module in iterable if module is not None]
if api_style:
m = modules[2] # skip a frame for "api" module
else:
m = modules[1]
m._REQUIRED_APP = getattr(m, '_REQUIRED_APP', [])
m._REQUIRED_APP.append(app_name)
    LOG.debug('require_app: %s is required by %s', app_name, m.__name__)
def _filter_gte(self, term, field_name, field_type, is_not):
"""
Private method that returns a xapian.Query that searches for any term
that is greater than `term` in a specified `field`.
"""
vrp = XHValueRangeProcessor(self.backend)
pos, begin, end = vrp('%s:%s' % (field_name, _term_to_xapian_value(term, field_type)), '*')
if is_not:
return xapian.Query(xapian.Query.OP_AND_NOT,
self._all_query(),
xapian.Query(xapian.Query.OP_VALUE_RANGE, pos, begin, end)
)
        return xapian.Query(xapian.Query.OP_VALUE_RANGE, pos, begin, end)
def transformChildrenToNative(self):
"""
Recursively replace children with their native representation.
Sort to get dependency order right, like vtimezone before vevent.
"""
for childArray in (self.contents[k] for k in self.sortChildKeys()):
for child in childArray:
child = child.transformToNative()
                child.transformChildrenToNative()
def has_documented_type_or_fields(self, include_inherited_fields=False):
"""Returns whether this type, or any of its fields, are documented.
Use this when deciding whether to create a block of documentation for
this type.
"""
if self.doc:
return True
else:
            return self.has_documented_fields(include_inherited_fields)
def _report_exc_info(exc_info, request, extra_data, payload_data, level=None):
"""
Called by report_exc_info() wrapper
"""
if not _check_config():
return
filtered_level = _filtered_level(exc_info[1])
if level is None:
level = filtered_level
filtered_exc_info = events.on_exception_info(exc_info,
request=request,
extra_data=extra_data,
payload_data=payload_data,
level=level)
if filtered_exc_info is False:
return
cls, exc, trace = filtered_exc_info
data = _build_base_data(request)
if level is not None:
data['level'] = level
# walk the trace chain to collect cause and context exceptions
trace_chain = _walk_trace_chain(cls, exc, trace)
extra_trace_data = None
if len(trace_chain) > 1:
data['body'] = {
'trace_chain': trace_chain
}
if payload_data and ('body' in payload_data) and ('trace' in payload_data['body']):
extra_trace_data = payload_data['body']['trace']
del payload_data['body']['trace']
else:
data['body'] = {
'trace': trace_chain[0]
}
if extra_data:
if not isinstance(extra_data, dict):
extra_data = {'value': extra_data}
if extra_trace_data:
extra_data = dict_merge(extra_data, extra_trace_data)
data['custom'] = extra_data
if extra_trace_data and not extra_data:
data['custom'] = extra_trace_data
request = _get_actual_request(request)
_add_request_data(data, request)
_add_person_data(data, request)
_add_lambda_context_data(data)
data['server'] = _build_server_data()
if payload_data:
data = dict_merge(data, payload_data)
payload = _build_payload(data)
send_payload(payload, payload.get('access_token'))
    return data['uuid']
def compute_jacobian(ics, coordinates):
"""Construct a Jacobian for the given internal and Cartesian coordinates
Arguments:
| ``ics`` -- A list of internal coordinate objects.
| ``coordinates`` -- A numpy array with Cartesian coordinates,
shape=(N,3)
The return value will be a numpy array with the Jacobian matrix. There
will be a column for each internal coordinate, and a row for each
Cartesian coordinate (3*N rows).
"""
N3 = coordinates.size
jacobian = numpy.zeros((N3, len(ics)), float)
for j, ic in enumerate(ics):
# Let the ic object fill in each column of the Jacobian.
ic.fill_jacobian_column(jacobian[:,j], coordinates)
    return jacobian
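An illustrative sketch of the interface compute_jacobian expects: a toy bond-length internal coordinate whose fill_jacobian_column writes the analytic derivative. The BondLength class is made up for this example.
# Toy internal coordinate: the distance between atoms i and j (illustrative only).
import numpy

class BondLength(object):
    def __init__(self, i, j):
        self.i, self.j = i, j

    def fill_jacobian_column(self, column, coordinates):
        # d|r_i - r_j|/dr is the unit vector along the bond (negated for atom j)
        delta = coordinates[self.i] - coordinates[self.j]
        unit = delta / numpy.linalg.norm(delta)
        column[3 * self.i:3 * self.i + 3] = unit
        column[3 * self.j:3 * self.j + 3] = -unit

coords = numpy.array([[0.0, 0.0, 0.0], [1.5, 0.0, 0.0]])
jac = compute_jacobian([BondLength(0, 1)], coords)   # shape (6, 1)
print(jac.ravel())                                   # [ 1.  0.  0. -1.  0.  0.]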
def getLong(t):
"""If t is of type long, return it, otherwise raise InvalidTypeError.
"""
i = c_long()
if PL_get_long(t, byref(i)):
return i.value
else:
raise InvalidTypeError("long") | 0.004566 |
def slithir_operations(self):
"""
list(Operation): List of the slithir operations
"""
if self._slithir_operations is None:
operations = [n.irs for n in self.nodes]
operations = [item for sublist in operations for item in sublist if item]
self._slithir_operations = operations
        return self._slithir_operations
def tune(self, verbose=None):
"""
Tuning initial slice width parameter
"""
if not self._tune:
return False
else:
self.w_tune.append(
abs(self.stochastic.last_value - self.stochastic.value))
self.w = 2 * (sum(self.w_tune) / len(self.w_tune))
            return True
def send_signal(self, sig):
"""Send a signal to process (see signal module constants).
On Windows only SIGTERM is valid and is treated as an alias
for kill().
"""
# safety measure in case the current process has been killed in
# meantime and the kernel reused its PID
if not self.is_running():
name = self._platform_impl._process_name
raise NoSuchProcess(self.pid, name)
if os.name == 'posix':
try:
os.kill(self.pid, sig)
except OSError:
err = sys.exc_info()[1]
name = self._platform_impl._process_name
if err.errno == errno.ESRCH:
raise NoSuchProcess(self.pid, name)
if err.errno == errno.EPERM:
raise AccessDenied(self.pid, name)
raise
else:
if sig == signal.SIGTERM:
self._platform_impl.kill_process()
else:
raise ValueError("only SIGTERM is supported on Windows") | 0.001854 |
def get_matrix(self):
"""Copies the pattern’s transformation matrix.
        :returns: A new :class:`Matrix` object.
"""
matrix = Matrix()
cairo.cairo_pattern_get_matrix(self._pointer, matrix._pointer)
self._check_status()
        return matrix
def _get_cu_and_fu_status(self):
"""Submit GET request to update information."""
# adjust headers
headers = HEADERS.copy()
headers['Accept'] = '*/*'
headers['X-Requested-With'] = 'XMLHttpRequest'
headers['X-CSRFToken'] = self._parent.csrftoken
args = '?controller_serial=' + self.serial \
+ '&faucet_serial=' + self.faucet.serial
req = self._parent.client.get(STATUS_ENDPOINT + args,
headers=headers)
# token probably expired, then try again
if req.status_code == 403:
self._parent.login()
self.update()
elif req.status_code == 200:
self.attributes = req.json()
else:
            req.raise_for_status()
def docker_job(self, image, config=None,
input=None, output=None, msdir=None,
shared_memory='1gb', build_label=None,
**kw):
"""
Add a task to a stimela recipe
image : stimela cab name, e.g. 'cab/simms'
        name : This name will be part of the name of the container that will
            execute the task (now optional)
        config : Dictionary of options to pass to the task. This will modify
            the parameters in the default parameter file which
            can be viewed by running 'stimela cabs -i <cab name>', e.g. 'stimela cabs -i simms'
        input : input directory for cab
output : output directory for cab
msdir : MS directory for cab. Only specify if different from recipe ms_dir
"""
        # check if name has any offending characters
        offenders = re.findall(r'\W', self.name)
        if offenders:
            raise StimelaCabParameterError('The cab name \'{:s}\' has some non-alphanumeric characters.'
                                           ' Characters making up this name must be in [a-z,A-Z,0-9,_]'.format(self.name))
## Update I/O with values specified on command line
# TODO (sphe) I think this feature should be removed
script_context = self.recipe.stimela_context
input = script_context.get('_STIMELA_INPUT', None) or input
output = script_context.get('_STIMELA_OUTPUT', None) or output
msdir = script_context.get('_STIMELA_MSDIR', None) or msdir
build_label = script_context.get('_STIMELA_BUILD_LABEL', None) or build_label
# Get location of template parameters file
cabs_logger = stimela.get_cabs('{0:s}/{1:s}_stimela_logfile.json'.format(stimela.LOG_HOME, build_label))
try:
cabpath = cabs_logger['{0:s}_{1:s}'.format(build_label, image)]['DIR']
except KeyError:
            raise StimelaCabParameterError('Cab {} is unknown to stimela. Was it built?'.format(image))
parameter_file = cabpath+'/parameters.json'
name = '{0}-{1}{2}'.format(self.name, id(image), str(time.time()).replace('.', ''))
_cab = cab.CabDefinition(indir=input, outdir=output,
msdir=msdir, parameter_file=parameter_file)
cont = docker.Container(image, name,
label=self.label, logger=self.log,
shared_memory=shared_memory,
log_container=stimela.LOG_FILE,
time_out=self.time_out)
# Container parameter file will be updated and validated before the container is executed
cont._cab = _cab
cont.parameter_file_name = '{0}/{1}.json'.format(self.recipe.parameter_file_dir, name)
# Remove dismissable kw arguments:
ops_to_pop = []
for op in config:
if isinstance(config[op], dismissable):
ops_to_pop.append(op)
for op in ops_to_pop:
arg = config.pop(op)()
if arg is not None:
config[op] = arg
cont.config = config
# These are standard volumes and
# environmental variables. These will be
# always exist in a cab container
cont.add_volume(self.recipe.stimela_path, '/scratch/stimela', perm='ro')
cont.add_volume(self.recipe.parameter_file_dir, '/configs', perm='ro')
cont.add_environ('CONFIG', '/configs/{}.json'.format(name))
if msdir:
md = '/home/{0:s}/msdir'.format(USER)
cont.add_volume(msdir, md)
cont.add_environ('MSDIR', md)
# Keep a record of the content of the
# volume
dirname, dirs, files = [a for a in next(os.walk(msdir))]
cont.msdir_content = {
"volume" : dirname,
"dirs" : dirs,
"files" : files,
}
self.log.debug('Mounting volume \'{0}\' from local file system to \'{1}\' in the container'.format(msdir, md))
if input:
cont.add_volume( input,'/input', perm='ro')
cont.add_environ('INPUT', '/input')
# Keep a record of the content of the
# volume
dirname, dirs, files = [a for a in next(os.walk(input))]
cont.input_content = {
"volume" : dirname,
"dirs" : dirs,
"files" : files,
}
self.log.debug('Mounting volume \'{0}\' from local file system to \'{1}\' in the container'.format(input, '/input'))
if not os.path.exists(output):
os.mkdir(output)
od = '/home/{0:s}/output'.format(USER)
cont.add_environ('HOME', od)
self.logfile = cont.logfile = '{0:s}/log-{1:s}.txt'.format(self.log_dir, name.split('-')[0])
cont.add_volume(output, od)
if not os.path.exists(cont.logfile):
with open(cont.logfile, "w") as std:
pass
logfile_cont = '/home/{0:s}/{1:s}/log-{2:s}.txt'.format(USER, self.log_dir, name.split('-')[0])
cont.add_volume(cont.logfile, logfile_cont, "rw")
cont.add_environ('OUTPUT', od)
cont.add_environ('LOGFILE', logfile_cont)
self.log.debug('Mounting volume \'{0}\' from local file system to \'{1}\' in the container'.format(output, od))
cont.image = '{0}_{1}'.format(build_label, image)
# Added and ready for execution
self.job = cont
        return 0
def _read_header(f, header_param):
"""
Read and parse data from 1st line of a file.
:param f: :func:`file` or :class:`~StringIO.StringIO` object from which to
read 1st line.
:type f: file
:param header_param: Parameters used to parse the data from the header.
Contains "delimiter" and "fields".
:type header_param: dict
:returns: Dictionary of data read from header.
:rtype: dict
:raises: :exc:`~simkit.core.exceptions.UnnamedDataError`
The **header_param** argument contains keys to read the 1st line of **f**.
If "delimiter" is ``None`` or missing, the default delimiter is a comma,
otherwise "delimiter" can be any single character, integer or sequence of
``int``.
* single character -- a delimiter
* single integer -- uniform fixed width
* sequence of ``int`` -- fixed widths, the number of fields should \
correspond to the length of the sequence.
The "fields" key is a list of (parameter-name, parameter-type[, parameter-
units]) lists.
"""
# default delimiter is a comma, can't be None
header_delim = str(header_param.get('delimiter', ','))
# don't allow unnamed fields
if 'fields' not in header_param:
raise UnnamedDataError(f.name)
header_fields = {field[0]: field[1:] for field in header_param['fields']}
# header_names can't be generator b/c DictReader needs list, and can't be
# dictionary b/c must be same order as 'fields' to match data readby csv
header_names = [field[0] for field in header_param['fields']]
# read header
header_str = StringIO(f.readline()) # read the 1st line
# use csv because it will preserve quoted fields with commas
# make a csv.DictReader from header string, use header names for
# fieldnames and set delimiter to header delimiter
header_reader = csv.DictReader(header_str, header_names,
delimiter=header_delim,
skipinitialspace=True)
data = header_reader.next() # parse the header dictionary
# iterate over items in data
for k, v in data.iteritems():
header_type = header_fields[k][0] # spec'd type
# whitelist header types
if isinstance(header_type, basestring):
if header_type.lower().startswith('int'):
header_type = int # coerce to integer
elif header_type.lower().startswith('long'):
header_type = long # coerce to long integer
elif header_type.lower().startswith('float'):
header_type = float # to floating decimal point
elif header_type.lower().startswith('str'):
header_type = str # coerce to string
elif header_type.lower().startswith('bool'):
header_type = bool # coerce to boolean
else:
raise TypeError('"%s" is not a supported type.' % header_type)
# WARNING! Use of `eval` considered harmful. `header_type` is read
# from JSON file, not secure input, could be used to exploit system
data[k] = header_type(v) # cast v to type
# check for units in 3rd element
if len(header_fields[k]) > 1:
units = UREG(str(header_fields[k][1])) # spec'd units
data[k] = data[k] * units # apply units
    return data
def get_health_check(name, region=None, key=None, keyid=None, profile=None):
'''
Get the health check configured for this ELB.
CLI example:
.. code-block:: bash
salt myminion boto_elb.get_health_check myelb
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
retries = 30
while True:
try:
lb = conn.get_all_load_balancers(load_balancer_names=[name])
lb = lb[0]
ret = odict.OrderedDict()
hc = lb.health_check
ret['interval'] = hc.interval
ret['target'] = hc.target
ret['healthy_threshold'] = hc.healthy_threshold
ret['timeout'] = hc.timeout
ret['unhealthy_threshold'] = hc.unhealthy_threshold
return ret
except boto.exception.BotoServerError as e:
if retries and e.code == 'Throttling':
log.debug('Throttled by AWS API, will retry in 5 seconds.')
time.sleep(5)
retries -= 1
continue
log.error('ELB %s not found.', name,
                      exc_info_on_loglevel=logging.DEBUG)
            return {}
def DeleteGroupTags(r, group, tags, dry_run=False):
"""
Deletes tags from a node group.
@type group: str
@param group: group to delete tags from
@type tags: list of string
@param tags: tags to delete
@type dry_run: bool
@param dry_run: whether to perform a dry run
@rtype: string
@return: job id
"""
query = {
"dry-run": dry_run,
"tag": tags,
}
return r.request("delete", "/2/groups/%s/tags" % group, query=query) | 0.002049 |
def get_group_permissions(self, user_obj, obj=None):
"""
Returns a set of permission strings that this user has through his/her
groups.
"""
if user_obj.is_anonymous() or obj is not None:
return set()
if not hasattr(user_obj, '_group_perm_cache'):
if user_obj.is_superuser:
perms = Permission.objects.all()
else:
user_groups_field = get_user_class()._meta.get_field('groups') # pylint: disable=W0212
user_groups_query = 'group__%s' % user_groups_field.related_query_name()
perms = Permission.objects.filter(**{user_groups_query: user_obj})
perms = perms.values_list('content_type__app_label', 'codename').order_by()
user_obj._group_perm_cache = set(["%s.%s" % (ct, name) for ct, name in perms]) # pylint: disable=W0212
        return user_obj._group_perm_cache
def _get_valid_formats():
    ''' Calls SoX help for a list of audio formats available with the current
install of SoX.
Returns:
--------
formats : list
List of audio file extensions that SoX can process.
'''
if NO_SOX:
return []
so = subprocess.check_output(['sox', '-h'])
if type(so) is not str:
so = str(so, encoding='UTF-8')
so = so.split('\n')
idx = [i for i in range(len(so)) if 'AUDIO FILE FORMATS:' in so[i]][0]
formats = so[idx].split(' ')[3:]
    return formats
def validate(self, value):
"""Validate value."""
if self.exclusive:
if value <= self.minimum_value:
tpl = "'{value}' is lower or equal than minimum ('{min}')."
raise ValidationError(
tpl.format(value=value, min=self.minimum_value))
else:
if value < self.minimum_value:
raise ValidationError(
"'{value}' is lower than minimum ('{min}').".format(
                        value=value, min=self.minimum_value))
def And(*args: Union[Bool, bool]) -> Bool:
"""Create an And expression."""
union = []
args_list = [arg if isinstance(arg, Bool) else Bool(arg) for arg in args]
for arg in args_list:
union.append(arg.annotations)
    return Bool(z3.And([a.raw for a in args_list]), union)
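A small sketch of the underlying z3 call this wrapper builds on, using only the plain z3 API; the library's Bool wrapper and its annotation handling are not reproduced here.
# Plain z3 sketch of the conjunction the wrapper above constructs.
import z3

x, y = z3.Bool('x'), z3.Bool('y')
conjunction = z3.And([x, y])        # z3.And also accepts a list of constraints
solver = z3.Solver()
solver.add(conjunction, z3.Not(y))
print(solver.check())               # unsat: x AND y cannot hold together with NOT y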
def grid_coords_from_corners(upper_left_corner, lower_right_corner, size):
''' Points are the outer edges of the UL and LR pixels. Size is rows, columns.
GC projection type is taken from Points. '''
assert upper_left_corner.wkt == lower_right_corner.wkt
geotransform = np.array([upper_left_corner.lon, -(upper_left_corner.lon - lower_right_corner.lon) / float(size[1]), 0,
upper_left_corner.lat, 0, -(upper_left_corner.lat - lower_right_corner.lat) / float(size[0])])
return GridCoordinates(geotransform=geotransform,
wkt=upper_left_corner.wkt,
y_size=size[0],
                           x_size=size[1])
def get_fieldsets(self):
"""
Hook for specifying fieldsets. If 'self.fieldsets' is
empty this will default to include all the fields in
the form with a title of None.
"""
if self.fieldsets:
return self.fieldsets
form_class = self.get_form_class()
form = self.get_form(form_class)
        fields = list(form.base_fields.keys())
readonly_fields = self.get_readonly_fields()
if readonly_fields:
fields.extend(readonly_fields)
        return [(None, {'fields': fields})]
def authorization_url(self, url, request_token=None, **kwargs):
"""Create an authorization URL by appending request_token and optional
kwargs to url.
This is the second step in the OAuth 1 workflow. The user should be
redirected to this authorization URL, grant access to you, and then
be redirected back to you. The redirection back can either be specified
during client registration or by supplying a callback URI per request.
:param url: The authorization endpoint URL.
:param request_token: The previously obtained request token.
:param kwargs: Optional parameters to append to the URL.
:returns: The authorization URL with new parameters embedded.
An example using a registered default callback URI.
>>> request_token_url = 'https://api.twitter.com/oauth/request_token'
>>> authorization_url = 'https://api.twitter.com/oauth/authorize'
>>> oauth_session = OAuth1Session('client-key', client_secret='secret')
>>> oauth_session.fetch_request_token(request_token_url)
{
'oauth_token': 'sdf0o9823sjdfsdf',
'oauth_token_secret': '2kjshdfp92i34asdasd',
}
>>> oauth_session.authorization_url(authorization_url)
'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf'
>>> oauth_session.authorization_url(authorization_url, foo='bar')
'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&foo=bar'
An example using an explicit callback URI.
>>> request_token_url = 'https://api.twitter.com/oauth/request_token'
>>> authorization_url = 'https://api.twitter.com/oauth/authorize'
>>> oauth_session = OAuth1Session('client-key', client_secret='secret', callback_uri='https://127.0.0.1/callback')
>>> oauth_session.fetch_request_token(request_token_url)
{
'oauth_token': 'sdf0o9823sjdfsdf',
'oauth_token_secret': '2kjshdfp92i34asdasd',
}
>>> oauth_session.authorization_url(authorization_url)
'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&oauth_callback=https%3A%2F%2F127.0.0.1%2Fcallback'
"""
kwargs["oauth_token"] = request_token or self._client.client.resource_owner_key
log.debug("Adding parameters %s to url %s", kwargs, url)
        return add_params_to_uri(url, kwargs.items())
def create_symlink(source, link_name):
"""
Creates symbolic link for either operating system.
http://stackoverflow.com/questions/6260149/os-symlink-support-in-windows
"""
os_symlink = getattr(os, "symlink", None)
if isinstance(os_symlink, collections.Callable):
os_symlink(source, link_name)
else:
import ctypes
csl = ctypes.windll.kernel32.CreateSymbolicLinkW
csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
csl.restype = ctypes.c_ubyte
flags = 1 if os.path.isdir(source) else 0
if csl(link_name, source, flags) == 0:
            raise ctypes.WinError()
def _build_protobuf(self):
"""Build a query protobuf.
Relies on the current state of the iterator.
:rtype:
:class:`.query_pb2.Query`
:returns: The query protobuf object for the current
state of the iterator.
"""
pb = _pb_from_query(self._query)
start_cursor = self.next_page_token
if start_cursor is not None:
pb.start_cursor = base64.urlsafe_b64decode(start_cursor)
end_cursor = self._end_cursor
if end_cursor is not None:
pb.end_cursor = base64.urlsafe_b64decode(end_cursor)
if self.max_results is not None:
pb.limit.value = self.max_results - self.num_results
if start_cursor is None and self._offset is not None:
# NOTE: We don't need to add an offset to the request protobuf
# if we are using an existing cursor, because the offset
# is only relative to the start of the result set, not
# relative to each page (this method is called per-page)
pb.offset = self._offset
        return pb
def main() -> None:
"""Run command line"""
try:
_real_main()
except GuesslangError as error:
LOGGER.critical("Failed: %s", error)
sys.exit(-1)
except KeyboardInterrupt:
LOGGER.critical("Cancelled!")
        sys.exit(-2)
def filter_package_list(package_list):
"""
Filter a list of packages into local and remotes.
"""
remote_pkgs = []
local_pkgs = []
    possible_remotes = list(filter(lambda i: not os.path.exists(i), package_list))
juicer.utils.Log.log_debug("Considering %s possible remotes" % len(possible_remotes))
for item in possible_remotes:
remote_pkgs.extend(juicer.utils.Remotes.assemble_remotes(item))
juicer.utils.Log.log_notice("Remote packages: %s" % str(remote_pkgs))
    possible_locals = list(filter(os.path.exists, package_list))
    possible_locals = list(filter(is_rpm, possible_locals))
juicer.utils.Log.log_debug("Considering %s possible locals" % len(possible_locals))
for item in possible_locals:
for match in find_pattern(item):
local_pkgs.append(match)
juicer.utils.Log.log_notice("Local packages: %s" % str(local_pkgs))
filtered_package_list = dedupe(remote_pkgs + local_pkgs)
    return filtered_package_list
def filter_data_columns(data):
"""
Given a dict of data such as those in :py:class:`~.ProjectStats` attributes,
made up of :py:class:`datetime.datetime` keys and values of dicts of column
keys to counts, return a list of the distinct column keys in sorted order.
:param data: data dict as returned by ProjectStats attributes
:type data: dict
:return: sorted list of distinct keys
:rtype: ``list``
"""
keys = set()
for dt, d in data.items():
for k in d:
keys.add(k)
    return sorted([x for x in keys])
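A small usage sketch, assuming filter_data_columns from above is in scope.
# Small usage sketch, assuming filter_data_columns from above is importable.
from datetime import datetime

data = {
    datetime(2024, 1, 1): {'py2': 10, 'py3': 25},
    datetime(2024, 1, 2): {'py3': 30, 'other': 1},
}
print(filter_data_columns(data))   # ['other', 'py2', 'py3']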
def cli(ctx):
"""Shows the saved commands."""
json_path = os.path.join(os.path.expanduser('~'), '.keep', 'commands.json')
if not os.path.exists(json_path):
click.echo('No commands to show. Add one by `keep new`.')
else:
        utils.list_commands(ctx)
def to_record(cls, attr_names, values):
"""
Convert values to a record to be inserted into a database.
:param list attr_names:
List of attributes for the converting record.
:param values: Values to be converted.
:type values: |dict|/|namedtuple|/|list|/|tuple|
:raises ValueError: If the ``values`` is invalid.
"""
try:
# from a namedtuple to a dict
values = values._asdict()
except AttributeError:
pass
try:
# from a dictionary to a list
return [cls.__to_sqlite_element(values.get(attr_name)) for attr_name in attr_names]
except AttributeError:
pass
if isinstance(values, (tuple, list)):
return [cls.__to_sqlite_element(value) for value in values]
raise ValueError("cannot convert from {} to list".format(type(values))) | 0.003243 |
def validated_type(base_type, name=None, validate=None):
"""Convenient way to create a new type by adding validation to existing type.
Example: ::
Ipv4Address = validated_type(
String, 'Ipv4Address',
# regexp simplified for demo purposes
Regexp('^\d+\.\d+\.\d+\.\d+$', error='Invalid IP address')
)
Percentage = validated_type(Integer, validate=Range(0, 100))
# The above is the same as
class Ipv4Address(String):
def __init__(self, *args, **kwargs):
super(Ipv4Address, self).__init__(*args, **kwargs)
self.validators.insert(0, Regexp('^\d+\.\d+\.\d+\.\d+$', error='Invalid IP address'))
class Percentage(Integer):
def __init__(self, *args, **kwargs):
super(Percentage, self).__init__(*args, **kwargs)
self.validators.insert(0, Range(0, 100))
:param Type base_type: Base type for a new type.
:param name str: Optional class name for new type
(will be shown in places like repr).
:param validate: A validator or list of validators for this data type.
See `Type.validate` for details.
"""
if validate is None:
validate = []
if not is_sequence(validate):
validate = [validate]
class ValidatedSubtype(base_type):
if name is not None:
__name__ = name
def __init__(self, *args, **kwargs):
super(ValidatedSubtype, self).__init__(*args, **kwargs)
for validator in reversed(validate):
self.validators.insert(0, validator)
    return ValidatedSubtype
def mlength(message, N=1, word_spaced=True):
"""
Returns Morse length
>>> message = "PARIS"
>>> mlength(message)
50
>>> mlength(message, 5)
250
"""
message = _repeat_word(message, N)
if word_spaced:
message = message + " E"
lst_bin = _encode_binary(message)
N = len(lst_bin)
if word_spaced:
N -= 1 # E is one "dit" so we remove it
    return N
def get_position(directory, identifier):
"""
Extracts the position of a paragraph from the identifier, and the parent directory of the
paragraph.
Parameters
----------
directory : Path
A parent directory of a paragraph.
identifier : str
An identifier of a paragraph.
Returns
-------
float
The estimated position of the paragraph in the range [0; 1).
"""
paragraph_number = get_paragraph_number(identifier)
paragraph_total = max( # Not all paragraphs are stored, e.g. because of processing errors.
get_paragraph_number(get_identifier(document)) + 1
for document in directory.iterdir())
assert paragraph_total > paragraph_number and paragraph_total > 0
position = paragraph_number / paragraph_total
    return position
def get_config_applied_machine_groups(self, project_name, config_name):
""" get machine group names where the logtail config applies to
        Unsuccessful operation will cause a LogException.
:type project_name: string
:param project_name: the Project name
:type config_name: string
:param config_name: the logtail config name used to apply
:return: GetConfigAppliedMachineGroupsResponse
:raise: LogException
"""
headers = {}
params = {}
resource = "/configs/" + config_name + "/machinegroups"
(resp, header) = self._send("GET", project_name, None, resource, params, headers)
        return GetConfigAppliedMachineGroupsResponse(resp, header)
def _assertion(self, assertion, verified=False):
"""
Check the assertion
:param assertion:
:return: True/False depending on if the assertion is sane or not
"""
if not hasattr(assertion, 'signature') or not assertion.signature:
logger.debug("unsigned")
if self.require_signature:
raise SignatureError("Signature missing for assertion")
else:
logger.debug("signed")
if not verified and self.do_not_verify is False:
try:
self.sec.check_signature(assertion, class_name(assertion),
self.xmlstr)
except Exception as exc:
logger.error("correctly_signed_response: %s", exc)
raise
self.assertion = assertion
logger.debug("assertion context: %s", self.context)
logger.debug("assertion keys: %s", assertion.keyswv())
logger.debug("outstanding_queries: %s", self.outstanding_queries)
# if self.context == "AuthnReq" or self.context == "AttrQuery":
if self.context == "AuthnReq":
self.authn_statement_ok()
# elif self.context == "AttrQuery":
# self.authn_statement_ok(True)
if not self.condition_ok():
raise VerificationError("Condition not OK")
logger.debug("--- Getting Identity ---")
# if self.context == "AuthnReq" or self.context == "AttrQuery":
# self.ava = self.get_identity()
# logger.debug("--- AVA: %s", self.ava)
try:
self.get_subject()
if self.asynchop:
if self.allow_unsolicited:
pass
elif self.came_from is None:
raise VerificationError("Came from")
return True
except Exception:
logger.exception("get subject")
            raise
def update(name,
cpu=0,
mem=0,
disk_profile=None,
disks=None,
nic_profile=None,
interfaces=None,
graphics=None,
live=True,
**kwargs):
'''
Update the definition of an existing domain.
:param name: Name of the domain to update
:param cpu: Number of virtual CPUs to assign to the virtual machine
:param mem: Amount of memory to allocate to the virtual machine in MiB.
:param disk_profile: disk profile to use
:param disks:
Disk definitions as documented in the :func:`init` function.
If neither the profile nor this parameter are defined, the disk devices
will not be changed. However to clear disks set this parameter to empty list.
:param nic_profile: network interfaces profile to use
:param interfaces:
Network interface definitions as documented in the :func:`init` function.
If neither the profile nor this parameter are defined, the interface devices
will not be changed. However to clear network interfaces set this parameter
to empty list.
:param graphics:
The new graphics definition as defined in init-graphics-def_. If not set,
the graphics will not be changed. To remove a graphics device, set this parameter
to ``{'type': 'none'}``.
:param live:
``False`` to avoid trying to live update the definition. In such a case, the
new definition is applied at the next start of the virtual machine. If ``True``,
not all aspects of the definition can be live updated, but as much as possible
will be attempted. (Default: ``True``)
:param connection: libvirt connection URI, overriding defaults
:param username: username to connect with, overriding defaults
:param password: password to connect with, overriding defaults
:return:
Returns a dictionary indicating the status of what has been done. It is structured in
the following way:
.. code-block:: python
{
'definition': True,
'cpu': True,
'mem': True,
'disks': {'attached': [list of actually attached disks],
'detached': [list of actually detached disks]},
'nics': {'attached': [list of actually attached nics],
'detached': [list of actually detached nics]},
'errors': ['error messages for failures']
}
.. versionadded:: 2019.2.0
CLI Example:
.. code-block:: bash
salt '*' virt.update domain cpu=2 mem=1024
'''
status = {
'definition': False,
'disk': {'attached': [], 'detached': []},
'interface': {'attached': [], 'detached': []}
}
conn = __get_conn(**kwargs)
domain = _get_domain(conn, name)
desc = ElementTree.fromstring(domain.XMLDesc(0))
need_update = False
# Compute the XML to get the disks, interfaces and graphics
hypervisor = desc.get('type')
all_disks = _disk_profile(disk_profile, hypervisor, disks, name, **kwargs)
new_desc = ElementTree.fromstring(_gen_xml(name,
cpu,
mem,
all_disks,
_get_merged_nics(hypervisor, nic_profile, interfaces),
hypervisor,
domain.OSType(),
desc.find('.//os/type').get('arch'),
graphics,
**kwargs))
# Update the cpu
cpu_node = desc.find('vcpu')
if cpu and int(cpu_node.text) != cpu:
cpu_node.text = six.text_type(cpu)
cpu_node.set('current', six.text_type(cpu))
need_update = True
# Update the memory, note that libvirt outputs all memory sizes in KiB
for mem_node_name in ['memory', 'currentMemory']:
mem_node = desc.find(mem_node_name)
if mem and int(mem_node.text) != mem * 1024:
mem_node.text = six.text_type(mem)
mem_node.set('unit', 'MiB')
need_update = True
# Update the XML definition with the new disks and diff changes
devices_node = desc.find('devices')
parameters = {'disk': ['disks', 'disk_profile'],
'interface': ['interfaces', 'nic_profile'],
'graphics': ['graphics']}
changes = {}
for dev_type in parameters:
changes[dev_type] = {}
func_locals = locals()
if [param for param in parameters[dev_type] if func_locals.get(param, None) is not None]:
old = devices_node.findall(dev_type)
new = new_desc.findall('devices/{0}'.format(dev_type))
changes[dev_type] = globals()['_diff_{0}_lists'.format(dev_type)](old, new)
if changes[dev_type]['deleted'] or changes[dev_type]['new']:
for item in old:
devices_node.remove(item)
devices_node.extend(changes[dev_type]['sorted'])
need_update = True
# Set the new definition
if need_update:
# Create missing disks if needed
if changes['disk']:
for idx, item in enumerate(changes['disk']['sorted']):
source_file = all_disks[idx]['source_file']
if item in changes['disk']['new'] and source_file and not os.path.isfile(source_file):
_qemu_image_create(all_disks[idx])
try:
conn.defineXML(salt.utils.stringutils.to_str(ElementTree.tostring(desc)))
status['definition'] = True
except libvirt.libvirtError as err:
conn.close()
raise err
# Do the live changes now that we know the definition has been properly set
# From that point on, failures are not blocking to try to live update as much
# as possible.
commands = []
if domain.isActive() and live:
if cpu:
commands.append({'device': 'cpu',
'cmd': 'setVcpusFlags',
'args': [cpu, libvirt.VIR_DOMAIN_AFFECT_LIVE]})
if mem:
commands.append({'device': 'mem',
'cmd': 'setMemoryFlags',
'args': [mem * 1024, libvirt.VIR_DOMAIN_AFFECT_LIVE]})
for dev_type in ['disk', 'interface']:
for added in changes[dev_type].get('new', []):
commands.append({'device': dev_type,
'cmd': 'attachDevice',
'args': [salt.utils.stringutils.to_str(ElementTree.tostring(added))]})
for removed in changes[dev_type].get('deleted', []):
commands.append({'device': dev_type,
'cmd': 'detachDevice',
'args': [salt.utils.stringutils.to_str(ElementTree.tostring(removed))]})
for cmd in commands:
try:
ret = getattr(domain, cmd['cmd'])(*cmd['args'])
device_type = cmd['device']
if device_type in ['cpu', 'mem']:
status[device_type] = not bool(ret)
else:
actions = {'attachDevice': 'attached', 'detachDevice': 'detached'}
status[device_type][actions[cmd['cmd']]].append(cmd['args'][0])
except libvirt.libvirtError as err:
if 'errors' not in status:
status['errors'] = []
status['errors'].append(six.text_type(err))
conn.close()
    return status
def periodic_distance(a, b, periodic):
'''Periodic distance between two arrays. Periodic is a 3
dimensional array containing the 3 box sizes.
'''
delta = np.abs(a - b)
delta = np.where(delta > 0.5 * periodic, periodic - delta, delta)
    return np.sqrt((delta ** 2).sum(axis=-1))
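A quick worked example in a cubic box, assuming periodic_distance from above is in scope.
# Worked example, assuming periodic_distance from above is importable.
import numpy as np

box = np.array([10.0, 10.0, 10.0])
a = np.array([0.5, 0.0, 0.0])
b = np.array([9.5, 0.0, 0.0])
print(periodic_distance(a, b, box))   # 1.0, via the periodic image, not 9.0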
def _process_methods(self, req, resp, resource):
"""Adds the Access-Control-Allow-Methods header to the response,
using the cors settings to determine which methods are allowed.
"""
requested_method = self._get_requested_method(req)
if not requested_method:
return False
if self._cors_config['allow_all_methods']:
allowed_methods = self._get_resource_methods(resource)
self._set_allowed_methods(resp, allowed_methods)
if requested_method in allowed_methods:
return True
elif requested_method in self._cors_config['allow_methods_list']:
resource_methods = self._get_resource_methods(resource)
# Only list methods as allowed if they exist
# on the resource AND are in the allowed_methods_list
allowed_methods = [
method for method in resource_methods
if method in self._cors_config['allow_methods_list']
]
self._set_allowed_methods(resp, allowed_methods)
if requested_method in allowed_methods:
return True
        return False
def store_widget_properties(self, widget, widget_name):
"""Sets configuration values for widgets
If the widget is a window, then the size and position are stored. If the widget is a pane, then only the
position is stored. If the window is maximized the last insert position before being maximized is keep in the
config and the maximized flag set to True. The maximized state and the last size and position are strictly
separated by this.
:param widget: The widget, for which the position (and possibly the size) will be stored.
:param widget_name: The window or widget name of the widget, which constitutes a part of its key in the
configuration file.
"""
if isinstance(widget, Gtk.Window):
maximized = bool(widget.is_maximized())
self.set_config_value('{0}_MAXIMIZED'.format(widget_name), maximized)
if maximized:
return
size = widget.get_size()
self.set_config_value('{0}_SIZE'.format(widget_name), tuple(size))
position = widget.get_position()
self.set_config_value('{0}_POS'.format(widget_name), tuple(position))
else: # Gtk.Paned
position = widget.get_position()
            self.set_config_value('{0}_POS'.format(widget_name), position)
def plot(self, label=None, colour='g', style='-'): # pragma: no cover
'''Plot the time series.'''
pylab = LazyImport.pylab()
pylab.plot(self.dates, self.values, '%s%s' % (colour, style), label=label)
if label is not None:
pylab.legend()
        pylab.show()
def add_noise(Y, sigma):
"""Adds noise to Y"""
    return Y + np.random.normal(0, sigma, Y.shape)
def user_agent(self):
"""Return the formatted user agent string."""
components = ["/".join(x) for x in self.user_agent_components.items()]
return " ".join(components) | 0.010526 |
def _set_module_names_for_sphinx(modules: List, new_name: str):
""" Trick sphinx into displaying the desired module in these objects' documentation. """
for obj in modules:
        obj.__module__ = new_name
def info(self):
"""
        Property for accessing :class:`InfoManager` instance, which is used to get general server info.
:rtype: yagocd.resources.info.InfoManager
"""
if self._info_manager is None:
self._info_manager = InfoManager(session=self._session)
        return self._info_manager
def send_capabilities_request(self, vehicle, name, m):
'''Request an AUTOPILOT_VERSION packet'''
capability_msg = vehicle.message_factory.command_long_encode(0, 0, mavutil.mavlink.MAV_CMD_REQUEST_AUTOPILOT_CAPABILITIES, 0, 1, 0, 0, 0, 0, 0, 0)
        vehicle.send_mavlink(capability_msg)
def get_applications():
"""
:return: all knows applications
"""
LOGGER.debug("ApplicationService.get_applications")
args = {'http_operation': 'GET', 'operation_path': ''}
response = ApplicationService.requester.call(args)
ret = None
if response.rc == 0:
ret = []
for application in response.response_content['applications']:
ret.append(Application.json_2_application(application))
elif response.rc != 404:
err_msg = 'ApplicationService.get_applications - Problem while getting applications. ' \
'Reason: ' + str(response.response_content) + '-' + str(response.error_message) + \
" (" + str(response.rc) + ")"
LOGGER.warning(err_msg)
    return ret
def main():
'''main routine'''
# process arguments
if len(sys.argv) < 3:
usage()
rgname = sys.argv[1]
vmss_name = sys.argv[2]
# Load Azure app defaults
try:
with open('azurermconfig.json') as config_file:
config_data = json.load(config_file)
except FileNotFoundError:
        sys.exit('Error: Expecting azurermconfig.json in current folder')
tenant_id = config_data['tenantId']
app_id = config_data['appId']
app_secret = config_data['appSecret']
subscription_id = config_data['subscriptionId']
access_token = azurerm.get_access_token(tenant_id, app_id, app_secret)
instanceviewlist = azurerm.list_vmss_vm_instance_view(access_token, subscription_id, rgname,
vmss_name)
for vmi in instanceviewlist['value']:
instance_id = vmi['instanceId']
upgrade_domain = vmi['properties']['instanceView']['platformUpdateDomain']
fault_domain = vmi['properties']['instanceView']['platformFaultDomain']
print('Instance ID: ' + instance_id + ', UD: ' + str(upgrade_domain) + ', FD: '
              + str(fault_domain))
def get(self, path, params=None):
"""Make a GET request, optionally including a parameters, to a path.
The path of the request is the full URL.
Parameters
----------
path : str
The URL to request
params : DataQuery, optional
The query to pass when making the request
Returns
-------
resp : requests.Response
The server's response to the request
Raises
------
HTTPError
If the server returns anything other than a 200 (OK) code
See Also
--------
get_query, get
"""
resp = self._session.get(path, params=params)
if resp.status_code != 200:
if resp.headers.get('Content-Type', '').startswith('text/html'):
text = resp.reason
else:
text = resp.text
raise requests.HTTPError('Error accessing {0}\n'
'Server Error ({1:d}: {2})'.format(resp.request.url,
resp.status_code,
text))
        return resp
def is_resource_class_terminal_attribute(rc, attr_name):
"""
Checks if the given attribute name is a terminal attribute of the given
registered resource.
"""
attr = get_resource_class_attribute(rc, attr_name)
    return attr.kind == RESOURCE_ATTRIBUTE_KINDS.TERMINAL
def partLon(ID, chart):
""" Returns the longitude of an arabic part. """
# Get diurnal or nocturnal formula
abc = FORMULAS[ID][0] if chart.isDiurnal() else FORMULAS[ID][1]
a = objLon(abc[0], chart)
b = objLon(abc[1], chart)
c = objLon(abc[2], chart)
    return c + b - a
def decrypt(private_key, message, label=b'', hash_class=hashlib.sha1,
mgf=mgf.mgf1):
    '''Decrypt a byte message using an RSA private key and the OAEP wrapping
algorithm
Parameters:
    private_key - an RSA private key
message - a byte string
    label - a label, as per the PKCS#1 standard
    hash_class - a Python class for a message digest algorithm respecting
the hashlib interface
    mgf - a mask generation function
Return value:
the string before encryption (decrypted)
'''
hash = hash_class()
h_len = hash.digest_size
k = private_key.byte_size
# 1. check length
if len(message) != k or k < 2 * h_len + 2:
raise ValueError('decryption error')
# 2. RSA decryption
c = primitives.os2ip(message)
m = private_key.rsadp(c)
em = primitives.i2osp(m, k)
# 4. EME-OAEP decoding
hash.update(label)
label_hash = hash.digest()
y, masked_seed, masked_db = em[0], em[1:h_len+1], em[1+h_len:]
if y != b'\x00' and y != 0:
raise ValueError('decryption error')
seed_mask = mgf(masked_db, h_len)
seed = primitives.string_xor(masked_seed, seed_mask)
db_mask = mgf(seed, k - h_len - 1)
db = primitives.string_xor(masked_db, db_mask)
label_hash_prime, rest = db[:h_len], db[h_len:]
i = rest.find(b'\x01')
if i == -1:
raise exceptions.DecryptionError
if rest[:i].strip(b'\x00') != b'':
print(rest[:i].strip(b'\x00'))
raise exceptions.DecryptionError
m = rest[i+1:]
if label_hash_prime != label_hash:
raise exceptions.DecryptionError
return m | 0.000607 |
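The EME-OAEP decoding above can be exercised without any RSA key. The sketch below is a self-contained illustration of the masking/unmasking steps, using a minimal MGF1 built on hashlib; mgf1, xor and the toy size k are local stand-ins for this example, not the module's own primitives.

import hashlib
import os

def mgf1(seed, mask_len, hash_class=hashlib.sha1):
    # Minimal MGF1 (RFC 8017, appendix B.2.1), used only for this sketch
    out, counter = b'', 0
    while len(out) < mask_len:
        out += hash_class(seed + counter.to_bytes(4, 'big')).digest()
        counter += 1
    return out[:mask_len]

def xor(a, b):
    return bytes(x ^ y for x, y in zip(a, b))

k = 64                                  # pretend modulus size in bytes
h_len = hashlib.sha1().digest_size
label_hash = hashlib.sha1(b'').digest()
message = b'attack at dawn'

# EME-OAEP encoding (what the encrypting side produces before the RSA step)
db = label_hash + b'\x00' * (k - len(message) - 2 * h_len - 2) + b'\x01' + message
seed = os.urandom(h_len)
masked_db = xor(db, mgf1(seed, k - h_len - 1))
em = b'\x00' + xor(seed, mgf1(masked_db, h_len)) + masked_db

# EME-OAEP decoding, mirroring decrypt() after the rsadp()/i2osp() step
masked_seed, masked_db = em[1:1 + h_len], em[1 + h_len:]
rec_seed = xor(masked_seed, mgf1(masked_db, h_len))
rec_db = xor(masked_db, mgf1(rec_seed, k - h_len - 1))
assert rec_db[:h_len] == label_hash
assert rec_db[rec_db.index(b'\x01', h_len) + 1:] == message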
def finish_statistics(self):
"""
Prepare/modify data for plotting
"""
# params = self.stat.setup_params(self.data)
self.stat.finish_layer(self.data, self.stat.params) | 0.009709 |
def safely_decode(unicode_or_str, encoding='utf-8'):
''' Decodes byte <str> into <unicode>. Ignores any non-utf8 chars in <str>s '''
if isinstance(unicode_or_str, unicode):
ustr = unicode_or_str
elif isinstance(unicode_or_str, str):
ustr = unicode_or_str.decode(encoding, 'ignore')
else:
raise Exception(u'Not of type unicode or str')
return ustr | 0.01897 |
def _wrap_jinja_filter(self, function):
"""Propagate exceptions as undefined values filter."""
def wrapper(*args, **kwargs):
"""Filter wrapper."""
try:
return function(*args, **kwargs)
except Exception: # pylint: disable=broad-except
return NestedUndefined()
# Copy over Jinja filter decoration attributes.
for attribute in dir(function):
if attribute.endswith('filter'):
setattr(wrapper, attribute, getattr(function, attribute))
return wrapper | 0.003431 |
def rsquare(obsvalues, # type: Union[numpy.ndarray, List[Union[float, int]]]
simvalues # type: Union[numpy.ndarray, List[Union[float, int]]]
):
# type: (...) -> Union[float, numpy.ScalarType]
"""Calculate Coefficient of determination.
Same as the square of the Pearson correlation coefficient (r),
and, the same as the built-in Excel function RSQ().
Programmed according to equation (1) in
Legates, D.R. and G.J. McCabe, 1999. Evaluating the use of "goodness of fit" measures
        in hydrologic and hydroclimatic model validation. Water Resources Research 35:233-241.
Args:
obsvalues: observe values array
simvalues: simulate values array
Examples:
>>> obs = [2.92, 2.75, 2.01, 1.09, 2.87, 1.43, 1.96,\
4.00, 2.24, 29.28, 5.88, 0.86, 13.21]
>>> sim = [2.90, 2.87, 2.85, 2.83, 3.04, 2.81, 2.85,\
2.78, 2.76, 13.40, 2.70, 2.09, 1.62]
>>> MathClass.rsquare(obs, sim) # doctest: +ELLIPSIS
0.7528851650345053...
Returns:
R-square value, or raise exception
"""
if len(obsvalues) != len(simvalues):
raise ValueError("The size of observed and simulated values must be "
"the same for R-square calculation!")
if not isinstance(obsvalues, numpy.ndarray):
obsvalues = numpy.array(obsvalues)
if not isinstance(simvalues, numpy.ndarray):
simvalues = numpy.array(simvalues)
obs_avg = numpy.mean(obsvalues)
pred_avg = numpy.mean(simvalues)
obs_minus_avg_sq = numpy.sum((obsvalues - obs_avg) ** 2)
pred_minus_avg_sq = numpy.sum((simvalues - pred_avg) ** 2)
obs_pred_minus_avgs = numpy.sum((obsvalues - obs_avg) * (simvalues - pred_avg))
# Calculate R-square
yy = obs_minus_avg_sq ** 0.5 * pred_minus_avg_sq ** 0.5
if MathClass.floatequal(yy, 0.):
return 1.
return (obs_pred_minus_avgs / yy) ** 2. | 0.005201 |
def add_exposure(self, layer):
"""Add an exposure layer in the analysis.
:param layer: An exposure layer to be used for the analysis.
:type layer: QgsMapLayer
"""
self._exposures.append(layer)
self._is_ready = False | 0.007576 |
def copy_ccube(ccube, outsrcmap, hpx_order):
"""Copy a counts cube into outsrcmap file
reducing the HEALPix order to hpx_order if needed.
"""
sys.stdout.write(" Copying counts cube from %s to %s\n" % (ccube, outsrcmap))
try:
hdulist_in = fits.open(ccube)
except IOError:
hdulist_in = fits.open("%s.gz" % ccube)
hpx_order_in = hdulist_in[1].header['ORDER']
if hpx_order_in > hpx_order:
hpxmap = HpxMap.create_from_hdulist(hdulist_in)
hpxmap_out = hpxmap.ud_grade(hpx_order, preserve_counts=True)
hpxlist_out = hdulist_in
#hpxlist_out['SKYMAP'] = hpxmap_out.create_image_hdu()
hpxlist_out[1] = hpxmap_out.create_image_hdu()
hpxlist_out[1].name = 'SKYMAP'
hpxlist_out.writeto(outsrcmap)
return hpx_order
else:
os.system('cp %s %s' % (ccube, outsrcmap))
#os.system('cp %s.gz %s.gz' % (ccube, outsrcmap))
#os.system('gunzip -f %s.gz' % (outsrcmap))
return None | 0.005484 |
def sampleset(self, step=0.01, minimal=False):
"""Return ``x`` array that samples the feature.
Parameters
----------
step : float
Distance of first and last points w.r.t. bounding box.
minimal : bool
Only return the minimal points needed to define the box;
i.e., box edges and a point outside on each side.
"""
w1, w2 = self.bounding_box
if self._n_models == 1:
w = self._calc_sampleset(w1, w2, step, minimal)
else:
w = list(map(partial(
self._calc_sampleset, step=step, minimal=minimal), w1, w2))
return np.asarray(w) | 0.002946 |
def eventFilter(self, widget, event):
"""A filter to control the zooming and panning of the figure canvas."""
# ---- Zooming
if event.type() == QEvent.Wheel:
modifiers = QApplication.keyboardModifiers()
if modifiers == Qt.ControlModifier:
if event.angleDelta().y() > 0:
self.zoom_in()
else:
self.zoom_out()
return True
else:
return False
# ---- Panning
# Set ClosedHandCursor:
elif event.type() == QEvent.MouseButtonPress:
if event.button() == Qt.LeftButton:
QApplication.setOverrideCursor(Qt.ClosedHandCursor)
self._ispanning = True
self.xclick = event.globalX()
self.yclick = event.globalY()
# Reset Cursor:
elif event.type() == QEvent.MouseButtonRelease:
QApplication.restoreOverrideCursor()
self._ispanning = False
# Move ScrollBar:
elif event.type() == QEvent.MouseMove:
if self._ispanning:
dx = self.xclick - event.globalX()
self.xclick = event.globalX()
dy = self.yclick - event.globalY()
self.yclick = event.globalY()
scrollBarH = self.horizontalScrollBar()
scrollBarH.setValue(scrollBarH.value() + dx)
scrollBarV = self.verticalScrollBar()
scrollBarV.setValue(scrollBarV.value() + dy)
return QWidget.eventFilter(self, widget, event) | 0.001236 |
def semidetached(b, component, solve_for=None, **kwargs):
"""
Create a constraint to force requiv to be semidetached
"""
comp_ps = b.get_component(component=component)
requiv = comp_ps.get_parameter(qualifier='requiv')
requiv_critical = comp_ps.get_parameter(qualifier='requiv_max')
if solve_for in [requiv, None]:
lhs = requiv
rhs = 1.0*requiv_critical
else:
raise NotImplementedError
return lhs, rhs, {'component': component} | 0.002041 |
def make_query_plan(self, working_keyspace=None, query=None):
"""
Defers to the child policy's
:meth:`.LoadBalancingPolicy.make_query_plan` and filters the results.
Note that this filtering may break desirable properties of the wrapped
policy in some cases. For instance, imagine if you configure this
policy to filter out ``host2``, and to wrap a round-robin policy that
rotates through three hosts in the order ``host1, host2, host3``,
``host2, host3, host1``, ``host3, host1, host2``, repeating. This
policy will yield ``host1, host3``, ``host3, host1``, ``host3, host1``,
disproportionately favoring ``host3``.
"""
child_qp = self._child_policy.make_query_plan(
working_keyspace=working_keyspace, query=query
)
for host in child_qp:
if self.predicate(host):
yield host | 0.002162 |
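The skew described in the docstring is easy to reproduce with plain Python stand-ins for the wrapped round-robin policy and the predicate (no cassandra-driver objects involved; the host names are illustrative):

from itertools import islice

hosts = ['host1', 'host2', 'host3']

def round_robin_plans():
    start = 0
    while True:   # host1,host2,host3 / host2,host3,host1 / host3,host1,host2 / ...
        yield [hosts[(start + i) % len(hosts)] for i in range(len(hosts))]
        start += 1

predicate = lambda h: h != 'host2'   # the wrapper filters out host2

for plan in islice(round_robin_plans(), 3):
    print([h for h in plan if predicate(h)])
# ['host1', 'host3'] / ['host3', 'host1'] / ['host3', 'host1'] -> host3 is favored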
def _health_check_thread(self):
"""
Health checker thread that pings the service every 30 seconds
:return: None
"""
while self._run_health_checker:
response = self._health_check(Health_pb2.HealthCheckRequest(service='predix-event-hub.grpc.health'))
logging.debug('received health check: ' + str(response))
time.sleep(30)
return | 0.007299 |
def verify_fft_options(opt,parser):
"""Parses the FFT options and verifies that they are
reasonable.
Parameters
----------
opt : object
Result of parsing the CLI with OptionParser, or any object with the
required attributes.
parser : object
OptionParser instance.
"""
if opt.fftw_measure_level not in [0,1,2,3]:
parser.error("{0} is not a valid FFTW measure level.".format(opt.fftw_measure_level))
if opt.fftw_import_system_wisdom and ((opt.fftw_input_float_wisdom_file is not None)
or (opt.fftw_input_double_wisdom_file is not None)):
parser.error("If --fftw-import-system-wisdom is given, then you cannot give"
" either of --fftw-input-float-wisdom-file or --fftw-input-double-wisdom-file")
if opt.fftw_threads_backend is not None:
if opt.fftw_threads_backend not in ['openmp','pthreads','unthreaded']:
parser.error("Invalid threads backend; must be 'openmp', 'pthreads' or 'unthreaded'") | 0.012276 |
def upsert_license_requests(cursor, uuid_, roles):
"""Given a ``uuid`` and list of ``roles`` (user identifiers)
create a license acceptance entry. If ``has_accepted`` is supplied,
it will be used to assign an acceptance value to all listed ``uids``.
"""
if not isinstance(roles, (list, set, tuple,)):
raise TypeError("``roles`` is an invalid type: {}".format(type(roles)))
acceptors = set([x['uid'] for x in roles])
# Acquire a list of existing acceptors.
cursor.execute("""\
SELECT user_id, accepted FROM license_acceptances WHERE uuid = %s""",
(uuid_,))
existing_acceptors = cursor.fetchall()
# Who's not in the existing list?
new_acceptors = acceptors.difference([x[0] for x in existing_acceptors])
# Insert the new licensor acceptors.
if new_acceptors:
args = []
values_fmt = []
for uid in new_acceptors:
has_accepted = [x.get('has_accepted', None)
for x in roles
if uid == x['uid']][0]
args.extend([uuid_, uid, has_accepted])
values_fmt.append("(%s, %s, %s)")
values_fmt = ', '.join(values_fmt)
cursor.execute("""\
INSERT INTO license_acceptances (uuid, user_id, accepted)
VALUES {}""".format(values_fmt), args)
# Update any existing license acceptors
acceptors = set([
(x['uid'], x.get('has_accepted', None),)
for x in roles
# Prevent updating newly inserted records.
        if x['uid'] not in new_acceptors
])
existing_acceptors = set([
x for x in existing_acceptors
# Prevent updating newly inserted records.
if x[0] not in new_acceptors
])
tobe_updated_acceptors = acceptors.difference(existing_acceptors)
for uid, has_accepted in tobe_updated_acceptors:
cursor.execute("""\
UPDATE license_acceptances SET accepted = %s
WHERE uuid = %s AND user_id = %s""", (has_accepted, uuid_, uid,)) | 0.000493 |
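The insert/update split above boils down to set bookkeeping over the uids; a toy, in-memory illustration of that bookkeeping with made-up values and no database cursor:

roles = [{'uid': 'alice', 'has_accepted': True}, {'uid': 'bob'}]
existing = {('alice', None), ('carol', True)}   # (user_id, accepted) rows already stored

acceptors = {r['uid'] for r in roles}
new_acceptors = acceptors - {uid for uid, _ in existing}            # {'bob'} -> INSERT
to_update = {(r['uid'], r.get('has_accepted')) for r in roles
             if r['uid'] not in new_acceptors} - existing           # {('alice', True)} -> UPDATE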
def compatible_staticpath(path):
"""
Try to return a path to static the static files compatible all
the way back to Django 1.2. If anyone has a cleaner or better
way to do this let me know!
"""
if VERSION >= (1, 10):
# Since Django 1.10, forms.Media automatically invoke static
# lazily on the path if it is relative.
return path
try:
# >= 1.4
from django.templatetags.static import static
return static(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path) | 0.001205 |
def user(self, user):
"""
Sets the user of this WebCredentials.
The name of the account to login to.
:param user: The user of this WebCredentials.
:type: str
"""
if user is None:
raise ValueError("Invalid value for `user`, must not be `None`")
if user is not None and len(user) > 1024:
raise ValueError("Invalid value for `user`, length must be less than or equal to `1024`")
self._user = user | 0.00611 |
def geo2d(self, name, min=None, max=None):
""" Create a 2d index. See:
http://www.mongodb.org/display/DOCS/Geospatial+Indexing
:param name: Name of the indexed column
:param min: minimum value for the index
        :param max: maximum value for the index
"""
self.components.append((name, GEO2D))
self.__min = min
self.__max = max
return self | 0.00463 |
def SetParserProp(self, prop, value):
"""Change the parser processing behaviour by changing some of
its internal properties. Note that some properties can only
be changed before any read has been done. """
ret = libxml2mod.xmlTextReaderSetParserProp(self._o, prop, value)
return ret | 0.006116 |
def createResults(config,srcfile,section='source',samples=None):
""" Create an MCMC instance """
source = ugali.analysis.source.Source()
source.load(srcfile,section=section)
loglike = ugali.analysis.loglike.createLoglike(config,source)
results = Results(config,loglike,samples)
if samples is not None:
results.load_samples(samples)
return results | 0.020779 |
def get_port_bindings(binding_key=None):
"""Returns filtered list of port bindings that may be relevant on CVX
This query is a little complex as we need all binding levels for any
binding that has a single managed physnet, but we need to filter bindings
that have no managed physnets. In order to achieve this, we join to the
binding_level_model once to filter bindings with no managed levels,
then a second time to get all levels for the remaining bindings.
The loop at the end is a convenience to associate levels with bindings
as a list. This would ideally be done through the use of an orm.relation,
but due to some sqlalchemy limitations imposed to make OVO work, we can't
add relations to existing models.
"""
session = db.get_reader_session()
with session.begin():
binding_level_model = ml2_models.PortBindingLevel
aliased_blm = aliased(ml2_models.PortBindingLevel)
port_binding_model = ml2_models.PortBinding
dist_binding_model = ml2_models.DistributedPortBinding
bindings = (session.query(port_binding_model, aliased_blm)
.join(binding_level_model,
and_(
port_binding_model.port_id ==
binding_level_model.port_id,
port_binding_model.host ==
binding_level_model.host))
.filter_unnecessary_ports()
.join(aliased_blm,
and_(port_binding_model.port_id ==
aliased_blm.port_id,
port_binding_model.host ==
aliased_blm.host)))
dist_bindings = (session.query(dist_binding_model, aliased_blm)
.join(
binding_level_model,
and_(dist_binding_model.port_id ==
binding_level_model.port_id,
dist_binding_model.host ==
binding_level_model.host))
.filter_unnecessary_ports()
.filter(dist_binding_model.status ==
n_const.PORT_STATUS_ACTIVE)
.join(aliased_blm,
and_(dist_binding_model.port_id ==
aliased_blm.port_id,
dist_binding_model.host ==
aliased_blm.host)))
if binding_key:
port_id = binding_key[0]
if type(binding_key[1]) == tuple:
switch_id = binding_key[1][0]
switch_port = binding_key[1][1]
bindings = bindings.filter(and_(
port_binding_model.port_id == port_id,
port_binding_model.profile.ilike('%%%s%%' % switch_id),
port_binding_model.profile.ilike('%%%s%%' % switch_port)))
dist_bindings = dist_bindings.filter(and_(
dist_binding_model.port_id == port_id,
dist_binding_model.profile.ilike('%%%s%%' % switch_id),
dist_binding_model.profile.ilike('%%%s%%' % switch_port)))
else:
host_id = binding_key[1]
bindings = bindings.filter(and_(
port_binding_model.port_id == port_id,
port_binding_model.host == host_id))
dist_bindings = dist_bindings.filter(and_(
dist_binding_model.port_id == port_id,
dist_binding_model.host == host_id))
binding_levels = collections.defaultdict(list)
for binding, level in bindings.all() + dist_bindings.all():
binding_levels[binding].append(level)
bindings_with_levels = list()
for binding, levels in binding_levels.items():
binding.levels = levels
bindings_with_levels.append(binding)
return bindings_with_levels | 0.000244 |
def map_keys(f, dct):
"""
Calls f with each key of dct, possibly returning a modified key. Values are unchanged
:param f: Called with each key and returns the same key or a modified key
:param dct:
    :return: A dct with keys possibly modified but values unchanged
"""
f_dict = {}
for k, v in dct.items():
f_dict[f(k)] = v
return f_dict | 0.005263 |
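A one-line usage example of the function above:

assert map_keys(str.upper, {'a': 1, 'b': 2}) == {'A': 1, 'B': 2}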
def _getTPDynamicState(self,):
"""
Parameters:
--------------------------------------------
retval: A dict with all the dynamic state variable names as keys and
their values at this instant as values.
"""
tpDynamicState = dict()
for variableName in self._getTPDynamicStateVariableNames():
tpDynamicState[variableName] = copy.deepcopy(self.__dict__[variableName])
return tpDynamicState | 0.004484 |
def create_property_nod_worksheets(workbook, data_list, result_info_key, identifier_keys):
"""Creates two worksheets out of the property/nod data because the data
doesn't come flat enough to make sense on one sheet.
Args:
workbook: the main workbook to add the sheets to
data_list: the main list of data
result_info_key: the key in api_data dicts that contains the data results
Should always be 'address_info' for property/nod
identifier_keys: the list of keys used as requested identifiers
(address, zipcode, city, state, etc)
"""
nod_details_list = []
nod_default_history_list = []
for prop_data in data_list:
nod_data = prop_data['property/nod']
if nod_data is None:
nod_data = {}
default_history_data = nod_data.pop('default_history', [])
_set_identifier_fields(nod_data, prop_data, result_info_key, identifier_keys)
nod_details_list.append(nod_data)
for item in default_history_data:
_set_identifier_fields(item, prop_data, result_info_key, identifier_keys)
nod_default_history_list.append(item)
worksheet = workbook.create_sheet(title='NOD Details')
write_data(worksheet, nod_details_list)
worksheet = workbook.create_sheet(title='NOD Default History')
write_data(worksheet, nod_default_history_list) | 0.003439 |
def rnd_datetime_array(self,
size, start=datetime(1970, 1, 1), end=datetime.now()):
"""Array or Matrix of random datetime generator.
"""
if isinstance(start, string_types):
start = self.str2datetime(start)
        if isinstance(end, string_types):
end = self.str2datetime(end)
if start > end:
raise ValueError("start time has to be earlier than end time")
return self.randn(size, self._rnd_datetime, start, end) | 0.007905 |
def handleError(self, record):
"""
Handles any errors raised during the :meth:`emit` method. Will only try to pass exceptions to fallback notifier
(if defined) in case the exception is a sub-class of :exc:`~notifiers.exceptions.NotifierException`
:param record: :class:`logging.LogRecord`
"""
if logging.raiseExceptions:
t, v, tb = sys.exc_info()
if issubclass(t, NotifierException) and self.fallback:
msg = f"Could not log msg to provider '{self.provider.name}'!\n{v}"
self.fallback_defaults["message"] = msg
self.fallback.notify(**self.fallback_defaults)
else:
super().handleError(record) | 0.006766 |
def get_input_list(self):
"""
Description:
Get input list
Returns an ordered list of all available input keys and names
"""
inputs = [' '] * len(self.command['input'])
for key in self.command['input']:
inputs[self.command['input'][key]['order']] = {"key":key, "name":self.command['input'][key]['name']}
return inputs | 0.012438 |
def dispatch_queue(self):
"""
Dispatch any queued requests.
Called by the debugger when it stops.
"""
self.queue_lock.acquire()
q = list(self.queue)
self.queue = []
self.queue_lock.release()
log.debug("Dispatching requests: {}".format(q))
for req in q:
req.response = self.dispatch_request(req)
for req in q:
req.signal() | 0.004608 |
def gabc(key, value, fmt, meta): # pylint:disable=I0011,W0613
"""Handle gabc file inclusion and gabc code block."""
if key == 'Code':
[[ident, classes, kvs], contents] = value # pylint:disable=I0011,W0612
kvs = {key: value for key, value in kvs}
if "gabc" in classes:
if fmt == "latex":
if ident == "":
label = ""
else:
label = '\\label{' + ident + '}'
return latex(
"\n\\smallskip\n{%\n" +
latexsnippet('\\gregorioscore{' + contents + '}', kvs) +
"%\n}" +
label
)
else:
infile = contents + (
'.gabc' if '.gabc' not in contents else ''
)
with open(infile, 'r') as doc:
code = doc.read().split('%%\n')[1]
return [Image(['', [], []], [], [
png(
contents,
latexsnippet('\\gregorioscore', kvs)
),
""
])]
elif key == 'CodeBlock':
[[ident, classes, kvs], contents] = value
kvs = {key: value for key, value in kvs}
if "gabc" in classes:
if fmt == "latex":
if ident == "":
label = ""
else:
label = '\\label{' + ident + '}'
return [latexblock(
"\n\\smallskip\n{%\n" +
latexsnippet('\\gabcsnippet{' + contents + '}', kvs) +
"%\n}" +
label
)]
else:
return Para([Image(['', [], []], [], [
png(
contents,
latexsnippet('\\gabcsnippet', kvs)
),
""
])]) | 0.000499 |