text (string, lengths 78 to 104k) | score (float64, 0 to 0.18) |
---|---|
def getblock(self, hash: str) -> dict:
'''Returns information about the block with the given hash.'''
return cast(dict, self.api_fetch('getblock?hash=' + hash)) | 0.011299 |
def main():
"""Writes out newsfile if significant version bump"""
last_known = '0'
if os.path.isfile(metafile):
with open(metafile) as fh:
last_known = fh.read()
import mbed_cloud
current = mbed_cloud.__version__
# how significant a change in version scheme should trigger a new changelog entry
# (api major, api minor, sdk major, sdk minor, sdk patch)
sigfigs = 4
current_version = LooseVersion(current).version
last_known_version = LooseVersion(last_known).version
should_towncrier = current_version[:sigfigs] != last_known_version[:sigfigs]
print('%s -- %s :: current vs previous changelog build' % (current, last_known))
if should_towncrier:
print('%s >> %s :: running changelog build' % (current, last_known))
subprocess.check_call(
['towncrier', '--yes'],
cwd=os.path.join(PROJECT_ROOT, 'docs', 'changelog')
)
with open(metafile, 'w') as fh:
fh.write(current) | 0.003964 |
def set_led(self, colorcode):
""" Set the LED Color of Herkulex
Args:
colorcode (int): The code for colors
(0x00-OFF
0x01-GREEN
0x02-BLUE
0x03-CYAN
0x04-RED
0x05-ORANGE
0x06-VIOLET
0x07-WHITE)
"""
data = []
data.append(0x0A)
data.append(self.servoid)
data.append(RAM_WRITE_REQ)
data.append(LED_CONTROL_RAM)
data.append(0x01)
data.append(colorcode)
send_data(data) | 0.00458 |
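A minimal usage sketch for set_led; the Servo class name and its constructor are assumptions not shown in the snippet above, and the color codes follow the docstring.
# Hedged usage sketch: `Servo` and its constructor arguments are hypothetical stand-ins
# for whatever class exposes set_led above; 0x03 maps to CYAN per the docstring.
servo = Servo(servoid=0xFD)  # hypothetical Herkulex servo id
servo.set_led(0x03)          # LED cyan
servo.set_led(0x00)          # LED off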
def collapseBefore( self, handle ):
"""
Collapses the splitter before the given handle.
:param handle | <XSplitterHandle>
"""
self.setUpdatesEnabled(False)
# if the handle is already collapsed, restore its stored sizes
if ( handle.isCollapsed() ):
self.setSizes(handle.restoreSizes())
# collapse all items before the current handle
found = False
sizes = self.sizes()
handle.storeSizes(sizes)
for c in range(self.count()):
if ( self.handle(c) == handle ):
break
sizes[c] = 0
self.setSizes(sizes)
self.setUpdatesEnabled(True) | 0.020053 |
def read_envfile(path=None, **overrides):
"""
Read a .env file (line delimited KEY=VALUE) into os.environ.
If not given a path to the file, recurses up the directory tree until
found.
Uses code from Honcho (github.com/nickstenning/honcho) for parsing the
file.
"""
if path is None:
frame = inspect.currentframe().f_back
caller_dir = os.path.dirname(frame.f_code.co_filename)
path = os.path.join(os.path.abspath(caller_dir), '.env')
try:
with open(path, 'r') as f:
content = f.read()
except getattr(__builtins__, 'FileNotFoundError', IOError):
logger.debug('envfile not found at %s, looking in parent dir.',
path)
filedir, filename = os.path.split(path)
pardir = os.path.abspath(os.path.join(filedir, os.pardir))
path = os.path.join(pardir, filename)
if filedir != pardir:
Env.read_envfile(path, **overrides)
else:
# Reached top level directory.
warnings.warn('Could not find any envfile.')
return
logger.debug('Reading environment variables from: %s', path)
for line in content.splitlines():
tokens = list(shlex.shlex(line, posix=True))
# parses the assignment statement
if len(tokens) < 3:
continue
name, op = tokens[:2]
value = ''.join(tokens[2:])
if op != '=':
continue
if not re.match(r'[A-Za-z_][A-Za-z_0-9]*', name):
continue
value = value.replace(r'\n', '\n').replace(r'\t', '\t')
os.environ.setdefault(name, value)
for name, value in overrides.items():
os.environ.setdefault(name, value) | 0.001059 |
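A short usage sketch for read_envfile, assuming it is called with no path so it resolves a .env file next to the caller; the file contents and variable name are illustrative.
import os

# Hedged usage sketch: a .env file beside the calling module might contain, e.g.:
#   DATABASE_URL=postgres://localhost/app
read_envfile()  # walks up the directory tree until a .env file is found
print(os.environ.get('DATABASE_URL'))  # set only if it was not already present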
def obj_deref(ref):
"""Returns the object identified by `ref`"""
from indico_livesync.models.queue import EntryType
if ref['type'] == EntryType.category:
return Category.get_one(ref['category_id'])
elif ref['type'] == EntryType.event:
return Event.get_one(ref['event_id'])
elif ref['type'] == EntryType.session:
return Session.get_one(ref['session_id'])
elif ref['type'] == EntryType.contribution:
return Contribution.get_one(ref['contrib_id'])
elif ref['type'] == EntryType.subcontribution:
return SubContribution.get_one(ref['subcontrib_id'])
else:
raise ValueError('Unexpected object type: {}'.format(ref['type'])) | 0.001435 |
def _readFile(self, fileName):
"""
Returns the bytes of the file.
"""
with open(fileName, 'rb') as binFile:
b = binFile.read()
return to_ubyte_array(b) | 0.009662 |
def _generate_command(self, func, name=None, **kwargs):
"""Generates a command parser for given func.
:param func: func to generate related command parser
:param type: function
:param name: command name
:param type: str
:param **kwargs: keyword arguments those passed through to
:py:class:``argparse.ArgumentParser.add_parser``
:param type: dict
"""
func_pointer = name or func.__name__
storm_config = get_storm_config()
aliases, additional_kwarg = None, None
if 'aliases' in storm_config:
for command, alias_list in \
six.iteritems(storm_config.get("aliases")):
if func_pointer == command:
aliases = alias_list
break
func_help = func.__doc__ and func.__doc__.strip()
subparser = self.subparsers.add_parser(name or func.__name__,
aliases=aliases,
help=func_help)
spec = inspect.getargspec(func)
opts = reversed(list(izip_longest(reversed(spec.args or []),
reversed(spec.defaults or []),
fillvalue=self._POSITIONAL())))
for k, v in opts:
argopts = getattr(func, 'argopts', {})
args, kwargs = argopts.get(k, ([], {}))
args = list(args)
is_positional = isinstance(v, self._POSITIONAL)
options = [arg for arg in args if arg.startswith('-')]
if isinstance(v, list):
kwargs.update({
'action': 'append',
})
if is_positional:
if options:
args = options
kwargs.update({'required': True, 'dest': k})
else:
args = [k]
else:
args = options or ['--%s' % k]
kwargs.update({'default': v, 'dest': k})
arg = subparser.add_argument(*args, **kwargs)
subparser.set_defaults(**{self._COMMAND_FLAG: func})
return func | 0.000897 |
def _process_coref_span_annotations_for_word(label: str,
word_index: int,
clusters: DefaultDict[int, List[Tuple[int, int]]],
coref_stacks: DefaultDict[int, List[int]]) -> None:
"""
For a given coref label, add it to a currently open span(s), complete a span(s) or
ignore it, if it is outside of all spans. This method mutates the clusters and coref_stacks
dictionaries.
Parameters
----------
label : ``str``
The coref label for this word.
word_index : ``int``
The word index into the sentence.
clusters : ``DefaultDict[int, List[Tuple[int, int]]]``
A dictionary mapping cluster ids to lists of inclusive spans into the
sentence.
coref_stacks: ``DefaultDict[int, List[int]]``
Stacks for each cluster id to hold the start indices of active spans (spans
which we are inside of when processing a given word). Spans with the same id
can be nested, which is why we collect these opening spans on a stack, e.g:
[Greg, the baker who referred to [himself]_ID1 as 'the bread man']_ID1
"""
if label != "-":
for segment in label.split("|"):
# The conll representation of coref spans allows spans to
# overlap. If spans end or begin at the same word, they are
# separated by a "|".
if segment[0] == "(":
# The span begins at this word.
if segment[-1] == ")":
# The span begins and ends at this word (single word span).
cluster_id = int(segment[1:-1])
clusters[cluster_id].append((word_index, word_index))
else:
# The span is starting, so we record the index of the word.
cluster_id = int(segment[1:])
coref_stacks[cluster_id].append(word_index)
else:
# The span for this id is ending, but didn't start at this word.
# Retrieve the start index from the document state and
# add the span to the clusters for this id.
cluster_id = int(segment[:-1])
start = coref_stacks[cluster_id].pop()
clusters[cluster_id].append((start, word_index)) | 0.006597 |
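A worked example of the CoNLL coreference labels this helper consumes; the label sequence is illustrative and the defaultdicts mirror the signature above.
from collections import defaultdict

# Hedged worked example: "(1" opens span 1, "(1)" is a single-word span 1, "1)" closes span 1.
clusters = defaultdict(list)
coref_stacks = defaultdict(list)
labels = ["(1", "-", "(1)", "-", "1)"]
for word_index, label in enumerate(labels):
    _process_coref_span_annotations_for_word(label, word_index, clusters, coref_stacks)
# clusters[1] is now [(2, 2), (0, 4)]: the nested single-word span and the enclosing span.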
def handle_command(self, master, mpstate, args):
'''handle parameter commands'''
param_wildcard = "*"
usage="Usage: param <fetch|save|set|show|load|preload|forceload|diff|download|help>"
if len(args) < 1:
print(usage)
return
if args[0] == "fetch":
if len(args) == 1:
master.param_fetch_all()
self.mav_param_set = set()
print("Requested parameter list")
else:
found = False
pname = args[1].upper()
for p in self.mav_param.keys():
if fnmatch.fnmatch(p, pname):
master.param_fetch_one(p)
if p not in self.fetch_one:
self.fetch_one[p] = 0
self.fetch_one[p] += 1
found = True
print("Requested parameter %s" % p)
if not found and args[1].find('*') == -1:
master.param_fetch_one(pname)
if pname not in self.fetch_one:
self.fetch_one[pname] = 0
self.fetch_one[pname] += 1
print("Requested parameter %s" % pname)
elif args[0] == "save":
if len(args) < 2:
print("usage: param save <filename> [wildcard]")
return
if len(args) > 2:
param_wildcard = args[2]
else:
param_wildcard = "*"
self.mav_param.save(args[1], param_wildcard, verbose=True)
elif args[0] == "diff":
wildcard = '*'
if len(args) < 2 or args[1].find('*') != -1:
if self.vehicle_name is None:
print("Unknown vehicle type")
return
filename = mp_util.dot_mavproxy("%s-defaults.parm" % self.vehicle_name)
if not os.path.exists(filename):
print("Please run 'param download' first (vehicle_name=%s)" % self.vehicle_name)
return
if len(args) >= 2:
wildcard = args[1]
else:
filename = args[1]
if len(args) == 3:
wildcard = args[2]
print("%-16.16s %12.12s %12.12s" % ('Parameter', 'Defaults', 'Current'))
self.mav_param.diff(filename, wildcard=wildcard)
elif args[0] == "set":
if len(args) < 2:
print("Usage: param set PARMNAME VALUE")
return
if len(args) == 2:
self.mav_param.show(args[1])
return
param = args[1]
value = args[2]
if value.startswith('0x'):
value = int(value, base=16)
if not param.upper() in self.mav_param:
print("Unable to find parameter '%s'" % param)
return
uname = param.upper()
ptype = None
if uname in self.param_types:
ptype = self.param_types[uname]
self.mav_param.mavset(master, uname, value, retries=3, parm_type=ptype)
if (param.upper() == "WP_LOITER_RAD" or param.upper() == "LAND_BREAK_PATH"):
#need to redraw rally points
mpstate.module('rally').rallyloader.last_change = time.time()
#need to redraw loiter points
mpstate.module('wp').wploader.last_change = time.time()
elif args[0] == "load":
if len(args) < 2:
print("Usage: param load <filename> [wildcard]")
return
if len(args) > 2:
param_wildcard = args[2]
else:
param_wildcard = "*"
self.mav_param.load(args[1], param_wildcard, master)
elif args[0] == "preload":
if len(args) < 2:
print("Usage: param preload <filename>")
return
self.mav_param.load(args[1])
elif args[0] == "forceload":
if len(args) < 2:
print("Usage: param forceload <filename> [wildcard]")
return
if len(args) > 2:
param_wildcard = args[2]
else:
param_wildcard = "*"
self.mav_param.load(args[1], param_wildcard, master, check=False)
elif args[0] == "download":
self.param_help_download()
elif args[0] == "apropos":
self.param_apropos(args[1:])
elif args[0] == "help":
self.param_help(args[1:])
elif args[0] == "set_xml_filepath":
self.param_set_xml_filepath(args[1:])
elif args[0] == "show":
if len(args) > 1:
pattern = args[1]
else:
pattern = "*"
self.mav_param.show(pattern)
elif args[0] == "status":
print("Have %u/%u params" % (len(self.mav_param_set), self.mav_param_count))
else:
print(usage) | 0.002533 |
def child_allocation(self):
""" The sum of all child asset classes' allocations """
sum = Decimal(0)
if self.classes:
for child in self.classes:
sum += child.child_allocation
else:
# This is not a branch but a leaf. Return own allocation.
sum = self.allocation
return sum | 0.005495 |
def parseSOAPMessage(data, ipAddr):
"parse raw XML data string, return a (minidom) xml document"
try:
dom = minidom.parseString(data)
except Exception:
#print('Failed to parse message from %s\n"%s": %s' % (ipAddr, data, ex), file=sys.stderr)
return None
if dom.getElementsByTagNameNS(NS_S, "Fault"):
#print('Fault received from %s:' % (ipAddr, data), file=sys.stderr)
return None
soapAction = dom.getElementsByTagNameNS(NS_A, "Action")[0].firstChild.data.strip()
if soapAction == ACTION_PROBE:
return parseProbeMessage(dom)
elif soapAction == ACTION_PROBE_MATCH:
return parseProbeMatchMessage(dom)
elif soapAction == ACTION_RESOLVE:
return parseResolveMessage(dom)
elif soapAction == ACTION_RESOLVE_MATCH:
return parseResolveMatchMessage(dom)
elif soapAction == ACTION_BYE:
return parseByeMessage(dom)
elif soapAction == ACTION_HELLO:
return parseHelloMessage(dom) | 0.005005 |
def get_data(self, smoothed=True, masked=True, safe_copy=False):
"""Get the data in the image.
If save_copy is True, will perform a deep copy of the data and return it.
Parameters
----------
smoothed: (optional) bool
If True and self._smooth_fwhm > 0 will smooth the data before masking.
masked: (optional) bool
If True and self.has_mask will return the masked data, the plain data otherwise.
safe_copy: (optional) bool
Returns
-------
np.ndarray
"""
if not safe_copy and smoothed == self._is_data_smooth and masked == self._is_data_masked:
if self.has_data_loaded() and self._caching == 'fill':
return self.get_data()
if safe_copy:
data = get_data(self.img)
else:
data = self.img.get_data(caching=self._caching)
is_smoothed = False
if smoothed and self._smooth_fwhm > 0:
try:
data = _smooth_data_array(data, self.get_affine(), self._smooth_fwhm, copy=False)
except ValueError as ve:
raise ValueError('Error smoothing image {} with a {}mm FWHM '
'kernel.'.format(self.img, self._smooth_fwhm)) from ve
else:
is_smoothed = True
is_data_masked = False
if masked and self.has_mask():
try:
data = self.unmask(self._mask_data(data)[0])
except:
raise
else:
is_data_masked = True
if not safe_copy:
self._is_data_masked = is_data_masked
self._is_data_smooth = is_smoothed
return data | 0.005158 |
def get_server_api(token=None, site=None, cls=None, config=None, **kwargs):
"""
Get the anaconda server api class
"""
if not cls:
from binstar_client import Binstar
cls = Binstar
config = config if config is not None else get_config(site=site)
url = config.get('url', DEFAULT_URL)
logger.info("Using Anaconda API: %s", url)
if token:
logger.debug("Using token from command line args")
elif 'BINSTAR_API_TOKEN' in os.environ:
logger.debug("Using token from environment variable BINSTAR_API_TOKEN")
token = os.environ['BINSTAR_API_TOKEN']
elif 'ANACONDA_API_TOKEN' in os.environ:
logger.debug("Using token from environment variable ANACONDA_API_TOKEN")
token = os.environ['ANACONDA_API_TOKEN']
else:
token = load_token(url)
verify = config.get('ssl_verify', config.get('verify_ssl', True))
return cls(token, domain=url, verify=verify, **kwargs) | 0.002075 |
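A minimal usage sketch for get_server_api, assuming the anaconda-client package layout referenced above; the token value is obviously fake.
import os

# Hedged usage sketch: with no explicit token, the function falls back to the
# BINSTAR_API_TOKEN / ANACONDA_API_TOKEN environment variables, then to load_token().
os.environ.setdefault('ANACONDA_API_TOKEN', 'not-a-real-token')
api = get_server_api()  # a Binstar client bound to the configured Anaconda API URL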
def query(self, query, param=None):
""" Perform a SQL based query
This will abort on a failure to communicate with
the database.
:param query: string query
:param param: parameters for the query
:return: RecordList from psycopg2
"""
with self.conn.cursor() as curs:
print 'XXX QUERY', curs.mogrify(query, param)
try:
curs.execute(query, param)
except BaseException as exc:
msg = 'query: {}, param: {}, exc: {}'.format(query, param, exc)
if hasattr(exc, 'pgcode'):
msg = '{}, exc code: {}'.format(msg, exc.pgcode)
print msg
handle_exc(exc)
result = curs.fetchall()
return result | 0.002522 |
def VerifyScripts(verifiable):
"""
Verify the scripts of the provided `verifiable` object.
Args:
verifiable (neo.IO.Mixins.VerifiableMixin):
Returns:
bool: True if verification is successful. False otherwise.
"""
try:
hashes = verifiable.GetScriptHashesForVerifying()
except Exception as e:
logger.debug("couldn't get script hashes %s " % e)
return False
if len(hashes) != len(verifiable.Scripts):
logger.debug(f"hash - verification script length mismatch ({len(hashes)}/{len(verifiable.Scripts)})")
return False
blockchain = GetBlockchain()
for i in range(0, len(hashes)):
verification = verifiable.Scripts[i].VerificationScript
if len(verification) == 0:
sb = ScriptBuilder()
sb.EmitAppCall(hashes[i].Data)
verification = sb.ms.getvalue()
else:
verification_hash = Crypto.ToScriptHash(verification, unhex=False)
if hashes[i] != verification_hash:
logger.debug(f"hash {hashes[i]} does not match verification hash {verification_hash}")
return False
state_reader = GetStateReader()
script_table = CachedScriptTable(DBCollection(blockchain._db, DBPrefix.ST_Contract, ContractState))
engine = ApplicationEngine(TriggerType.Verification, verifiable, script_table, state_reader, Fixed8.Zero())
engine.LoadScript(verification)
invocation = verifiable.Scripts[i].InvocationScript
engine.LoadScript(invocation)
try:
success = engine.Execute()
state_reader.ExecutionCompleted(engine, success)
except Exception as e:
state_reader.ExecutionCompleted(engine, False, e)
if engine.ResultStack.Count != 1 or not engine.ResultStack.Pop().GetBoolean():
Helper.EmitServiceEvents(state_reader)
if engine.ResultStack.Count > 0:
logger.debug(f"Result stack failure! Count: {engine.ResultStack.Count} bool value: {engine.ResultStack.Pop().GetBoolean()}")
else:
logger.debug(f"Result stack failure! Count: {engine.ResultStack.Count}")
return False
Helper.EmitServiceEvents(state_reader)
return True | 0.004021 |
def accel_reset_terminal(self, *args):
# TODO KEYBINDINGS ONLY
"""Callback to reset and clean the terminal"""
HidePrevention(self.window).prevent()
current_term = self.get_notebook().get_current_terminal()
current_term.reset(True, True)
HidePrevention(self.window).allow()
return True | 0.008824 |
def to_meshpoint(meshcode, lat_multiplier, lon_multiplier):
"""地域メッシュコードから緯度経度を算出する。
下記のメッシュに対応している。
1次(80km四方):1
40倍(40km四方):40000
20倍(20km四方):20000
16倍(16km四方):16000
2次(10km四方):2
8倍(8km四方):8000
5倍(5km四方):5000
4倍(4km四方):4000
2.5倍(2.5km四方):2500
2倍(2km四方):2000
3次(1km四方):3
4次(500m四方):4
5次(250m四方):5
6次(125m四方):6
Args:
meshcode: 指定次の地域メッシュコード
lat_multiplier: 当該メッシュの基準点(南西端)から、緯度座標上の点の位置を当該メッシュの単位緯度の倍数で指定
lon_multiplier: 当該メッシュの基準点(南西端)から、経度座標上の点の位置を当該メッシュの単位経度の倍数で指定
Return:
lat: 世界測地系の緯度(度単位)
lon: 世界測地系の経度(度単位)
"""
def mesh_cord(func_higher_cord, func_unit_cord, func_multiplier):
return func_higher_cord() + func_unit_cord() * func_multiplier()
lat_multiplier_lv = lambda: lat_multiplier
lon_multiplier_lv = lambda: lon_multiplier
lat_multiplier_lv1 = _functools.partial(
lambda meshcode: int(meshcode[0:2]), meshcode=meshcode)
lon_multiplier_lv1 = _functools.partial(
lambda meshcode: int(meshcode[2:4]), meshcode=meshcode)
lat_multiplier_40000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[4:5])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_40000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[4:5])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_20000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[5:6])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_20000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[5:6])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_16000 = _functools.partial(
lambda meshcode: int(meshcode[4:5])/2, meshcode=meshcode)
lon_multiplier_16000 = _functools.partial(
lambda meshcode: int(meshcode[5:6])/2, meshcode=meshcode)
lat_multiplier_lv2 = _functools.partial(
lambda meshcode: int(meshcode[4:5]), meshcode=meshcode)
lon_multiplier_lv2 = _functools.partial(
lambda meshcode: int(meshcode[5:6]), meshcode=meshcode)
lat_multiplier_8000 = _functools.partial(
lambda meshcode: int(meshcode[4:5]), meshcode=meshcode)
lon_multiplier_8000 = _functools.partial(
lambda meshcode: int(meshcode[5:6]), meshcode=meshcode)
lat_multiplier_5000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[6:7])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_5000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[6:7])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_4000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_4000 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_2500 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_2500 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[7:8])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_2000 = _functools.partial(
lambda meshcode: int(meshcode[6:7])/2, meshcode=meshcode)
lon_multiplier_2000 = _functools.partial(
lambda meshcode: int(meshcode[7:8])/2, meshcode=meshcode)
lat_multiplier_lv3 = _functools.partial(
lambda meshcode: int(meshcode[6:7]), meshcode=meshcode)
lon_multiplier_lv3 = _functools.partial(
lambda meshcode: int(meshcode[7:8]), meshcode=meshcode)
lat_multiplier_lv4 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[8:9])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_lv4 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[8:9])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_lv5 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[9:10])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_lv5 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[9:10])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
lat_multiplier_lv6 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[10:11])-1)[2:].zfill(2)[0:1]), meshcode=meshcode)
lon_multiplier_lv6 = _functools.partial(
lambda meshcode: int(bin(int(meshcode[10:11])-1)[2:].zfill(2)[1:2]), meshcode=meshcode)
mesh_lv1_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=lambda: 0,
func_unit_cord=_unit_lat_lv1,
func_multiplier=lat_multiplier_lv1)
mesh_lv1_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=lambda: 100,
func_unit_cord=_unit_lon_lv1,
func_multiplier=lon_multiplier_lv1)
mesh_40000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_40000,
func_multiplier=lat_multiplier_40000)
mesh_40000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_40000,
func_multiplier=lon_multiplier_40000)
mesh_20000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_40000_default_lat,
func_unit_cord=_unit_lat_20000,
func_multiplier=lat_multiplier_20000)
mesh_20000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_40000_default_lon,
func_unit_cord=_unit_lon_20000,
func_multiplier=lon_multiplier_20000)
mesh_16000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_16000,
func_multiplier=lat_multiplier_16000)
mesh_16000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_16000,
func_multiplier=lon_multiplier_16000)
mesh_lv2_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_lv2,
func_multiplier=lat_multiplier_lv2)
mesh_lv2_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_lv2,
func_multiplier=lon_multiplier_lv2)
mesh_8000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_8000,
func_multiplier=lat_multiplier_8000)
mesh_8000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_8000,
func_multiplier=lon_multiplier_8000)
mesh_5000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lat,
func_unit_cord=_unit_lat_5000,
func_multiplier=lat_multiplier_5000)
mesh_5000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lon,
func_unit_cord=_unit_lon_5000,
func_multiplier=lon_multiplier_5000)
mesh_4000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_8000_default_lat,
func_unit_cord=_unit_lat_4000,
func_multiplier=lat_multiplier_4000)
mesh_4000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_8000_default_lon,
func_unit_cord=_unit_lon_4000,
func_multiplier=lon_multiplier_4000)
mesh_2500_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_5000_default_lat,
func_unit_cord=_unit_lat_2500,
func_multiplier=lat_multiplier_2500)
mesh_2500_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_5000_default_lon,
func_unit_cord=_unit_lon_2500,
func_multiplier=lon_multiplier_2500)
mesh_2000_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lat,
func_unit_cord=_unit_lat_2000,
func_multiplier=lat_multiplier_2000)
mesh_2000_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lon,
func_unit_cord=_unit_lon_2000,
func_multiplier=lon_multiplier_2000)
mesh_lv3_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lat,
func_unit_cord=_unit_lat_lv3,
func_multiplier=lat_multiplier_lv3)
mesh_lv3_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lon,
func_unit_cord=_unit_lon_lv3,
func_multiplier=lon_multiplier_lv3)
mesh_lv4_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv3_default_lat,
func_unit_cord=_unit_lat_lv4,
func_multiplier=lat_multiplier_lv4)
mesh_lv4_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv3_default_lon,
func_unit_cord=_unit_lon_lv4,
func_multiplier=lon_multiplier_lv4)
mesh_lv5_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv4_default_lat,
func_unit_cord=_unit_lat_lv5,
func_multiplier=lat_multiplier_lv5)
mesh_lv5_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv4_default_lon,
func_unit_cord=_unit_lon_lv5,
func_multiplier=lon_multiplier_lv5)
mesh_lv6_default_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv5_default_lat,
func_unit_cord=_unit_lat_lv6,
func_multiplier=lat_multiplier_lv6)
mesh_lv6_default_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv5_default_lon,
func_unit_cord=_unit_lon_lv6,
func_multiplier=lon_multiplier_lv6)
mesh_lv1_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lat,
func_unit_cord=_unit_lat_lv1,
func_multiplier=lat_multiplier_lv)
mesh_lv1_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv1_default_lon,
func_unit_cord=_unit_lon_lv1,
func_multiplier=lon_multiplier_lv)
mesh_40000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_40000_default_lat,
func_unit_cord=_unit_lat_40000,
func_multiplier=lat_multiplier_lv)
mesh_40000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_40000_default_lon,
func_unit_cord=_unit_lon_40000,
func_multiplier=lon_multiplier_lv)
mesh_20000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_20000_default_lat,
func_unit_cord=_unit_lat_20000,
func_multiplier=lat_multiplier_lv)
mesh_20000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_20000_default_lon,
func_unit_cord=_unit_lon_20000,
func_multiplier=lon_multiplier_lv)
mesh_16000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_16000_default_lat,
func_unit_cord=_unit_lat_16000,
func_multiplier=lat_multiplier_lv)
mesh_16000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_16000_default_lon,
func_unit_cord=_unit_lon_16000,
func_multiplier=lon_multiplier_lv)
mesh_lv2_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lat,
func_unit_cord=_unit_lat_lv2,
func_multiplier=lat_multiplier_lv)
mesh_lv2_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv2_default_lon,
func_unit_cord=_unit_lon_lv2,
func_multiplier=lon_multiplier_lv)
mesh_8000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_8000_default_lat,
func_unit_cord=_unit_lat_8000,
func_multiplier=lat_multiplier_lv)
mesh_8000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_8000_default_lon,
func_unit_cord=_unit_lon_8000,
func_multiplier=lon_multiplier_lv)
mesh_5000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_5000_default_lat,
func_unit_cord=_unit_lat_5000,
func_multiplier=lat_multiplier_lv)
mesh_5000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_5000_default_lon,
func_unit_cord=_unit_lon_5000,
func_multiplier=lon_multiplier_lv)
mesh_4000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_4000_default_lat,
func_unit_cord=_unit_lat_4000,
func_multiplier=lat_multiplier_lv)
mesh_4000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_4000_default_lon,
func_unit_cord=_unit_lon_4000,
func_multiplier=lon_multiplier_lv)
mesh_2500_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_2500_default_lat,
func_unit_cord=_unit_lat_2500,
func_multiplier=lat_multiplier_lv)
mesh_2500_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_2500_default_lon,
func_unit_cord=_unit_lon_2500,
func_multiplier=lon_multiplier_lv)
mesh_2000_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_2000_default_lat,
func_unit_cord=_unit_lat_2000,
func_multiplier=lat_multiplier_lv)
mesh_2000_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_2000_default_lon,
func_unit_cord=_unit_lon_2000,
func_multiplier=lon_multiplier_lv)
mesh_lv3_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv3_default_lat,
func_unit_cord=_unit_lat_lv3,
func_multiplier=lat_multiplier_lv)
mesh_lv3_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv3_default_lon,
func_unit_cord=_unit_lon_lv3,
func_multiplier=lon_multiplier_lv)
mesh_lv4_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv4_default_lat,
func_unit_cord=_unit_lat_lv4,
func_multiplier=lat_multiplier_lv)
mesh_lv4_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv4_default_lon,
func_unit_cord=_unit_lon_lv4,
func_multiplier=lon_multiplier_lv)
mesh_lv5_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv5_default_lat,
func_unit_cord=_unit_lat_lv5,
func_multiplier=lat_multiplier_lv)
mesh_lv5_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv5_default_lon,
func_unit_cord=_unit_lon_lv5,
func_multiplier=lon_multiplier_lv)
mesh_lv6_lat = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv6_default_lat,
func_unit_cord=_unit_lat_lv6,
func_multiplier=lat_multiplier_lv)
mesh_lv6_lon = _functools.partial(
mesh_cord,
func_higher_cord=mesh_lv6_default_lon,
func_unit_cord=_unit_lon_lv6,
func_multiplier=lon_multiplier_lv)
level = to_meshlevel(meshcode)
if level == 1:
return mesh_lv1_lat(), mesh_lv1_lon()
if level == 40000:
return mesh_40000_lat(), mesh_40000_lon()
if level == 20000:
return mesh_20000_lat(), mesh_20000_lon()
if level == 16000:
return mesh_16000_lat(), mesh_16000_lon()
if level == 2:
return mesh_lv2_lat(), mesh_lv2_lon()
if level == 8000:
return mesh_8000_lat(), mesh_8000_lon()
if level == 5000:
return mesh_5000_lat(), mesh_5000_lon()
if level == 4000:
return mesh_4000_lat(), mesh_4000_lon()
if level == 2500:
return mesh_2500_lat(), mesh_2500_lon()
if level == 2000:
return mesh_2000_lat(), mesh_2000_lon()
if level == 3:
return mesh_lv3_lat(), mesh_lv3_lon()
if level == 4:
return mesh_lv4_lat(), mesh_lv4_lon()
if level == 5:
return mesh_lv5_lat(), mesh_lv5_lon()
if level == 6:
return mesh_lv6_lat(), mesh_lv6_lon()
raise ValueError("the level is unsupported.") | 0.001148 |
def from_string(cls, cl_function, dependencies=(), nmr_constraints=None):
"""Parse the given CL function into a SimpleCLFunction object.
Args:
cl_function (str): the function we wish to turn into an object
dependencies (list or tuple of CLLibrary): The list of CL libraries this function depends on
nmr_constraints (int): the number of constraints defined by this function
Returns:
SimpleConstraintFunction: the CL constraint function parsed from the given string
"""
return_type, function_name, parameter_list, body = split_cl_function(cl_function)
return SimpleConstraintFunction(return_type, function_name, parameter_list, body, dependencies=dependencies,
nmr_constraints=nmr_constraints) | 0.006878 |
def parse_question_container(html_question):
"""Parse the question info container of a given HTML question.
The method parses the information available in the question information
container. The container can have up to 2 elements: the first one
contains the information related to the user who generated the question
and the date (if any). The second one contains the date of the update
and the user who updated it (if not the same user who generated the question).
:param html_question: raw HTML question element
:returns: an object with the parsed information
"""
container_info = {}
bs_question = bs4.BeautifulSoup(html_question, "html.parser")
question = AskbotParser._find_question_container(bs_question)
container = question.select("div.post-update-info")
created = container[0]
container_info['author'] = AskbotParser.parse_user_info(created)
try:
container[1]
except IndexError:
pass
else:
updated = container[1]
if AskbotParser.parse_user_info(updated):
container_info['updated_by'] = AskbotParser.parse_user_info(updated)
return container_info | 0.003928 |
def iter_relation(self, relation, **kwargs):
"""
Generic method to iterate relation from any resource.
Query the client with the object's known parameters
and try to retrieve the provided relation type. This
is not meant to be used directly by a client, it's more
a helper method for the child objects.
"""
# pylint: disable=E1101
index = 0
while 1:
items = self.get_relation(relation, index=index, **kwargs)
for item in items:
yield (item)
if len(items) == 0:
break
index += len(items) | 0.003086 |
def send_success_response(self, msgid, methodname):
"""Send a CIM-XML response message back to the WBEM server that
indicates success."""
resp_xml = cim_xml.CIM(
cim_xml.MESSAGE(
cim_xml.SIMPLEEXPRSP(
cim_xml.EXPMETHODRESPONSE(
methodname),
), # noqa: E123
msgid, IMPLEMENTED_PROTOCOL_VERSION),
IMPLEMENTED_CIM_VERSION, IMPLEMENTED_DTD_VERSION)
resp_body = '<?xml version="1.0" encoding="utf-8" ?>\n' + \
resp_xml.toxml()
if isinstance(resp_body, six.text_type):
resp_body = resp_body.encode("utf-8")
http_code = 200
self.send_response(http_code, http_client.responses.get(http_code, ''))
self.send_header("Content-Type", "text/html")
self.send_header("Content-Length", str(len(resp_body)))
self.send_header("CIMExport", "MethodResponse")
self.end_headers()
self.wfile.write(resp_body) | 0.001929 |
def locate_unlinked(gn, size=100, step=20, threshold=.1, blen=None):
"""Locate variants in approximate linkage equilibrium, where r**2 is
below the given `threshold`.
Parameters
----------
gn : array_like, int8, shape (n_variants, n_samples)
Diploid genotypes at biallelic variants, coded as the number of
alternate alleles per call (i.e., 0 = hom ref, 1 = het, 2 = hom alt).
size : int
Window size (number of variants).
step : int
Number of variants to advance to the next window.
threshold : float
Maximum value of r**2 to include variants.
blen : int, optional
Block length to use for chunked computation.
Returns
-------
loc : ndarray, bool, shape (n_variants)
Boolean array where True items locate variants in approximate
linkage equilibrium.
Notes
-----
The value of r**2 between each pair of variants is calculated using the
method of Rogers and Huff (2008).
"""
# check inputs
if not hasattr(gn, 'shape') or not hasattr(gn, 'dtype'):
gn = np.asarray(gn, dtype='i1')
if gn.ndim != 2:
raise ValueError('gn must have two dimensions')
# setup output
loc = np.ones(gn.shape[0], dtype='u1')
# compute in chunks to avoid loading big arrays into memory
blen = get_blen_array(gn, blen)
blen = max(blen, 10*size) # avoid too small chunks
n_variants = gn.shape[0]
for i in range(0, n_variants, blen):
# N.B., ensure overlap with next window
j = min(n_variants, i+blen+size)
gnb = np.asarray(gn[i:j], dtype='i1')
gnb = memoryview_safe(gnb)
locb = loc[i:j]
gn_locate_unlinked_int8(gnb, locb, size, step, threshold)
return loc.astype('b1') | 0.000561 |
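A small usage sketch for locate_unlinked, assuming the compiled helper gn_locate_unlinked_int8 from the same package is importable; the genotype matrix is a toy.
import numpy as np

# Hedged usage sketch: 4 biallelic variants x 3 samples; the first two variants are
# identical (r**2 == 1), so one of them should be flagged as linked and pruned.
gn = np.array([[0, 1, 2],
               [0, 1, 2],
               [1, 1, 1],
               [2, 0, 0]], dtype='i1')
loc = locate_unlinked(gn, size=2, step=1, threshold=0.1)
# loc is a boolean array of length 4, True where variants are kept.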
def flasher(msg, severity=None):
"""Flask's flash if available, logging call if not"""
try:
flash(msg, severity)
except RuntimeError:
if severity == 'danger':
logging.error(msg)
else:
logging.info(msg) | 0.003831 |
def decode_complex(data, complex_names=(None, None)):
""" Decodes possibly complex data read from an HDF5 file.
Decodes possibly complex datasets read from an HDF5 file. HDF5
doesn't have a native complex type, so they are stored as
H5T_COMPOUND types with fields such as 'r' and 'i' for the real and
imaginary parts. As there is no standardization for field names, the
field names have to be given explicitly, or the fieldnames in `data`
analyzed for proper decoding to figure out the names. A variety of
reasonably expected combinations of field names are checked and used
if available to decode. If decoding is not possible, it is returned
as is.
Parameters
----------
data : arraylike
The data read from an HDF5 file, that might be complex, to
decode into the proper Numpy complex type.
complex_names : tuple of 2 str and/or Nones, optional
``tuple`` of the names to use (in order) for the real and
imaginary fields. A ``None`` indicates that various common
field names should be tried.
Returns
-------
c : decoded data or data
If `data` can be decoded into a complex type, the decoded
complex version is returned. Otherwise, `data` is returned
unchanged.
See Also
--------
encode_complex
Notes
-----
Currently looks for real field names of ``('r', 're', 'real')`` and
imaginary field names of ``('i', 'im', 'imag', 'imaginary')``
ignoring case.
"""
# Now, complex types are stored in HDF5 files as an H5T_COMPOUND type
# with fields along the lines of ('r', 're', 'real') and ('i', 'im',
# 'imag', 'imaginary') for the real and imaginary parts, which most
# likely won't be properly extracted back into making a Python
# complex type unless the proper h5py configuration is set. Since we
# can't depend on it being set and adjusting it is hazardous (the
# setting is global), it is best to just decode it manually. These
# fields are obtained from the fields of its dtype. Obviously, if
# there are no fields, then there is nothing to do.
if data.dtype.fields is None:
return data
fields = list(data.dtype.fields)
# If there aren't exactly two fields, then it can't be complex.
if len(fields) != 2:
return data
# We need to grab the field names for the real and imaginary
# parts. This will be done by seeing which list, if any, each field
# is and setting variables to the proper name if it is in it (they
# are initialized to None so that we know if one isn't found).
real_fields = ['r', 're', 'real']
imag_fields = ['i', 'im', 'imag', 'imaginary']
cnames = list(complex_names)
for s in fields:
if s.lower() in real_fields:
cnames[0] = s
elif s.lower() in imag_fields:
cnames[1] = s
# If the real and imaginary fields were found, construct the complex
# form from the fields. This is done by finding the complex type
# that they cast to, making an array, and then setting the
# parts. Otherwise, return what we were given because it isn't in
# the right form.
if cnames[0] is not None and cnames[1] is not None:
cdata = np.result_type(data[cnames[0]].dtype, \
data[cnames[1]].dtype, 'complex64').type(data[cnames[0]])
cdata.imag = data[cnames[1]]
return cdata
else:
return data | 0.000863 |
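A self-contained sketch of the decoding path, assuming only NumPy; the field names 'r' and 'i' are among those the function searches for.
import numpy as np

# Hedged usage sketch: build a compound array the way an HDF5 file would store complex data.
raw = np.zeros(3, dtype=[('r', 'f8'), ('i', 'f8')])
raw['r'] = [1.0, 2.0, 3.0]
raw['i'] = [0.5, -0.5, 0.0]
c = decode_complex(raw)
# c is now a complex128 array: [1.+0.5j, 2.-0.5j, 3.+0.j]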
def AgregarBalanceLitrosPorcentajesSolidos(self, litros_remitidos, litros_decomisados,
kg_grasa, kg_proteina, **kwargs):
"Agrega balance litros y porcentajes sólidos a la liq. (obligatorio)"
d = {'litrosRemitidos': litros_remitidos,
'litrosDecomisados': litros_decomisados,
'kgGrasa': kg_grasa,
'kgProteina': kg_proteina}
self.solicitud['balanceLitrosPorcentajesSolidos'] = d | 0.012346 |
def get_details(self, ids):
"""
Locu Venue Details API Call Wrapper
Args:
list of ids: ids of particular venues to get insights about. Can process up to 5 ids
"""
if isinstance(ids, list):
if len(ids) > 5:
ids = ids[:5]
id_param = ';'.join(ids) + '/'
else:
ids = str(ids)
id_param = ids + '/'
header, content = self._http_request(id_param)
resp = json.loads(content)
if not self._is_http_response_ok(header):
error = resp.get('error_message', 'Unknown Error')
raise HttpException(header.status, header.reason, error)
return resp | 0.006906 |
def eth_getBlockHeaderByNumber(self, number):
"""Get block header by block number.
:param number:
:return:
"""
block_hash = self.reader._get_block_hash(number)
block_number = _format_block_number(number)
return self.reader._get_block_header(block_hash, block_number) | 0.006192 |
def fetch_submissions(self, submissions_callback, *args):
"""Wrap the submissions_callback function."""
logger.debug('Fetching submissions')
submissions_callback(*args)
logger.info('Found {} submissions'.format(len(self.submissions)))
if not self.submissions:
return
self.min_date = min(x.created_utc for x in self.submissions.values())
self.max_date = max(x.created_utc for x in self.submissions.values())
self.process_submitters()
self.process_commenters() | 0.003663 |
def _setEndpoint(self, location):
'''
Set the endpoint after when Salesforce returns the URL after successful login()
'''
# suds 0.3.7+ supports multiple wsdl services, but breaks setlocation :(
# see https://fedorahosted.org/suds/ticket/261
try:
self._sforce.set_options(location = location)
except:
self._sforce.wsdl.service.setlocation(location)
self._location = location | 0.016706 |
def stat( self, *args ):
'''Check process completion and consume pending I/O data'''
self.pipe.poll()
if self.pipe.returncode is not None:
# clean up handlers and timeouts
if self.expiration is not None:
self.ioloop.remove_timeout(self.expiration)
for fd, dest in self.streams:
self.ioloop.remove_handler(fd)
# schedule the callback (but first try to read all pending data)
self.ioloop.add_callback(self.on_finish)
for fd, dest in self.streams:
while True:
try:
data = os.read(fd, 4096)
if len(data) == 0:
break
print(data.rstrip())
except:
break | 0.010976 |
def create_user(self, username, email, password, active=False,
send_email=True):
"""
A simple wrapper that creates a new :class:`User`.
:param username:
String containing the username of the new user.
:param email:
String containing the email address of the new user.
:param password:
String containing the password for the new user.
:param active:
Boolean that defines if the user requires activation by clicking
on a link in an email. Defaults to ``False``.
:param send_email:
Boolean that defines if the user should be send an email. You
could set this to ``False`` when you want to create a user in
your own code, but don't want the user to activate through email.
:return: :class:`User` instance representing the new user.
"""
user = super(AccountActivationManager, self).create_user(username, email, password)
if isinstance(user.username, str):
username = user.username.encode('utf-8')
salt, activation_key = generate_sha1(username)
user.is_active = active
user.activation_key = activation_key
user.save(using=self._db)
if send_email:
user.send_activation_email()
return user | 0.005839 |
def eval_hessian(self, *args, **kwargs):
"""
:return: Hessian evaluated at the specified point.
"""
# Evaluate the hessian model and use the resulting Ans namedtuple as a
# dict. From this, take the relevant components.
eval_hess_dict = self.hessian_model(*args, **kwargs)._asdict()
hess = [[[np.broadcast_to(eval_hess_dict.get(D(var, p1, p2), 0),
eval_hess_dict[var].shape)
for p2 in self.params]
for p1 in self.params]
for var in self
]
# Use numpy to broadcast these arrays together and then stack them along
# the parameter dimension. We do not include the component direction in
# this, because the components can have independent shapes.
for idx, comp in enumerate(hess):
hess[idx] = np.stack(np.broadcast_arrays(*comp))
Ans = variabletuple('Ans', self.keys())
return Ans(*hess) | 0.006054 |
def get_host_name():
"""Get host name provide by operating system
"""
if sys.platform == 'win32':
host = os.getenv('COMPUTERNAME')
else:
host = os.uname()[1]
return host | 0.009615 |
def get_dev_asset_details(ipaddress, auth, url):
"""Takes in ipaddress as input to fetch device assett details from HP IMC RESTFUL API
:param ipaddress: IP address of the device you wish to gather the asset details
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: object of type list containing the device asset details, with each asset contained in a dictionary
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.netassets import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> single_asset = get_dev_asset_details('10.101.0.1', auth.creds, auth.url)
>>> assert type(single_asset) is list
>>> assert 'name' in single_asset[0]
"""
get_dev_asset_url = "/imcrs/netasset/asset?assetDevice.ip=" + str(ipaddress)
f_url = url + get_dev_asset_url
# creates the URL using the payload variable as the contents
r = requests.get(f_url, auth=auth, headers=HEADERS)
# r.status_code
try:
if r.status_code == 200:
dev_asset_info = (json.loads(r.text))
if len(dev_asset_info) > 0:
dev_asset_info = dev_asset_info['netAsset']
if type(dev_asset_info) == dict:
dev_asset_info = [dev_asset_info]
if type(dev_asset_info) == list:
dev_asset_info[:] = [dev for dev in dev_asset_info if dev.get('deviceIp') == ipaddress]
return dev_asset_info
except requests.exceptions.RequestException as e:
return "Error:\n" + str(e) + ' get_dev_asset_details: An Error has occured' | 0.006311 |
def dispatch(self, *args, **kwargs):
"""
Decorate the view dispatcher with csrf_exempt.
"""
return super(EntryTrackback, self).dispatch(*args, **kwargs) | 0.01087 |
def _get_pubkey_hash(cert):
'''
Returns the sha1 hash of the modulus of a public key in a cert
Used for generating subject key identifiers
'''
sha_hash = hashlib.sha1(cert.get_pubkey().get_modulus()).hexdigest()
return _pretty_hex(sha_hash) | 0.003788 |
def set_execution_mode(self, execution_mode, notify=True):
""" An observed setter for the execution mode of the state machine status. This is necessary for the
monitoring client to update the local state machine in the same way as the root state machine of the server.
:param execution_mode: the new execution mode of the state machine
:raises exceptions.TypeError: if the execution mode is of the wrong type
"""
if not isinstance(execution_mode, StateMachineExecutionStatus):
raise TypeError("status must be of type StateMachineExecutionStatus")
self._status.execution_mode = execution_mode
if notify:
self._status.execution_condition_variable.acquire()
self._status.execution_condition_variable.notify_all()
self._status.execution_condition_variable.release() | 0.006865 |
def pl2nvp(plane):
"""
Return a unit normal vector and point that define a specified plane.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pl2nvp_c.html
:param plane: A SPICE plane.
:type plane: supporttypes.Plane
:return: A unit normal vector and point that define plane.
:rtype: tuple
"""
assert (isinstance(plane, stypes.Plane))
normal = stypes.emptyDoubleVector(3)
point = stypes.emptyDoubleVector(3)
libspice.pl2nvp_c(ctypes.byref(plane), normal, point)
return stypes.cVectorToPython(normal), stypes.cVectorToPython(point) | 0.001692 |
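A brief usage sketch for pl2nvp, assuming the companion nvc2pl wrapper from the same SPICE binding is available to construct the plane first.
# Hedged usage sketch: build the plane z = 5 from a normal vector and constant,
# then recover its unit normal and a point lying on it.
plane = nvc2pl([0.0, 0.0, 1.0], 5.0)  # assumed companion wrapper for nvc2pl_c
normal, point = pl2nvp(plane)
# normal ~ [0.0, 0.0, 1.0]; point ~ [0.0, 0.0, 5.0]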
def solve(self):
""" Runs dynamic simulation.
@rtype: dict
@return: Solution dictionary with the following keys:
- C{angles} - generator angles
- C{speeds} - generator speeds
- C{eq_tr} - q component of transient voltage behind
reactance
- C{ed_tr} - d component of transient voltage behind
reactance
- C{efd} - Excitation voltage
- C{pm} - mechanical power
- C{voltages} - bus voltages
- C{stepsize} - step size integration method
- C{errest} - estimation of integration error
- C{failed} - failed steps
- C{time} - time points
"""
t0 = time()
buses = self.dyn_case.buses
solution = NewtonPF(self.case).solve()
if not solution["converged"]:
logger.error("Power flow did not converge. Exiting...")
return {}
elif self.verbose:
logger.info("Power flow converged.")
# Construct augmented Ybus.
if self.verbose:
logger.info("Constructing augmented admittance matrix...")
gbus = [g.bus._i for g in self.dyn_generators]
ng = len(gbus)
Um = array([bus.v_magnitude for bus in buses])
Ua = array([bus.v_angle * (pi / 180.0) for bus in buses])
U0 = Um * exp(1j * Ua)
U00 = U0
augYbus = self.dyn_case.getAugYbus(U0, gbus)
augYbus_solver = splu(augYbus)
# Calculate initial machine state.
if self.verbose:
logger.info("Calculating initial state...")
Efd0, Xgen0 = self.dyn_case.generatorInit(U0)
omega0 = Xgen0[:, 1]
Id0, Iq0, Pe0 = self.dyn_case.machineCurrents(Xgen0, U0)
Vgen0 = r_[Id0, Iq0, Pe0]
# Exciter initial conditions.
Vexc0 = abs(U0[gbus])
Xexc0, Pexc0 = self.dyn_case.exciterInit(Efd0, Vexc0)
# Governor initial conditions.
Pm0 = Pe0
Xgov0, Pgov0 = self.dyn_case.governorInit(Pm0, omega0)
Vgov0 = omega0
# Check steady-state.
Fexc0 = self.dyn_case.exciter(Xexc0, Pexc0, Vexc0)
Fgov0 = self.dyn_case.governor(Xgov0, Pgov0, Vgov0)
Fgen0 = self.dyn_case.generator(Xgen0, Xexc0, Xgov0, Vgen0)
# Check Generator Steady-state
if sum(abs(Fgen0)) > 1e-06:
logger.error("Generator not in steady-state. Exiting...")
return {}
# Check Exciter Steady-state
if sum(abs(Fexc0)) > 1e-06:
logger.error("Exciter not in steady-state. Exiting...")
return {}
# Check Governor Steady-state
if sum(abs(Fgov0)) > 1e-06:
logger.error("Governor not in steady-state. Exiting...")
return {}
if self.verbose:
logger.info("System in steady-state.")
# Initialization of main stability loop.
t = -0.02 # simulate 0.02s without applying events
erst = False
failed = False
eulerfailed = False
stoptime = self.dyn_case.stoptime
if (isinstance(self.method, RungeKuttaFehlberg) or
isinstance(self.method, RungeKuttaHighamHall)):
stepsize = self.minstep
else:
stepsize = self.dyn_case.stepsize
ev = 0
eventhappened = False
i = 0
# Allocate memory for variables.
if self.verbose:
logger.info("Allocating memory...")
chunk = 5000
time = zeros(chunk)
time[0, :] = t
errest = zeros(chunk)
errest[0, :] = erst
stepsizes = zeros(chunk)
stepsizes[0, :] = stepsize
# System variables
voltages = zeros(chunk)
voltages[0, :] = U0.H
# Generator
angles = zeros((chunk, ng))
angles[0, :] = Xgen0[:, 0] * 180.0 / pi
speeds = zeros((chunk, ng))
speeds[0, :] = Xgen0[:, 0] / 2 * pi * self.dyn_case.freq
Eq_tr = zeros((chunk, ng))
Eq_tr[0, :] = Xgen0[:, 2]
Ed_tr = zeros((chunk, ng))
Ed_tr[0, :] = Xgen0[:, 3]
# Exciter and governor
Efd = zeros((chunk, ng))
Efd[0, :] = Efd0[:, 0]
PM = zeros((chunk, ng))
PM[0, :] = Pm0[:, 0]
# Main stability loop.
while t < stoptime + stepsize:
i += 1
if i % 45 == 0 and self.verbose:
logger.info("%6.2f%% completed." % t / stoptime * 100)
# Numerical Method.
Xgen0, self.Pgen0, Vgen0, Xexc0, Pexc0, Vexc0, Xgov0, Pgov0, \
Vgov0, U0, t, newstepsize = self.method.solve(t, Xgen0,
self.Pgen0, Vgen0, Xexc0, Pexc0, Vexc0, Xgov0, Pgov0,
Vgov0, augYbus_solver, gbus, stepsize)
# if self.method == MODIFIED_EULER:
# solver = ModifiedEuler(t, Xgen0, self.Pgen0, Vgen0, Xexc0,
# Pexc0, Vexc0, Xgov0, Pgov0, Vgov0,
# augYbus_solver, gbus, stepsize)
#
# Xgen0, self.Pgen0, Vgen0, Xexc0, Pexc0, Vexc0, Xgov0, Pgov0,
# Vgov0, U0, t, newstepsize = solver.solve()
# elif self.method == RUNGE_KUTTA:
# pass
# elif self.method == RUNGE_KUTTA_FEHLBERG:
# pass
# elif self.method == HIGHAM_HALL:
# pass
# elif self.method == MODIFIED_EULER2:
# pass
# else:
# raise ValueError
if eulerfailed:
logger.info("No solution found. Exiting... ")
return {}
if failed:
t = t - stepsize
# End exactly at stop time.
if t + newstepsize > stoptime:
newstepsize = stoptime - t
elif stepsize < self.minstep:
logger.info("No solution found with minimum step size. Exiting... ")
return {}
# Allocate new memory chunk if matrices are full.
if i > time.shape[0]:
time = zeros(chunk)
errest = zeros(chunk)
stepsize = zeros(chunk)
voltages = zeros(chunk)
angles = zeros((chunk, ng))
speeds = zeros((chunk, ng))
Eq_tr = zeros((chunk, ng))
Ed_tr = zeros((chunk, ng))
Efd = zeros((chunk, ng))
PM = zeros((chunk, ng))
# Save values.
stepsizes[i, :] = stepsize
errest[i, :] = erst
time[i, :] = t
voltages[i, :] = U0
# Exciters
Efd[i, :] = Xexc0[:, 0]
# TODO: Set Efd to zero when using classical generator model.
# Governors
PM[i, :] = Xgov0[:, 0]
# Generators
angles[i, :] = Xgen0[:, 0] * 180.0 / pi
speeds[i, :] = Xgen0[:, 1] * (2 * pi * self.dyn_case.freq)
Eq_tr[i, :] = Xgen0[:, 2]
Ed_tr[i, :] = Xgen0[:, 3]
# Adapt step size if event will occur in next step.
if (len(self.events) > 0 and ev <= len(self.events) and
isinstance(self.method, RungeKuttaFehlberg) and
isinstance(self.method, RungeKutta)):
if t + newstepsize >= self.events[ev].t:
if self.events[ev] - t < newstepsize:
newstepsize = self.events[ev].t - t
# Check for events.
if len(self.events) > 0 and ev <= len(self.events):
for event in self.events:
if (abs(t - self.events[ev].t) > 10 * EPS or
ev > len(self.events)):
break
else:
eventhappened = True
event.obj.set_attr(event.param, event.newval)
ev += 1
if eventhappened:
# Refactorise.
self.dyn_case.getAugYbus(U00, gbus)
U0 = self.dyn_case.solveNetwork(Xgen0, self.Pgen0,
augYbus_solver, gbus)
Id0, Iq0, Pe0 = self.dyn_case.machineCurrents(Xgen0,
self.Pgen0,
U0[gbus])
Vgen0 = r_[Id0, Iq0, Pe0]
Vexc0 = abs(U0[gbus])
# Decrease stepsize after event occured.
if (isinstance(self.method, RungeKuttaFehlberg) or
isinstance(self.method, RungeKuttaHighamHall)):
newstepsize = self.minstepsize
# If event occurs, save values at t- and t+.
i += 1
# Save values
stepsizes[i, :] = stepsize
errest[i, :] = erst
time[i, :] = t
voltages[i, :] = U0.T
# Exciters.
# Set Efd to zero when using classical generator model.
# Efd[i, :] = Xexc0[:, 1] * (flatnonzero(genmodel > 1))
# Governors.
PM[i, :] = Xgov0[:, 1]
# Generators.
angles[i, :] = Xgen0[:, 0] * 180.0 / pi
speeds[i, :] = Xgen0[:, 1] / (2.0 * pi * self.freq)
Eq_tr[i, :] = Xgen0[:, 2]
Ed_tr[i, :] = Xgen0[:, 3]
eventhappened = False
# Advance time
stepsize = newstepsize
t += stepsize
# End of main stability loop ------------------------------------------
# Output --------------------------------------------------------------
if self.verbose:
logger.info("100%% completed")
elapsed = time() - t0
logger.info("Simulation completed in %5.2f seconds." % elapsed)
# Save only the first i elements.
angles = angles[0:i, :]
speeds = speeds[0:i, :]
Eq_tr = Eq_tr[0:i, :]
Ed_tr = Ed_tr[0:i, :]
Efd = Efd[0:i, :]
PM = PM[0:i, :]
voltages = voltages[0:i, :]
stepsizes = stepsizes[0:i, :]
errest = errest[0:i, :]
time = time[0:i, :]
if self.plot:
raise NotImplementedError
return {} | 0.000941 |
def details_handler(args):
"""usage: {program} details <anchor-id> [<path>]
Get the details of a single anchor.
"""
repo = _open_repo(args)
_, anchor = _get_anchor(repo, args['<anchor-id>'])
print("""path: {file_path}
encoding: {encoding}
[before]
{before}
--------------
[topic]
{topic}
--------------
[after]
{after}
--------------
offset: {offset}
width: {width}""".format(
file_path=anchor.file_path,
encoding=anchor.encoding,
before=anchor.context.before,
topic=anchor.context.topic,
after=anchor.context.after,
offset=anchor.context.offset,
width=anchor.context.width))
return ExitCode.OK | 0.001458 |
def main():
"""The Main function/pipeline for GSEApy."""
# Parse options...
argparser = prepare_argparser()
args = argparser.parse_args()
subcommand = args.subcommand_name
if subcommand == "replot":
# reproduce plots using GSEAPY
from .gsea import Replot
rep = Replot(indir=args.indir, outdir=args.outdir, weighted_score_type=args.weight,
figsize=args.figsize, graph_num=args.graph,
format=args.format, verbose=args.verbose)
rep.run()
elif subcommand == "gsea":
# compute using GSEAPY
from .gsea import GSEA
gs = GSEA(args.data, args.gmt, args.cls, args.outdir,
args.mins, args.maxs, args.n, args.weight,
args.type, args.method, args.ascending, args.threads,
args.figsize, args.format, args.graph, args.noplot, args.seed, args.verbose)
gs.run()
elif subcommand == "prerank":
from .gsea import Prerank
pre = Prerank(args.rnk, args.gmt, args.outdir, args.label[0], args.label[1],
args.mins, args.maxs, args.n, args.weight, args.ascending, args.threads,
args.figsize, args.format, args.graph, args.noplot, args.seed, args.verbose)
pre.run()
elif subcommand == "ssgsea":
from .gsea import SingleSampleGSEA
ss = SingleSampleGSEA(data=args.data, gene_sets=args.gmt, outdir=args.outdir,
sample_norm_method=args.norm,
min_size=args.mins, max_size=args.maxs, permutation_num=args.n,
weighted_score_type=args.weight, scale=args.scale,
ascending=args.ascending, processes=args.threads,
figsize=args.figsize, format=args.format, graph_num=args.graph,
no_plot=args.noplot, seed=args.seed, verbose=args.verbose)
ss.run()
elif subcommand == "enrichr":
# calling enrichr API
from .enrichr import Enrichr
enr = Enrichr(gene_list=args.gene_list, descriptions=args.descrip,
gene_sets=args.library, organism=args.organism,
outdir=args.outdir, format=args.format, cutoff=args.thresh,
background=args.bg, figsize=args.figsize,
top_term=args.term, no_plot=args.noplot, verbose=args.verbose)
enr.run()
elif subcommand == "biomart":
from .parser import Biomart
# read input file or a argument
name, value = args.filter
if os.path.isfile(value):
with open(value, 'r') as val:
lines = val.readlines()
value = [ l.strip() for l in lines]
# run query
bm = Biomart(host=args.host, verbose=args.verbose)
bm.query(dataset=args.bg, attributes=args.attrs.split(","),
filters={name : value}, filename=args.ofile)
else:
argparser.print_help()
sys.exit(0) | 0.006215 |
def _post_login_page(self):
"""Login to Janrain."""
# Prepare post data
data = {
"form": "signInForm",
"client_id": JANRAIN_CLIENT_ID,
"redirect_uri": "https://www.fido.ca/pages/#/",
"response_type": "token",
"locale": "en-US",
"userID": self.username,
"currentPassword": self.password,
}
# HTTP request
try:
raw_res = yield from self._session.post(LOGIN_URL,
headers=self._headers,
data=data,
timeout=self._timeout)
except OSError:
raise PyFidoError("Can not sign in")
return True | 0.002469 |
def find_inspectable_lines(lines, pos):
"""Find lines in home that are inspectable.
Walk back from the err line up to 3 lines, but don't walk back over
changes in indent level.
Walk forward up to 3 lines, counting \ separated lines as 1. Don't walk
over changes in indent level (unless part of an extended line)
"""
cnt = re.compile(r'\\[\s\n]*$')
df = re.compile(r':[\s\n]*$')
ind = re.compile(r'^(\s*)')
toinspect = []
home = lines[pos]
home_indent = ind.match(home).groups()[0]
before = lines[max(pos-3, 0):pos]
before.reverse()
after = lines[pos+1:min(pos+4, len(lines))]
for line in before:
if ind.match(line).groups()[0] == home_indent:
toinspect.append(line)
else:
break
toinspect.reverse()
toinspect.append(home)
home_pos = len(toinspect)-1
continued = cnt.search(home)
for line in after:
if ((continued or ind.match(line).groups()[0] == home_indent)
and not df.search(line)):
toinspect.append(line)
continued = cnt.search(line)
else:
break
log.debug("Inspecting lines '''%s''' around %s", toinspect, home_pos)
return toinspect, home_pos | 0.00399 |
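A minimal standalone sketch (not part of the code above, sample strings invented) of the three regular expressions that drive the walk: continuation lines, block-opening lines, and leading indentation.

import re

cnt = re.compile(r'\\[\s\n]*$')   # line ends with a backslash continuation
df = re.compile(r':[\s\n]*$')     # line opens a new block (ends with ':')
ind = re.compile(r'^(\s*)')       # captures the leading indentation

line = "    total = a + \\"
print(bool(cnt.search(line)))            # True  -> treated as an extended line
print(bool(df.search("for x in y:")))    # True  -> the forward walk stops at new blocks
print(repr(ind.match(line).group(1)))    # '    ' -> indent used to detect level changes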
def node_restart(self):
"""Restarts device"""
log.info('Restart')
res = self.__exchange('node.restart()')
log.info(res)
return res | 0.011765 |
def make_vec_env(env_id, env_type, num_env, seed,
wrapper_kwargs=None,
start_index=0,
reward_scale=1.0,
flatten_dict_observations=True,
gamestate=None):
"""
Create a wrapped, monitored SubprocVecEnv for Atari and MuJoCo.
"""
wrapper_kwargs = wrapper_kwargs or {}
mpi_rank = MPI.COMM_WORLD.Get_rank() if MPI else 0
seed = seed + 10000 * mpi_rank if seed is not None else None
logger_dir = logger.get_dir()
def make_thunk(rank):
return lambda: make_env(
env_id=env_id,
env_type=env_type,
mpi_rank=mpi_rank,
subrank=rank,
seed=seed,
reward_scale=reward_scale,
gamestate=gamestate,
flatten_dict_observations=flatten_dict_observations,
wrapper_kwargs=wrapper_kwargs,
logger_dir=logger_dir
)
set_global_seeds(seed)
if num_env > 1:
return SubprocVecEnv([make_thunk(i + start_index) for i in range(num_env)])
else:
return DummyVecEnv([make_thunk(start_index)]) | 0.002646 |
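As a side note, the make_thunk(rank) factory above exists to avoid Python's late-binding closures; a tiny standalone sketch (unrelated to the gym/baselines API) of the difference:

naive = [lambda: i for i in range(3)]
print([f() for f in naive])     # [2, 2, 2] -- each lambda sees the final value of i

def make_thunk(rank):
    return lambda: rank         # rank is bound per call, as in make_vec_env above

fixed = [make_thunk(i) for i in range(3)]
print([f() for f in fixed])     # [0, 1, 2]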
def _component_of(name):
"""Get the root package or module of the passed module.
"""
# Get the registered package this model belongs to.
segments = name.split('.')
while segments:
# Is this name a registered package?
test = '.'.join(segments)
if test in settings.get('COMPONENTS', []):
# This is the component we are in.
return test
# Remove the right-most segment.
segments.pop()
if not segments and '.models' in name:
# No package was found to be registered; attempt to guess the
        # right package name; strip all occurrences of '.models' from the
        # package name.
return _component_of(name.replace('.models', '')) | 0.001359 |
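A self-contained sketch of the same longest-prefix lookup with a hard-coded component set instead of settings (the package names are invented for illustration):

def component_of(name, components):
    segments = name.split('.')
    while segments:
        test = '.'.join(segments)
        if test in components:
            return test
        segments.pop()
    return None

print(component_of('shop.orders.models.invoice', {'shop', 'shop.orders'}))  # 'shop.orders'
print(component_of('shop.unknown', {'shop', 'shop.orders'}))                # 'shop'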
def load_result_json(result_path, json_file_name):
"""load_result_json."""
json_path = os.path.join(result_path, json_file_name)
_list = []
if os.path.isfile(json_path):
with open(json_path) as json_data:
try:
_list = json.load(json_data)
except ValueError as err:
logger.error(
'Failed to load json: {}, {}'.format(json_path, err))
return _list | 0.002217 |
def version(**kwargs):
"""
Detects the new version according to git log and semver. Writes the new version
number and commits it, unless the noop-option is True.
"""
retry = kwargs.get("retry")
if retry:
click.echo('Retrying publication of the same version...')
else:
click.echo('Creating new version..')
try:
current_version = get_current_version()
except GitError as e:
click.echo(click.style(str(e), 'red'), err=True)
return False
click.echo('Current version: {0}'.format(current_version))
level_bump = evaluate_version_bump(current_version, kwargs['force_level'])
new_version = get_new_version(current_version, level_bump)
if new_version == current_version and not retry:
click.echo(click.style('No release will be made.', fg='yellow'))
return False
if kwargs['noop'] is True:
click.echo('{0} Should have bumped from {1} to {2}.'.format(
click.style('No operation mode.', fg='yellow'),
current_version,
new_version
))
return False
if config.getboolean('semantic_release', 'check_build_status'):
click.echo('Checking build status..')
owner, name = get_repository_owner_and_name()
if not check_build_status(owner, name, get_current_head_hash()):
click.echo(click.style('The build has failed', 'red'))
return False
click.echo(click.style('The build was a success, continuing the release', 'green'))
if retry:
# No need to make changes to the repo, we're just retrying.
return True
if config.get('semantic_release', 'version_source') == 'commit':
set_new_version(new_version)
commit_new_version(new_version)
tag_new_version(new_version)
click.echo('Bumping with a {0} version to {1}.'.format(level_bump, new_version))
return True | 0.002083 |
def create_auth_manifest(**kwargs):
"""
Creates a basic authentication manifest for logging in, logging out and
registering new accounts.
"""
class AuthProgram(Program):
pre_input_middleware = [AuthenticationMiddleware]
def register(username, password, password2):
"""
Decorated version of basic_register with a callback added.
"""
result = basic_register(username, password, password2)
callback = kwargs.get('post_register_callback', None)
if callback:
user = User.objects.get(username=username)
callback(user)
return result
return Manifest({
'login': [
AuthProgram(
"""
Prints out the HTML form for logging in.
""",
name="Login (form)",
input_middleware=[NotAuthenticatedOrRedirect('/')],
view=BasicView(
html=jinja_template('login.html'),
),
),
AuthProgram(
"""
Matches up the username/password against the database, and adds the auth cookies.
""",
name="Login (post)",
input_middleware=[NotAuthenticatedOrDie],
controllers=['http-post', 'cmd'],
model=[create_session, {'username': 'mock_user', 'session_key': 'XXXXXXXXXXXXXXX'}],
view=BasicView(
persist=lambda m: {'giotto_session': m['session_key']},
html=lambda m: Redirection('/'),
),
),
],
'logout': AuthProgram(
"""
Send the user here to log them out. Removes their cookies and deletes the auth session.
""",
name="Logout",
view=BasicView(
html=Redirection('/'),
),
output_middleware=[LogoutMiddleware],
),
'register': [
AuthProgram(
"""
This program returns the HTML page with the form for registering a new account.
HTTP-get only.
""",
name="Register (form)",
input_middleware=[NotAuthenticatedOrRedirect('/')],
view=BasicView(
html=jinja_template('register.html'),
),
),
AuthProgram(
"""
When you POST the register form, this program handles creating the new user, then redirecting you to '/'
""",
name="Register (post)",
controllers=['http-post'],
model=[register],
view=BasicView(
persist=lambda m: {'giotto_session': m['session_key']},
html=lambda m: Redirection('/'),
),
),
],
}) | 0.002039 |
def alpha_shape(points, alpha):
"""
Compute the alpha shape (concave hull) of a set
of points.
@param points: Iterable container of points.
@param alpha: alpha value to influence the
gooeyness of the border. Smaller numbers
don't fall inward as much as larger numbers.
Too large, and you lose everything!
"""
if len(points) < 4:
# When you have a triangle, there is no sense
# in computing an alpha shape.
return geometry.MultiPoint(list(points)).convex_hull
#coords = np.array([point.coords[0] for point in points])
coords = np.array(points)
print(coords)
tri = Delaunay(coords)
edges = set()
edge_points = []
# loop over triangles:
# ia, ib, ic = indices of corner points of the
# triangle
for ia, ib, ic in tri.vertices:
pa = coords[ia]
pb = coords[ib]
pc = coords[ic]
# Lengths of sides of triangle
a = math.sqrt((pa[0]-pb[0])**2 + (pa[1]-pb[1])**2)
b = math.sqrt((pb[0]-pc[0])**2 + (pb[1]-pc[1])**2)
c = math.sqrt((pc[0]-pa[0])**2 + (pc[1]-pa[1])**2)
# Semiperimeter of triangle
s = (a + b + c)/2.0
# Area of triangle by Heron's formula
area = math.sqrt(s*(s-a)*(s-b)*(s-c))
circum_r = a*b*c/(4.0*area)
# Here's the radius filter.
#print circum_r
if circum_r < 1.0/alpha:
add_edge(edges, edge_points, coords, ia, ib)
add_edge(edges, edge_points, coords, ib, ic)
add_edge(edges, edge_points, coords, ic, ia)
m = geometry.MultiLineString(edge_points)
triangles = list(polygonize(m))
    return (cascaded_union(triangles), edge_points) | 0.002256
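A standalone worked sketch (toy coordinates) of the circumradius filter above for a single triangle, using Heron's formula:

import math

pa, pb, pc = (0.0, 0.0), (1.0, 0.0), (0.0, 1.0)
a = math.dist(pa, pb)
b = math.dist(pb, pc)
c = math.dist(pc, pa)
s = (a + b + c) / 2.0                              # semiperimeter
area = math.sqrt(s * (s - a) * (s - b) * (s - c))  # Heron's formula
circum_r = a * b * c / (4.0 * area)                # ~0.707 for this right triangle
alpha = 1.0
print(circum_r, circum_r < 1.0 / alpha)            # True -> all three edges would be added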
def _prttex_summary_cnts(self, prt, cnts):
"""Write summary of level and depth counts for active GO Terms."""
# Count level(shortest path to root) and depth(longest path to root)
# values for all unique GO Terms.
prt.write("\n\n% LaTeX Table for GO counts at each level and depth in the GO DAG\n")
prt.write(r"\begin{table}[bt]" "\n")
prt.write(r"\begin{tabular}{|r |r |r |r |r |r |r|}" "\n")
title = self.title.replace('_', r'\_')
prt.write(r"\hline" "\n")
prt.write(r"\rowcolor{gray!10}" "\n")
prt.write(" ".join([r"\multicolumn{7}{|l|}{", title, r"} \\", "\n"]))
prt.write(r"\hline" "\n")
prt.write(r"\rowcolor{gray!10}" "\n")
prt.write(r"Depth &" "\n")
prt.write(r"\multicolumn{3}{c|}{Depth} &" "\n")
prt.write(r"\multicolumn{3}{c|}{Level} \\" "\n")
prt.write(r"\cline{2-7}" "\n")
prt.write(r"\rowcolor{gray!10}" "\n")
prt.write(r"or Level & BP & MF & CC & BP & MF & CC \\" "\n")
prt.write(r"\hline" "\n")
max_val = max(max(dep for dep in cnts['depth']), max(lev for lev in cnts['level']))
for i in range(max_val+1):
vals = ['{:>5}'.format(cnts[desc][i][ns]) for desc in cnts for ns in self.nss]
self.log.write('{:>02} & {} \\\\\n'.format(i, ' & '.join(vals)))
if i%2 == 0:
prt.write(r"\rowcolor{gray!7}" "\n")
prt.write(r"\hline" "\n")
prt.write(r"\end{tabular}" "\n")
prt.write(r"\end{table}" "\n") | 0.003871 |
def save(self, obj):
"""Required functionality."""
if not obj.id:
obj.id = uuid()
stored_data = {
'_id': obj.id,
'value': json.loads(obj.to_data())
}
index_vals = obj.indexes() or {}
for key in obj.__class__.index_names() or []:
val = index_vals.get(key, '')
stored_data[key] = str(val)
coll = self.get_collection(obj.__class__.get_table_name())
coll.update({"_id": obj.id}, stored_data, upsert=True) | 0.003795 |
def iteritems_breadth_first(a_mapping, include_dicts=False):
"""a generator that returns all the keys in a set of nested
Mapping instances. The keys take the form X.Y.Z"""
subordinate_mappings = []
for key, value in six.iteritems(a_mapping):
if isinstance(value, collections.Mapping):
subordinate_mappings.append((key, value))
if include_dicts:
yield key, value
else:
yield key, value
for key, a_map in subordinate_mappings:
for sub_key, value in iteritems_breadth_first(a_map, include_dicts):
yield '%s.%s' % (key, sub_key), value | 0.001563 |
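For reference, a Python-3-only sketch of the same dotted-key flattening (collections.Mapping was removed in Python 3.10, so collections.abc.Mapping is used; the include_dicts option is omitted to keep the sketch short):

from collections.abc import Mapping

def iter_flat(mapping, prefix=''):
    nested = []
    for key, value in mapping.items():
        dotted = prefix + key
        if isinstance(value, Mapping):
            nested.append((dotted, value))
        else:
            yield dotted, value
    for dotted, child in nested:
        yield from iter_flat(child, prefix=dotted + '.')

print(dict(iter_flat({'a': 1, 'b': {'c': 2, 'd': {'e': 3}}})))
# {'a': 1, 'b.c': 2, 'b.d.e': 3}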
def get_core(self):
"""
Get an unsatisfiable core if the formula was previously
unsatisfied.
"""
if self.minisat and self.status == False:
return pysolvers.minisatgh_core(self.minisat) | 0.012245 |
def parse_variant_playlist(cls, session_, url, name_key="name",
name_prefix="", check_streams=False,
force_restart=False, name_fmt=None,
start_offset=0, duration=None,
**request_params):
"""Attempts to parse a variant playlist and return its streams.
:param url: The URL of the variant playlist.
:param name_key: Prefer to use this key as stream name, valid keys are:
name, pixels, bitrate.
:param name_prefix: Add this prefix to the stream names.
:param check_streams: Only allow streams that are accessible.
:param force_restart: Start at the first segment even for a live stream
:param name_fmt: A format string for the name, allowed format keys are
name, pixels, bitrate.
"""
locale = session_.localization
# Backwards compatibility with "namekey" and "nameprefix" params.
name_key = request_params.pop("namekey", name_key)
name_prefix = request_params.pop("nameprefix", name_prefix)
audio_select = session_.options.get("hls-audio-select") or []
res = session_.http.get(url, exception=IOError, **request_params)
try:
parser = hls_playlist.load(res.text, base_uri=res.url)
except ValueError as err:
raise IOError("Failed to parse playlist: {0}".format(err))
streams = {}
for playlist in filter(lambda p: not p.is_iframe, parser.playlists):
names = dict(name=None, pixels=None, bitrate=None)
audio_streams = []
fallback_audio = []
default_audio = []
preferred_audio = []
for media in playlist.media:
if media.type == "VIDEO" and media.name:
names["name"] = media.name
elif media.type == "AUDIO":
audio_streams.append(media)
for media in audio_streams:
# Media without a uri is not relevant as external audio
if not media.uri:
continue
if not fallback_audio and media.default:
fallback_audio = [media]
                # if the media is "autoselect" and it better matches the user's preferences, use that
# instead of default
if not default_audio and (media.autoselect and locale.equivalent(language=media.language)):
default_audio = [media]
                # select the first audio stream that matches the user's explicit language selection
if (('*' in audio_select or media.language in audio_select or media.name in audio_select) or
((not preferred_audio or media.default) and locale.explicit and locale.equivalent(
language=media.language))):
preferred_audio.append(media)
# final fallback on the first audio stream listed
fallback_audio = fallback_audio or (len(audio_streams) and
audio_streams[0].uri and [audio_streams[0]])
if playlist.stream_info.resolution:
width, height = playlist.stream_info.resolution
names["pixels"] = "{0}p".format(height)
if playlist.stream_info.bandwidth:
bw = playlist.stream_info.bandwidth
if bw >= 1000:
names["bitrate"] = "{0}k".format(int(bw / 1000.0))
else:
names["bitrate"] = "{0}k".format(bw / 1000.0)
if name_fmt:
stream_name = name_fmt.format(**names)
else:
stream_name = (names.get(name_key) or names.get("name") or
names.get("pixels") or names.get("bitrate"))
if not stream_name:
continue
if stream_name in streams: # rename duplicate streams
stream_name = "{0}_alt".format(stream_name)
num_alts = len(list(filter(lambda n: n.startswith(stream_name), streams.keys())))
# We shouldn't need more than 2 alt streams
if num_alts >= 2:
continue
elif num_alts > 0:
stream_name = "{0}{1}".format(stream_name, num_alts + 1)
if check_streams:
try:
session_.http.get(playlist.uri, **request_params)
except KeyboardInterrupt:
raise
except Exception:
continue
external_audio = preferred_audio or default_audio or fallback_audio
if external_audio and FFMPEGMuxer.is_usable(session_):
external_audio_msg = ", ".join([
"(language={0}, name={1})".format(x.language, (x.name or "N/A"))
for x in external_audio
])
log.debug("Using external audio tracks for stream {0} {1}", name_prefix + stream_name,
external_audio_msg)
stream = MuxedHLSStream(session_,
video=playlist.uri,
audio=[x.uri for x in external_audio if x.uri],
force_restart=force_restart,
start_offset=start_offset,
duration=duration,
**request_params)
else:
stream = cls(session_,
playlist.uri,
force_restart=force_restart,
start_offset=start_offset,
duration=duration,
**request_params)
streams[name_prefix + stream_name] = stream
return streams | 0.002645 |
def distances(a, b, shape, squared=False, axis=1):
'''
distances(a, b, (n,d)) yields a potential function whose output is equivalent to the row-norms
of reshape(a(x), (n,d)) - reshape(b(x), (n,d)).
The shape argument (n,m) may alternately be a matrix of parameter indices, as can be passed to
row_norms and col_norms.
The following optional arguments are accepted:
* squared (default: False) specifies whether the output should be the square distance or the
distance.
* axis (default: 1) specifies whether the rows (axis = 1) or columns (axis = 0) are treated
as the vectors between which the distances should be calculated.
'''
a = to_potential(a)
b = to_potential(b)
if axis == 1: return row_norms(shape, a - b, squared=squared)
else: return col_norms(shape, a - b, squared=squared) | 0.010321 |
def AllTypes():
"""
Get a list of all available asset types.
Returns:
list: of AssetType items.
"""
return [AssetType.CreditFlag, AssetType.DutyFlag, AssetType.GoverningToken,
AssetType.UtilityToken, AssetType.Currency, AssetType.Share,
AssetType.Invoice, AssetType.Token] | 0.008403 |
def GenerateLibSig(short_name):
"""Generates a library signature suitable for a user agent field.
Args:
short_name: The short, product-specific string name for the library.
Returns:
A library signature string to append to user-supplied user-agent value.
"""
with _UTILITY_LOCK:
utilities_used = ', '.join([utility for utility
in sorted(_utility_registry)])
_utility_registry.Clear()
if utilities_used:
return ' (%s, %s, %s, %s)' % (short_name, _COMMON_LIB_SIG, _PYTHON_VERSION,
utilities_used)
else:
return ' (%s, %s, %s)' % (short_name, _COMMON_LIB_SIG, _PYTHON_VERSION) | 0.007386 |
def skip_if(self, condition: bool, default: Any = None) -> 'Question':
"""Skip the question if flag is set and return the default instead."""
self.should_skip_question = condition
self.default = default
return self | 0.008097 |
def reference_id_from_html(html):
"""\
Extracts the cable's reference identifier from the provided HTML string.
`html`
The HTML page of the cable.
"""
m = _REFERENCE_ID_FROM_HTML_PATTERN.search(html)
if m:
return m.group(1)
raise ValueError("Cannot extract the cable's reference id") | 0.003049 |
def StoreCSRFCookie(user, response):
"""Decorator for WSGI handler that inserts CSRF cookie into response."""
csrf_token = GenerateCSRFToken(user, None)
response.set_cookie(
"csrftoken", csrf_token, max_age=CSRF_TOKEN_DURATION.seconds) | 0.016129 |
def update(self):
"""
Updates the bundle
"""
with self._lock:
# Was it active ?
restart = self._state == Bundle.ACTIVE
# Send the update event
self._fire_bundle_event(BundleEvent.UPDATE_BEGIN)
try:
# Stop the bundle
self.stop()
except:
# Something wrong occurred, notify listeners
self._fire_bundle_event(BundleEvent.UPDATE_FAILED)
raise
# Change the source file age
module_stat = None
module_file = getattr(self.__module, "__file__", None)
if module_file is not None and os.path.isfile(module_file):
try:
module_stat = os.stat(module_file)
# Change modification time to bypass weak time resolution
# of the underlying file system
os.utime(
module_file,
(module_stat.st_atime, module_stat.st_mtime + 1),
)
except OSError:
# Can't touch the file
_logger.warning(
"Failed to update the modification time of '%s'. "
"The bundle update might not reflect the latest "
"changes.",
module_file,
)
# Clean up the module constants (otherwise kept by reload)
# Keep special members (__name__, __file__, ...)
old_content = self.__module.__dict__.copy()
for name in list(self.__module.__dict__):
if not (name.startswith("__") and name.endswith("__")):
del self.__module.__dict__[name]
try:
# Reload the module
reload_module(self.__module)
except (ImportError, SyntaxError) as ex:
# Exception raised if the file is unreadable
_logger.exception("Error updating %s: %s", self.__name, ex)
# Reset module content
self.__module.__dict__.clear()
self.__module.__dict__.update(old_content)
if module_stat is not None:
try:
# Reset times
os.utime(
module_file,
(module_stat.st_atime, module_stat.st_mtime),
)
except OSError:
# Shouldn't occur, since we succeeded before the update
_logger.debug(
"Failed to reset the modification time of '%s'",
module_file,
)
if restart:
try:
# Re-start the bundle
self.start()
except:
# Something wrong occurred, notify listeners
self._fire_bundle_event(BundleEvent.UPDATE_FAILED)
raise
# Bundle update finished
self._fire_bundle_event(BundleEvent.UPDATED) | 0.001252 |
def monday_of_week(year, week):
"""
Returns a datetime for the monday of the given week of the given year.
"""
str_time = time.strptime('{0} {1} 1'.format(year, week), '%Y %W %w')
date = timezone.datetime(year=str_time.tm_year, month=str_time.tm_mon,
day=str_time.tm_mday, tzinfo=timezone.utc)
if timezone.datetime(year, 1, 4).isoweekday() > 4:
# ISO 8601 where week 1 is the first week that has at least 4 days in
# the current year
date -= timezone.timedelta(days=7)
return date | 0.001779 |
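For comparison, a standalone sketch of a Monday lookup using date.fromisocalendar (Python 3.8+), which gives the ISO-8601 week Monday directly; the helper above approximates the same result with the %W/%w round-trip plus an adjustment:

from datetime import date

print(date.fromisocalendar(2019, 1, 1))   # 2018-12-31: ISO week 1 of 2019 starts in 2018
print(date.fromisocalendar(2019, 20, 1))  # 2019-05-13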
def rerun(client, revision, roots, siblings, inputs, paths):
"""Recreate files generated by a sequence of ``run`` commands."""
graph = Graph(client)
outputs = graph.build(paths=paths, revision=revision)
# Check or extend siblings of outputs.
outputs = siblings(graph, outputs)
output_paths = {node.path for node in outputs}
# Normalize and check all starting paths.
roots = {graph.normalize_path(root) for root in roots}
    assert not roots & output_paths, '--from collides with output paths'
# Generate workflow and check inputs.
# NOTE The workflow creation is done before opening a new file.
workflow = inputs(
client,
graph.ascwl(
input_paths=roots,
output_paths=output_paths,
outputs=outputs,
)
)
# Make sure all inputs are pulled from a storage.
client.pull_paths_from_storage(
*(path for _, path in workflow.iter_input_files(client.workflow_path))
)
# Store the generated workflow used for updating paths.
import yaml
output_file = client.workflow_path / '{0}.cwl'.format(uuid.uuid4().hex)
with output_file.open('w') as f:
f.write(
yaml.dump(
ascwl(
workflow,
filter=lambda _, x: x is not None,
basedir=client.workflow_path,
),
default_flow_style=False
)
)
# Execute the workflow and relocate all output files.
from ._cwl import execute
# FIXME get new output paths for edited tools
# output_paths = {path for _, path in workflow.iter_output_files()}
execute(
client,
output_file,
output_paths=output_paths,
) | 0.000567 |
async def search_and_download(album, artist, format, size, out_filepath, *, size_tolerance_prct, amazon_tlds, no_lq_sources,
async_loop):
""" Search and download a cover, return True if success, False instead. """
# register sources
source_args = (size, size_tolerance_prct)
cover_sources = [sources.LastFmCoverSource(*source_args),
sources.AmazonCdCoverSource(*source_args),
sources.AmazonDigitalCoverSource(*source_args)]
for tld in amazon_tlds:
cover_sources.append(sources.AmazonCdCoverSource(*source_args, tld=tld))
if not no_lq_sources:
cover_sources.append(sources.GoogleImagesWebScrapeCoverSource(*source_args))
# schedule search work
search_futures = []
for cover_source in cover_sources:
coroutine = cover_source.search(album, artist)
future = asyncio.ensure_future(coroutine, loop=async_loop)
search_futures.append(future)
# wait for it
await asyncio.wait(search_futures, loop=async_loop)
# get results
results = []
for future in search_futures:
source_results = future.result()
results.extend(source_results)
# sort results
results = await CoverSourceResult.preProcessForComparison(results, size, size_tolerance_prct)
results.sort(reverse=True,
key=functools.cmp_to_key(functools.partial(CoverSourceResult.compare,
target_size=size,
size_tolerance_prct=size_tolerance_prct)))
if not results:
logging.getLogger("Main").info("No results")
# download
for result in results:
try:
await result.get(format, size, size_tolerance_prct, out_filepath)
except Exception as e:
logging.getLogger("Main").warning("Download of %s failed: %s %s" % (result,
e.__class__.__qualname__,
e))
continue
else:
return True
return False | 0.015827 |
def visit_shapes(self, expr: ShExJ.shapeExpr, f: Callable[[Any, ShExJ.shapeExpr, "Context"], None], arg_cntxt: Any,
visit_center: _VisitorCenter = None, follow_inner_shapes: bool=True) -> None:
"""
Visit expr and all of its "descendant" shapes.
:param expr: root shape expression
:param f: visitor function
:param arg_cntxt: accompanying context for the visitor function
:param visit_center: Recursive visit context. (Not normally supplied on an external call)
:param follow_inner_shapes: Follow nested shapes or just visit on outer level
"""
if visit_center is None:
visit_center = _VisitorCenter(f, arg_cntxt)
has_id = getattr(expr, 'id', None) is not None
if not has_id or not (visit_center.already_seen_shape(expr.id)
or visit_center.actively_visiting_shape(expr.id)):
# Visit the root expression
if has_id:
visit_center.start_visiting_shape(expr.id)
f(arg_cntxt, expr, self)
# Traverse the expression and visit its components
if isinstance(expr, (ShExJ.ShapeOr, ShExJ.ShapeAnd)):
for expr2 in expr.shapeExprs:
self.visit_shapes(expr2, f, arg_cntxt, visit_center, follow_inner_shapes=follow_inner_shapes)
elif isinstance(expr, ShExJ.ShapeNot):
self.visit_shapes(expr.shapeExpr, f, arg_cntxt, visit_center, follow_inner_shapes=follow_inner_shapes)
elif isinstance(expr, ShExJ.Shape):
if expr.expression is not None and follow_inner_shapes:
self.visit_triple_expressions(expr.expression,
lambda ac, te, cntxt: self._visit_shape_te(te, visit_center),
arg_cntxt,
visit_center)
elif isinstance_(expr, ShExJ.shapeExprLabel):
if not visit_center.actively_visiting_shape(str(expr)) and follow_inner_shapes:
visit_center.start_visiting_shape(str(expr))
self.visit_shapes(self.shapeExprFor(expr), f, arg_cntxt, visit_center)
visit_center.done_visiting_shape(str(expr))
if has_id:
visit_center.done_visiting_shape(expr.id) | 0.006188 |
def execute_command(self, command, cwd=None, stdout_captured=None):
"""Execute a command at cwd, saving its normal output at
stdout_captured. Errors, defined as nonzero return code or a failure
to start execution, will raise a CompilerError exception with a
description of the cause. They do not write output.
This is file-system safe (any valid file names are allowed, even with
spaces or crazy characters) and OS agnostic (existing and future OSes
that Python supports should already work).
The only thing weird here is that any incoming command arg item may
itself be a tuple. This allows compiler implementations to look clean
while supporting historical string config settings and maintaining
backwards compatibility. Thus, we flatten one layer deep.
((env, foocomp), infile, (-arg,)) -> (env, foocomp, infile, -arg)
"""
argument_list = []
for flattening_arg in command:
if isinstance(flattening_arg, string_types):
argument_list.append(flattening_arg)
else:
argument_list.extend(flattening_arg)
# The first element in argument_list is the program that will be executed; if it is '', then
# a PermissionError will be raised. Thus empty arguments are filtered out from argument_list
argument_list = list(filter(None, argument_list))
stdout = None
try:
# We always catch stdout in a file, but we may not have a use for it.
temp_file_container = cwd or os.path.dirname(stdout_captured or "") or os.getcwd()
with NamedTemporaryFile(delete=False, dir=temp_file_container) as stdout:
compiling = subprocess.Popen(argument_list, cwd=cwd,
stdout=stdout,
stderr=subprocess.PIPE)
_, stderr = compiling.communicate()
set_std_streams_blocking()
if compiling.returncode != 0:
stdout_captured = None # Don't save erroneous result.
raise CompilerError(
"{0!r} exit code {1}\n{2}".format(argument_list, compiling.returncode, stderr),
command=argument_list,
error_output=stderr)
# User wants to see everything that happened.
if self.verbose:
with open(stdout.name) as out:
print(out.read())
print(stderr)
except OSError as e:
stdout_captured = None # Don't save erroneous result.
raise CompilerError(e, command=argument_list,
error_output=text_type(e))
finally:
# Decide what to do with captured stdout.
if stdout:
if stdout_captured:
shutil.move(stdout.name, os.path.join(cwd or os.curdir, stdout_captured))
else:
os.remove(stdout.name) | 0.002931 |
def contains_variables_from_set(expression, variables):
"""Returns True iff the expression contains any of the variables from the given set."""
if hasattr(expression, 'variable_name') and expression.variable_name in variables:
return True
if isinstance(expression, Operation):
return any(contains_variables_from_set(o, variables) for o in op_iter(expression))
return False | 0.009901 |
def export_xml_file(self, directory, filename):
"""
Exports diagram inner graph to BPMN 2.0 XML file (with Diagram Interchange data).
        :param directory: string representing output directory,
:param filename: string representing output file name.
"""
bpmn_export.BpmnDiagramGraphExport.export_xml_file(directory, filename, self) | 0.010638 |
def apply_calibration(self, strain):
"""Apply calibration model
This applies cubic spline calibration to the strain.
Parameters
----------
strain : FrequencySeries
The strain to be recalibrated.
Return
------
strain_adjusted : FrequencySeries
The recalibrated strain.
"""
amplitude_parameters =\
[self.params['amplitude_{}_{}'.format(self.ifo_name, ii)]
for ii in range(self.n_points)]
amplitude_spline = UnivariateSpline(self.spline_points,
amplitude_parameters)
delta_amplitude = amplitude_spline(strain.sample_frequencies.numpy())
phase_parameters =\
[self.params['phase_{}_{}'.format(self.ifo_name, ii)]
for ii in range(self.n_points)]
phase_spline = UnivariateSpline(self.spline_points, phase_parameters)
delta_phase = phase_spline(strain.sample_frequencies.numpy())
strain_adjusted = strain * (1.0 + delta_amplitude)\
* (2.0 + 1j * delta_phase) / (2.0 - 1j * delta_phase)
return strain_adjusted | 0.001706 |
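A standalone numerical sketch of the spline correction above, with invented node frequencies and offsets and a flat placeholder strain; it only illustrates the interpolation step, not a real calibration model:

import numpy as np
from scipy.interpolate import UnivariateSpline

spline_points = np.array([20.0, 50.0, 100.0, 200.0, 400.0])    # node frequencies (Hz)
amp_nodes = np.array([0.02, 0.01, 0.0, -0.01, -0.02])          # fractional amplitude offsets
phase_nodes = np.array([0.05, 0.02, 0.0, -0.02, -0.05])        # phase offsets (rad)

freqs = np.linspace(20.0, 400.0, 1000)
strain = np.ones_like(freqs, dtype=complex)                    # placeholder strain

delta_amplitude = UnivariateSpline(spline_points, amp_nodes)(freqs)
delta_phase = UnivariateSpline(spline_points, phase_nodes)(freqs)
strain_adjusted = (strain * (1.0 + delta_amplitude)
                   * (2.0 + 1j * delta_phase) / (2.0 - 1j * delta_phase))
print(strain_adjusted[:3])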
def keyevent_to_keyseq(self, event):
"""Return a QKeySequence representation of the provided QKeyEvent."""
self.keyPressEvent(event)
event.accept()
return self.keySequence() | 0.009569 |
def parse(self, scope):
"""Parse node
args:
scope (Scope): current scope
raises:
SyntaxError
returns:
parsed
"""
if not self.parsed:
self.parsed = ''.join(self.process(self.tokens, scope))
return self.parsed | 0.006431 |
def _parse_validators(valids):
"""Parse a list of validator names or n-tuples, checking for errors.
Returns:
list((func_name, [args...])): A list of validator function names and a
potentially empty list of optional parameters for each function.
"""
outvals = []
for val in valids:
if isinstance(val, str):
args = []
elif len(val) > 1:
args = val[1:]
val = val[0]
else:
raise ValidationError("You must pass either an n-tuple or a string to define a validator", validator=val)
name = "validate_%s" % str(val)
outvals.append((name, args))
return outvals | 0.002911 |
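A short usage sketch, assuming _parse_validators above is importable; note that tuple arguments come back as the tuple slice val[1:] rather than a list:

print(_parse_validators(['int', ('range', 0, 10)]))
# [('validate_int', []), ('validate_range', (0, 10))]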
def fir_remez_bsf(f_pass1, f_stop1, f_stop2, f_pass2, d_pass, d_stop,
fs = 1.0, N_bump=5):
"""
Design an FIR bandstop filter using remez with order
determination. The filter order is determined based on
f_pass1 Hz, f_stop1 Hz, f_stop2 Hz, f_pass2 Hz, and the
desired passband ripple d_pass dB and stopband attenuation
d_stop dB all relative to a sampling rate of fs Hz.
Mark Wickert October 2016, updated October 2018
"""
n, ff, aa, wts = bandstop_order(f_pass1, f_stop1, f_stop2, f_pass2,
d_pass, d_stop, fsamp=fs)
# Bump up the order by N_bump to bring down the final d_pass & d_stop
# Initially make sure the number of taps is even so N_bump needs to be odd
if np.mod(n,2) != 0:
n += 1
N_taps = n
N_taps += N_bump
b = signal.remez(N_taps, ff, aa[0::2], wts, Hz=2,
maxiter = 25, grid_density = 16)
print('N_bump must be odd to maintain odd filter length')
print('Remez filter taps = %d.' % N_taps)
return b | 0.010999 |
def forward_transform_fn(bijector):
"""Makes a function which applies a list of Bijectors' `forward`s."""
if not mcmc_util.is_list_like(bijector):
bijector = [bijector]
def fn(transformed_state_parts):
return [b.forward(sp) for b, sp in zip(bijector, transformed_state_parts)]
return fn | 0.016447 |
def kraus_iscomplete(kraus: Kraus) -> bool:
"""Returns True if the collection of (weighted) Kraus operators are
complete. (Which is necessary for a CPTP map to preserve trace)
"""
qubits = kraus.qubits
N = kraus.qubit_nb
ident = Gate(np.eye(2**N), qubits) # FIXME
tensors = [(op.H @ op @ ident).asoperator() for op in kraus.operators]
tensors = [t*w for t, w in zip(tensors, kraus.weights)]
tensor = reduce(np.add, tensors)
res = Gate(tensor, qubits)
return almost_identity(res) | 0.001898 |
def adjacent(labels):
'''Return a binary mask of all pixels which are adjacent to a pixel of
a different label.
'''
high = labels.max()+1
if high > np.iinfo(labels.dtype).max:
labels = labels.astype(np.int)
image_with_high_background = labels.copy()
image_with_high_background[labels == 0] = high
min_label = scind.minimum_filter(image_with_high_background,
footprint=np.ones((3,3),bool),
mode = 'constant',
cval = high)
max_label = scind.maximum_filter(labels,
footprint=np.ones((3,3),bool),
mode = 'constant',
cval = 0)
return (min_label != max_label) & (labels > 0) | 0.017647 |
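A standalone sketch of the min/max-filter trick above on a tiny made-up label image; only the label-1 and label-2 pixels that touch each other are marked, while adjacency to background (0) is ignored because background is remapped to the high value:

import numpy as np
import scipy.ndimage as scind

labels = np.array([[1, 1, 2],
                   [1, 1, 2],
                   [0, 0, 0]])
high = labels.max() + 1
img = labels.copy()
img[labels == 0] = high
min_label = scind.minimum_filter(img, footprint=np.ones((3, 3), bool),
                                 mode='constant', cval=high)
max_label = scind.maximum_filter(labels, footprint=np.ones((3, 3), bool),
                                 mode='constant', cval=0)
print((min_label != max_label) & (labels > 0))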
def convert(self, vroot, entry_variables):
"""
All functions are replaced with the same `new` function.
Args:
vroot (:obj:`Variable`): NNabla Variable
entry_variables (:obj:`Variable`): Entry variable from which the conversion starts.
"""
self.graph_info = GraphInfo(vroot)
self.entry_variables = entry_variables
with nn.parameter_scope(self.name):
# Function loop in the forward order
for t, func in enumerate(self.graph_info.funcs):
if func.name in self.inner_prod_functions:
inner_prod_func = func
o = self._fixed_point_weight_conversion(inner_prod_func)
continue
# Identity conversion
o = self._identity_conversion(func)
self.end_variable = o
if self.call_forward:
o.forward(clear_buffer=True)
return self.end_variable | 0.00308 |
def to_export(export):
"""Serializes export to id string
:param export: object to serialize
:return: string id
"""
from sevenbridges.models.storage_export import Export
if not export:
raise SbgError('Export is required!')
elif isinstance(export, Export):
return export.id
elif isinstance(export, six.string_types):
return export
else:
raise SbgError('Invalid export parameter!') | 0.004016 |
def basic(username, password):
"""Add basic authentication to the requests of the clients."""
none()
_config.username = username
_config.password = password | 0.005814 |
def rl_force_redisplay() -> None: # pragma: no cover
"""
Causes readline to display the prompt and input text wherever the cursor is and start
reading input from this location. This is the proper way to restore the input line after
printing to the screen
"""
if not sys.stdout.isatty():
return
if rl_type == RlType.GNU:
readline_lib.rl_forced_update_display()
# After manually updating the display, readline asks that rl_display_fixed be set to 1 for efficiency
display_fixed = ctypes.c_int.in_dll(readline_lib, "rl_display_fixed")
display_fixed.value = 1
elif rl_type == RlType.PYREADLINE:
# Call _print_prompt() first to set the new location of the prompt
readline.rl.mode._print_prompt()
readline.rl.mode._update_line() | 0.004866 |
def _get_match(self, key):
"""
Gets a MatchObject for the given key.
Args:
key (str): Key of the property to look-up.
Return:
MatchObject: The discovered match.
"""
return self._get_string_match(key=key) or \
self._get_non_string_match(key=key) | 0.006042 |
def set_tag(tag, value):
"""
Set the tag 'tag' to the value True or False.
:param value: should be a boolean
:param tag: should be the id of the tag. Can not starts with '*auto-tag-'
"""
if not tag.startswith("*auto-tag-"):
rdict = load_feedback()
tests = rdict.setdefault("tests", {})
tests[tag] = (value == True)
save_feedback(rdict) | 0.01519 |
def flux_matrix(T, pi, qminus, qplus, netflux=True):
r"""Compute the flux.
Parameters
----------
T : (M, M) scipy.sparse matrix
Transition matrix
pi : (M,) ndarray
Stationary distribution corresponding to T
qminus : (M,) ndarray
Backward comittor
qplus : (M,) ndarray
Forward committor
netflux : boolean
True: net flux matrix will be computed
False: gross flux matrix will be computed
Returns
-------
flux : (M, M) scipy.sparse matrix
Matrix of flux values between pairs of states.
"""
D1 = diags((pi * qminus,), (0,))
D2 = diags((qplus,), (0,))
flux = D1.dot(T.dot(D2))
"""Remove self-fluxes"""
flux = flux - diags(flux.diagonal(), 0)
"""Return net or gross flux"""
if netflux:
return to_netflux(flux)
else:
return flux | 0.001139 |
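A standalone numerical sketch of the same scaling on a small reversible 3-state chain with A={0} and B={2}; the committor values below satisfy the committor equations for this T, and the net flux is taken here as the positive part of f - f^T (a stand-in for the library's to_netflux):

import numpy as np
from scipy.sparse import csr_matrix, diags

T = csr_matrix(np.array([[0.8, 0.2, 0.0],
                         [0.1, 0.8, 0.1],
                         [0.0, 0.2, 0.8]]))
pi = np.array([0.25, 0.5, 0.25])      # stationary distribution of T
qminus = np.array([1.0, 0.5, 0.0])    # backward committor
qplus = np.array([0.0, 0.5, 1.0])     # forward committor

flux = diags((pi * qminus,), (0,)).dot(T.dot(diags((qplus,), (0,))))
flux = flux - diags(flux.diagonal(), 0)       # remove self-fluxes
gross = flux.toarray()
net = np.maximum(gross - gross.T, 0.0)        # net flux
print(net)                                    # only the 0 -> 1 -> 2 pathway carries flux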
def propagate_astrometry_and_covariance_matrix(self, a0, c0, t0, t1):
"""
Propagate the covariance matrix of the astrometric parameters and radial proper motion of a
source from epoch t0 to epoch t1.
Code based on the Hipparcos Fortran implementation by Lennart Lindegren.
Parameters
----------
a0 : array_like
6-element vector: (phi, theta, parallax, muphistar, mutheta, vrad) in units of (radians,
radians, mas, mas/yr, mas/yr, km/s). Shape of a should be (6,) or (6,N), with N the number of
sources for which the astrometric parameters are provided.
c0 : array_like
Covariance matrix stored in a 6x6 element array. This can be constructed from the columns
listed in the Gaia catalogue. The units are [mas^2, mas^2/yr, mas^2/yr^2] for the various
elements. Note that the elements in the 6th row and column should be:
c[6,i]=c[i,6]=c[i,3]*vrad/auKmYearPerSec for i=1,..,5 and
c[6,6]=c[3,3]*(vrad^2+vrad_error^2)/auKmYearPerSec^2+(parallax*vrad_error/auKmYearPerSec)^2
Shape of c0 should be (6,6) or (N,6,6).
t0 : float
Reference epoch (Julian years).
t1 : float
New epoch (Julian years).
Returns
-------
Astrometric parameters, including the "radial proper motion" (NOT the radial velocity), and
    covariance matrix at the new epoch as a 2D matrix with the new variances on the diagonal and the
covariance in the off-diagonal elements.
"""
zero, one, two, three = 0, 1, 2, 3
tau = t1-t0
# Calculate the normal triad [p0 q0 r0] at t0
p0, q0, r0 = normalTriad(a0[0], a0[1])
# Convert to internal units (radians, Julian year)
par0 = a0[2]*self.mastorad
pma0 = a0[3]*self.mastorad
pmd0 = a0[4]*self.mastorad
pmr0 = a0[5]*a0[2]/auKmYearPerSec*self.mastorad
# Proper motion vector
pmvec0 = pma0*p0+pmd0*q0
# Auxiliary quantities
tau2 = tau*tau
pm02 = pma0**2 + pmd0**2
w = one + pmr0*tau
f2 = one/(one + two*pmr0*tau + (pm02+pmr0**2)*tau2)
f = sqrt(f2)
f3 = f2*f
f4 = f2*f2
# Position vector and parallax at t1
u = (r0*w + pmvec0*tau)*f
_, ra, dec = cartesianToSpherical(u[0], u[1], u[2])
par = par0*f
# Proper motion vector and radial proper motion at t1
pmvec = (pmvec0*(one+pmr0*tau) - r0*pmr0**2*tau)*f3
pmr = (pmr0+(pm02 + pmr0**2)*tau)*f2
# Normal triad at t1
p, q, r = normalTriad(ra, dec)
# Convert parameters at t1 to external units (mas, Julian year)
pma = sum(p*pmvec, axis=0)
pmd = sum(q*pmvec, axis =0)
a = zeros_like(a0)
a[0] = ra
a[1] = dec
a[2] = par/self.mastorad
a[3] = pma/self.mastorad
a[4] = pmd/self.mastorad
a[5] = pmr/self.mastorad
# Auxiliary quantities for the partial derivatives
pmz = pmvec0*f - three*pmvec*w
pp0 = sum(p*p0, axis=0)
pq0 = sum(p*q0, axis=0)
pr0 = sum(p*r0, axis=0)
qp0 = sum(q*p0, axis=0)
qq0 = sum(q*q0, axis=0)
qr0 = sum(q*r0, axis=0)
ppmz = sum(p*pmz, axis=0)
qpmz = sum(q*pmz, axis=0)
J = zeros_like(c0)
if (c0.ndim==2):
J = J[newaxis,:,:]
# Partial derivatives
J[:,0,0] = pp0*w*f - pr0*pma0*tau*f
J[:,0,1] = pq0*w*f - pr0*pmd0*tau*f
J[:,0,2] = zero
J[:,0,3] = pp0*tau*f
J[:,0,4] = pq0*tau*f
J[:,0,5] = -pma*tau2
J[:,1,0] = qp0*w*f - qr0*pma0*tau*f
J[:,1,1] = qq0*w*f - qr0*pmd0*tau*f
J[:,1,2] = zero
J[:,1,3] = qp0*tau*f
J[:,1,4] = qq0*tau*f
J[:,1,5] = -pmd*tau2
J[:,2,0] = zero
J[:,2,1] = zero
J[:,2,2] = f
J[:,2,3] = -par*pma0*tau2*f2
J[:,2,4] = -par*pmd0*tau2*f2
J[:,2,5] = -par*w*tau*f2
J[:,3,0] = -pp0*pm02*tau*f3 - pr0*pma0*w*f3
J[:,3,1] = -pq0*pm02*tau*f3 - pr0*pmd0*w*f3
J[:,3,2] = zero
J[:,3,3] = pp0*w*f3 - two*pr0*pma0*tau*f3 - three*pma*pma0*tau2*f2
J[:,3,4] = pq0*w*f3 - two*pr0*pmd0*tau*f3 - three*pma*pmd0*tau2*f2
J[:,3,5] = ppmz*tau*f2
J[:,4,0] = -qp0*pm02*tau*f3 - qr0*pma0*w*f3
J[:,4,1] = -qq0*pm02*tau*f3 - qr0*pmd0*w*f3
J[:,4,2] = zero
J[:,4,3] = qp0*w*f3 - two*qr0*pma0*tau*f3 - three*pmd*pma0*tau2*f2
J[:,4,4] = qq0*w*f3 - two*qr0*pmd0*tau*f3 - three*pmd*pmd0*tau2*f2
J[:,4,5] = qpmz*tau*f2
J[:,5,0] = zero
J[:,5,1] = zero
J[:,5,2] = zero
J[:,5,3] = two*pma0*w*tau*f4
J[:,5,4] = two*pmd0*w*tau*f4
J[:,5,5] = (w**2 - pm02*tau2)*f4
JT = zeros_like(J)
for i in range(J.shape[0]):
JT[i] = J[i].T
if (c0.ndim==2):
c = matmul(J,matmul(c0[newaxis,:,:],JT))
else:
c = matmul(J,matmul(c0,JT))
return a, squeeze(c) | 0.01827 |
def _ParseSourcePathOption(self, options):
"""Parses the source path option.
Args:
options (argparse.Namespace): command line arguments.
Raises:
BadConfigOption: if the options are invalid.
"""
self._source_path = self.ParseStringOption(options, self._SOURCE_OPTION)
if not self._source_path:
raise errors.BadConfigOption('Missing source path.')
self._source_path = os.path.abspath(self._source_path) | 0.004454 |
def _assert_ssl_exc_contains(exc, *msgs):
"""Check whether SSL exception contains either of messages provided."""
if len(msgs) < 1:
raise TypeError(
'_assert_ssl_exc_contains() requires '
'at least one message to be passed.',
)
err_msg_lower = str(exc).lower()
return any(m.lower() in err_msg_lower for m in msgs) | 0.00271 |
def to_dict(self):
"""Get a dictionary representation of this item, formatted for Elasticsearch"""
out = {}
fields = self.__class__.search_objects.mapping.properties.properties
for key in fields:
# TODO: What if we've mapped the property to a different name? Will we allow that?
attribute = getattr(self, key)
field = fields[key]
# I believe this should take the highest priority.
if hasattr(field, "to_es"):
out[key] = field.to_es(attribute)
# First we check it this is a manager, in which case we have many related objects
elif isinstance(attribute, models.Manager):
if issubclass(attribute.model, Indexable):
# TODO: We want this to have some awareness of the relevant field.
out[key] = [obj.to_dict() for obj in attribute.all()]
else:
out[key] = list(attribute.values_list("pk", flat=True))
elif callable(attribute):
out[key] = attribute()
elif isinstance(attribute, Indexable):
out[key] = attribute.to_dict()
else:
out[key] = attribute
if out[key] is None:
del out[key]
return out | 0.004484 |
def get_events(self):
"""Returns a list of all ``KindleEvent``s held in the store
"""
with open(self._path, 'r') as file_:
file_lines = file_.read().splitlines()
event_lines = [line for line in file_lines if line]
events = []
for event_line in event_lines:
for event_cls in (AddEvent, SetReadingEvent, ReadEvent,
SetFinishedEvent):
try:
event = event_cls.from_str(event_line)
except EventParseError:
pass
else:
events.append(event)
return events | 0.002999 |
def get_first(self):
"""Return snmp value for the first OID."""
try: # Nested try..except because of Python 2.4
self.lock.acquire()
try:
return self.get(self.data_idx[0])
except (IndexError, ValueError):
return "NONE"
finally:
self.lock.release() | 0.044118 |
def compute_information_gain(ann_inter, est_inter, est_file, bins):
"""Computes the information gain of the est_file from the annotated
intervals and the estimated intervals."""
ann_times = utils.intervals_to_times(ann_inter)
est_times = utils.intervals_to_times(est_inter)
return mir_eval.beat.information_gain(ann_times, est_times, bins=bins) | 0.002747 |
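For context, a standalone sketch of the intervals-to-times conversion the helper above depends on; this is one common convention (interval starts plus the final end), and the project's own utils.intervals_to_times may differ in details:

import numpy as np

intervals = np.array([[0.0, 4.2], [4.2, 9.7], [9.7, 12.0]])
times = np.concatenate([intervals[:, 0], intervals[-1:, 1]])
print(times)   # [ 0.   4.2  9.7 12. ]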
def open_mfdataset(path_to_lsm_files,
lat_var,
lon_var,
time_var,
lat_dim,
lon_dim,
time_dim,
lon_to_180=False,
coords_projected=False,
loader=None,
engine=None,
autoclose=True):
"""
Wrapper to open land surface model netcdf files
using :func:`xarray.open_mfdataset`.
.. warning:: The time dimension and variable will both be
renamed to 'time' to enable slicing.
Parameters
----------
path_to_lsm_files: :obj:`str`
Path to land surface model files with wildcard.
(Ex. '/path/to/files/*.nc')
lat_var: :obj:`str`
Latitude variable (Ex. lat).
lon_var: :obj:`str`
Longitude variable (Ex. lon).
time_var: :obj:`str`
Time variable (Ex. time).
lat_dim: :obj:`str`
Latitude dimension (Ex. lat).
lon_dim: :obj:`str`
Longitude dimension (Ex. lon).
time_dim: :obj:`str`
Time dimension (ex. time).
lon_to_180: bool, optional, default=False
It True, will convert longitude from [0 to 360]
to [-180 to 180].
coords_projected: bool, optional, default=False
It True, it will assume the coordinates are already
in the projected coordinate system.
loader: str, optional, default=None
If 'hrrr', it will load in the HRRR dataset.
engine: str, optional
See: :func:`xarray.open_mfdataset` documentation.
autoclose: :obj:`str`, optional, default=True
If True, will use autoclose option with
:func:`xarray.open_mfdataset`.
Returns
-------
:func:`xarray.Dataset`
Read with pangaea example::
import pangaea as pa
with pa.open_mfdataset('/path/to/ncfiles/*.nc',
lat_var='lat',
lon_var='lon',
time_var='time',
lat_dim='lat',
lon_dim='lon',
time_dim='time') as xds:
print(xds.lsm.projection)
"""
def define_coords(xds):
"""xarray loader to ensure coordinates are loaded correctly"""
# remove time dimension from lat, lon coordinates
if xds[lat_var].ndim == 3:
xds[lat_var] = xds[lat_var].squeeze(time_dim)
# make sure coords are defined as coords
if lat_var not in xds.coords \
or lon_var not in xds.coords \
or time_var not in xds.coords:
xds.set_coords([lat_var, lon_var, time_var],
inplace=True)
return xds
def extract_hrrr_date(xds):
"""xarray loader for HRRR"""
for var in xds.variables:
if 'initial_time' in xds[var].attrs.keys():
grid_time = pd.to_datetime(xds[var].attrs['initial_time'],
format="%m/%d/%Y (%H:%M)")
if 'forecast_time' in xds[var].attrs.keys():
time_units = 'h'
if 'forecast_time_units' in xds[var].attrs.keys():
time_units = \
str(xds[var].attrs['forecast_time_units'][0])
time_dt = int(xds[var].attrs['forecast_time'][0])
grid_time += np.timedelta64(time_dt, time_units)
return xds.assign(time=grid_time)
return xds
if loader == 'hrrr':
preprocess = extract_hrrr_date
engine = 'pynio' if engine is None else engine
else:
preprocess = define_coords
xds = xr.open_mfdataset(path_to_lsm_files,
autoclose=autoclose,
preprocess=preprocess,
concat_dim=time_dim,
engine=engine,
)
xds.lsm.y_var = lat_var
xds.lsm.x_var = lon_var
xds.lsm.y_dim = lat_dim
xds.lsm.x_dim = lon_dim
xds.lsm.lon_to_180 = lon_to_180
xds.lsm.coords_projected = coords_projected
# make sure time dimensions are same for slicing
xds.rename(
{
time_dim: 'time',
time_var: 'time',
},
inplace=True
)
xds.lsm.to_datetime()
return xds | 0.000226 |
def per_installer_data(self):
"""
Return download data by installer name and version.
:return: dict of cache data; keys are datetime objects, values are
dict of installer name/version (str) to count (int).
:rtype: dict
"""
ret = {}
for cache_date in self.cache_dates:
data = self._cache_get(cache_date)
ret[cache_date] = {}
for inst_name, inst_data in data['by_installer'].items():
for inst_ver, count in inst_data.items():
k = self._compound_column_value(
inst_name,
self._shorten_version(inst_ver)
)
ret[cache_date][k] = count
if len(ret[cache_date]) == 0:
ret[cache_date]['unknown'] = 0
return ret | 0.002317 |
def water(target, temperature='pore.temperature', salinity='pore.salinity'):
r"""
Calculates surface tension of pure water or seawater at atmospheric
pressure using Eq. (28) given by Sharqawy et al. Values at
temperature higher than the normal boiling temperature are calculated at
the saturation pressure.
Parameters
----------
target : OpenPNM Object
The object for which these values are being calculated. This
controls the length of the calculated array, and also provides
access to other necessary thermofluid properties.
temperature : string
The dictionary key containing the temperature values. Temperature must
        be in Kelvin for this empirical equation to work
salinity : string
The dictionary key containing the salinity values. Salinity must be
expressed in g of salt per kg of solution (ppt).
Returns
-------
sigma_sw, the surface tension of seawater in [N/m]
Notes
-----
T must be in K, and S in g of salt per kg of phase, or ppt (parts per
thousand)
VALIDITY: 273 < T < 313 K; 0 < S < 40 g/kg;
ACCURACY: 0.2 %
References
----------
Sharqawy M. H., Lienhard J. H., and Zubair, S. M., Desalination and
Water Treatment, 2010.
"""
T = target[temperature]
if salinity in target.keys():
S = target[salinity]
else:
S = 0
sigma_w = 0.2358*((1-(T/647.096))**1.256)*(1-0.625*(1-(T/647.096)))
a1 = 2.2637334337E-04
a2 = 9.4579521377E-03
a3 = 3.3104954843E-02
TC = T-273.15
sigma_sw = sigma_w*(1+(a1*TC+a2)*sp.log(1+a3*S))
value = sigma_sw
return value | 0.000596 |
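A worked numerical check of the correlation above at T = 298.15 K (plain numpy, outside the OpenPNM target machinery); pure water comes out near 0.072 N/m and S = 35 g/kg seawater slightly higher, as expected:

import numpy as np

T = 298.15
S = 35.0
sigma_w = 0.2358 * (1 - T / 647.096) ** 1.256 * (1 - 0.625 * (1 - T / 647.096))
a1, a2, a3 = 2.2637334337E-04, 9.4579521377E-03, 3.3104954843E-02
TC = T - 273.15
sigma_sw = sigma_w * (1 + (a1 * TC + a2) * np.log(1 + a3 * S))
print(sigma_w, sigma_sw)   # ~0.0720, ~0.0728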
def main():
"""
This generates the research document based on the results of
the various programs and includes RST imports for introduction
and summary
"""
print("Generating research notes...")
if os.path.exists(fname):
os.remove(fname)
append_rst('================================================\n')
append_rst('Comparison of Information Aggregation Techniques\n')
append_rst('================================================\n\n')
append_rst('.. contents::\n\n')
# import header
append_rst(open('res_core_data_HEADER.rst', 'r').read())
append_rst(res_core_data_mthd1.get_method())
append_rst(res_core_data_mthd2.get_method())
# call programs
append_rst('Results\n')
append_rst('=====================================\n')
for dat in data_files:
append_rst('\nData File : ' + dat + '\n---------------------------------------\n\n')
res_core_data_mthd1.get_results(fname, dat)
res_core_data_mthd2.get_results(fname, dat)
# import footer
append_rst(open('res_core_data_FOOTER.rst', 'r').read())
print("Done!") | 0.008673 |
def conflicting_deps(tree):
"""Returns dependencies which are not present or conflict with the
requirements of other packages.
e.g. will warn if pkg1 requires pkg2==2.0 and pkg2==1.0 is installed
:param tree: the requirements tree (dict)
:returns: dict of DistPackage -> list of unsatisfied/unknown ReqPackage
:rtype: dict
"""
conflicting = defaultdict(list)
for p, rs in tree.items():
for req in rs:
if req.is_conflicting():
conflicting[p].append(req)
return conflicting | 0.001818 |
def remove_node_by_value(self, value):
"""
Delete all nodes in ``self.node_list`` with the value ``value``.
Args:
value (Any): The value to find and delete owners of.
Returns: None
Example:
>>> from blur.markov.node import Node
>>> node_1 = Node('One')
>>> graph = Graph([node_1])
>>> graph.remove_node_by_value('One')
>>> len(graph.node_list)
0
"""
self.node_list = [node for node in self.node_list
if node.value != value]
# Remove links pointing to the deleted node
for node in self.node_list:
node.link_list = [link for link in node.link_list if
link.target.value != value] | 0.002491 |
def get_fact_cache(self, host):
'''
Get the entire fact cache only if the fact_cache_type is 'jsonfile'
'''
if self.config.fact_cache_type != 'jsonfile':
raise Exception('Unsupported fact cache type. Only "jsonfile" is supported for reading and writing facts from ansible-runner')
fact_cache = os.path.join(self.config.fact_cache, host)
if os.path.exists(fact_cache):
with open(fact_cache) as f:
return json.loads(f.read())
return {} | 0.00566 |
def train_image(self):
"""Return the Docker image to use for training.
The :meth:`~sagemaker.estimator.EstimatorBase.fit` method, which does the model training,
calls this method to find the image to use for model training.
Returns:
str: The URI of the Docker image.
"""
if self.image_name:
return self.image_name
else:
return fw_utils.create_image_uri(self.sagemaker_session.boto_region_name,
self._image_framework(),
self.train_instance_type,
self._image_version(),
py_version=PYTHON_VERSION) | 0.005208 |