def crt_prf_tc(aryNrlTc, varNumVol, varTr, varTmpOvsmpl, switchHrfSet,
tplPngSize, varPar, dctPrm=None, lgcPrint=True):
"""Convolve every neural time course with HRF function.
Parameters
----------
aryNrlTc : 4d numpy array, shape [n_x_pos, n_y_pos, n_sd, n_vol]
Temporally upsampled neural time course models.
varNumVol : float, positive
Number of volumes of the (fMRI) data.
varTr : float, positive
Time to repeat (TR) of the (fMRI) experiment.
varTmpOvsmpl : int, positive
Factor by which the data has been temporally upsampled.
switchHrfSet : int, (1, 2, 3)
Switch to determine which hrf basis functions are used
tplPngSize : tuple
Pixel dimensions of the visual space (width, height).
varPar : int, positive
Number of cores for multi-processing.
dctPrm : dictionary, default None
Dictionary with customized hrf parameters. If this is None, default
hrf parameters will be used.
lgcPrint: boolean, default True
Should print messages be sent to user?
Returns
-------
aryNrlTcConv : 5d numpy array,
shape [n_x_pos, n_y_pos, n_sd, n_hrf_bases, varNumVol]
Neural time courses convolved with HRF basis functions
"""
# Create hrf time course function:
if switchHrfSet == 3:
lstHrf = [spmt, dspmt, ddspmt]
elif switchHrfSet == 2:
lstHrf = [spmt, dspmt]
elif switchHrfSet == 1:
lstHrf = [spmt]
# If necessary, adjust the input such that input is 2D, with last dim time
tplInpShp = aryNrlTc.shape
aryNrlTc = np.reshape(aryNrlTc, (-1, aryNrlTc.shape[-1]))
if varPar == 1:
# if the number of cores requested by the user is equal to 1,
# we save the overhead of multiprocessing by calling cnvl_tc
# directly
aryNrlTcConv = cnvl_tc(0, aryNrlTc, lstHrf, varTr,
varNumVol, varTmpOvsmpl, None, dctPrm=dctPrm)
else:
# Put input data into chunks:
lstNrlTc = np.array_split(aryNrlTc, varPar)
# Create a queue to put the results in:
queOut = mp.Queue()
# Empty list for processes:
lstPrcs = [None] * varPar
# Empty list for results of parallel processes:
lstConv = [None] * varPar
if lgcPrint:
print('------------Running parallel processes')
# Create processes:
for idxPrc in range(0, varPar):
lstPrcs[idxPrc] = mp.Process(target=cnvl_tc,
args=(idxPrc,
lstNrlTc[idxPrc],
lstHrf,
varTr,
varNumVol,
varTmpOvsmpl,
queOut),
kwargs={'dctPrm': dctPrm},
)
# Daemon (kills processes when exiting):
lstPrcs[idxPrc].Daemon = True
# Start processes:
for idxPrc in range(0, varPar):
lstPrcs[idxPrc].start()
# Collect results from queue:
for idxPrc in range(0, varPar):
lstConv[idxPrc] = queOut.get(True)
# Join processes:
for idxPrc in range(0, varPar):
lstPrcs[idxPrc].join()
if lgcPrint:
print('------------Collecting results from parallel processes')
# Put output into correct order:
lstConv = sorted(lstConv)
# Concatenate convolved pixel time courses (in the same order as the chunks):
aryNrlTcConv = np.zeros((0, switchHrfSet, varNumVol), dtype=np.float32)
for idxRes in range(0, varPar):
aryNrlTcConv = np.concatenate((aryNrlTcConv, lstConv[idxRes][1]),
axis=0)
# clean up
del(aryNrlTc)
del(lstConv)
# Reshape results:
tplOutShp = tplInpShp[:-1] + (len(lstHrf), ) + (varNumVol, )
# Return:
return np.reshape(aryNrlTcConv, tplOutShp).astype(np.float32)
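# Shape-level sketch (added illustration, not from the original source) of the
# flatten/convolve/reshape round-trip used above, with a toy stand-in for
# cnvl_tc; all names here are hypothetical.
import numpy as np

aryToy = np.random.rand(4, 4, 3, 100)                 # [n_x_pos, n_y_pos, n_sd, n_vol]
tplToyShp = aryToy.shape
aryFlat = np.reshape(aryToy, (-1, aryToy.shape[-1]))  # (48, 100): rows of time courses
varNumBase = 2                                        # e.g. switchHrfSet == 2
aryToyConv = np.repeat(aryFlat[:, None, :], varNumBase, axis=1)  # toy "convolution"
tplToyOut = tplToyShp[:-1] + (varNumBase,) + (tplToyShp[-1],)
print(np.reshape(aryToyConv, tplToyOut).shape)        # (4, 4, 3, 2, 100)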
def laminar_entry_thermal_Hausen(Re=None, Pr=None, L=None, Di=None):
r'''Calculates average internal convection Nusselt number for laminar flows
in pipe during the thermal entry region according to [1]_ as shown in
[2]_ and cited by [3]_.
.. math::
Nu_D=3.66+\frac{0.0668\frac{D}{L}Re_{D}Pr}{1+0.04{(\frac{D}{L}
Re_{D}Pr)}^{2/3}}
Parameters
----------
Re : float
Reynolds number, [-]
Pr : float
Prandtl number, [-]
L : float
Length of pipe [m]
Di : float
Diameter of pipe [m]
Returns
-------
Nu : float
Nusselt number, [-]
Notes
-----
If Pr >> 1 (Pr > 5 is a common requirement), this equation also applies to
flows with a developing velocity profile.
As L gets larger, this equation becomes the constant-temperature Nusselt
number.
Examples
--------
>>> laminar_entry_thermal_Hausen(Re=100000, Pr=1.1, L=5, Di=.5)
39.01352358988535
References
----------
.. [1] Hausen, H. Darstellung des Warmeuberganges in Rohren durch
verallgemeinerte Potenzbeziehungen, Z. Ver deutsch. Ing Beih.
Verfahrenstech., 4, 91-98, 1943
.. [2] W. M. Kays. 1953. Numerical Solutions for Laminar Flow Heat Transfer
in Circular Tubes.
.. [3] Bergman, Theodore L., Adrienne S. Lavine, Frank P. Incropera, and
David P. DeWitt. Introduction to Heat Transfer. 6E.
Hoboken, NJ: Wiley, 2011.
'''
Gz = Di/L*Re*Pr
return 3.66 + (0.0668*Gz)/(1+0.04*(Gz)**(2/3.))
def add_insertions(self, skip=10, window=1, test=False):
'''Adds a random base within window bases around every skip bases. e.g. skip=10, window=1 means a random base is added somewhere in the intervals [9,11], [19,21] ... '''
assert 2 * window < skip
new_seq = list(self.seq)
for i in range(len(self) - skip, 0, -skip):
pos = random.randrange(i - window, i + window + 1)
base = random.choice(['A', 'C', 'G', 'T'])
if test:
base = 'N'
new_seq.insert(pos, base)
self.seq = ''.join(new_seq)
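# Hypothetical usage sketch: the method above only needs .seq and __len__(),
# so a minimal stand-in class is enough to exercise it (test=True inserts 'N'
# so the added bases are easy to spot).
import random

class _ToySeq:
    def __init__(self, seq):
        self.seq = seq
    def __len__(self):
        return len(self.seq)

_ToySeq.add_insertions = add_insertions
toy = _ToySeq('A' * 30)
toy.add_insertions(skip=10, window=1, test=True)
print(toy.seq)  # 'N' inserted near positions 10 and 20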
def set_sampling_strategies(self, filter, strategy_and_params):
"""Set sampling strategy for the sensors of all the group's clients.
Only sensors that match the specified filter are considered. See the
`KATCPResource.set_sampling_strategies` docstring for parameter
definitions and more info.
Returns
-------
sensors_strategies : tornado Future
Resolves with a dict with client names as keys and with the value as
another dict. The value dict is similar to the return value
described in the `KATCPResource.set_sampling_strategies` docstring.
"""
futures_dict = {}
for res_obj in self.clients:
futures_dict[res_obj.name] = res_obj.set_sampling_strategies(
filter, strategy_and_params)
sensors_strategies = yield futures_dict
raise tornado.gen.Return(sensors_strategies)
def make_masks(self, template):
"""This method generates all seven masks so that the best mask can
be determined. The template parameter is a code matrix that will
serve as the base for all the generated masks.
"""
from copy import deepcopy
nmasks = len(tables.mask_patterns)
masks = [''] * nmasks
count = 0
for n in range(nmasks):
cur_mask = deepcopy(template)
masks[n] = cur_mask
#Add the type pattern bits to the code
self.add_type_pattern(cur_mask, tables.type_bits[self.error][n])
#Get the mask pattern
pattern = tables.mask_patterns[n]
#This will read the 1's and 0's one at a time
bits = iter(self.buffer.getvalue())
#These will help us do the up, down, up, down pattern
row_start = itertools.cycle([len(cur_mask)-1, 0])
row_stop = itertools.cycle([-1,len(cur_mask)])
direction = itertools.cycle([-1, 1])
#The data pattern is added using pairs of columns
for column in range(len(cur_mask)-1, 0, -2):
#The vertical timing pattern is an exception to the rules,
#move the column counter over by one
if column <= 6:
column = column - 1
#This will let us fill in the pattern
#right-left, right-left, etc.
column_pair = itertools.cycle([column, column-1])
#Go through each row in the pattern moving up, then down
for row in range(next(row_start), next(row_stop),
next(direction)):
#Fill in the right then left column
for i in range(2):
col = next(column_pair)
#Go to the next column if we encounter a
#preexisting pattern (usually an alignment pattern)
if cur_mask[row][col] != ' ':
continue
#Some versions don't have enough bits. You then fill
#in the rest of the pattern with 0's. These are
#called "remainder bits."
try:
bit = int(next(bits))
except StopIteration:
bit = 0
#If the pattern is True then flip the bit
if pattern(row, col):
cur_mask[row][col] = bit ^ 1
else:
cur_mask[row][col] = bit
#DEBUG CODE!!!
#Save all of the masks as png files
#for i, m in enumerate(masks):
# _png(m, self.version, 'mask-{0}.png'.format(i), 5)
return masks
def _fix_component_id(self, component):
'Fix name of component and all of its children'
theID = getattr(component, "id", None)
if theID is not None:
setattr(component, "id", self._fix_id(theID))
try:
for c in component.children:
self._fix_component_id(c)
except: #pylint: disable=bare-except
pass
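# Hypothetical sketch of the recursive id rewrite: a stub component tree with
# .id/.children and a stub _fix_id, mirroring the (Dash-style) objects the
# method above expects.
class _ToyComponent:
    def __init__(self, id=None, children=()):
        self.id = id
        self.children = list(children)

class _ToyFixer:
    def _fix_id(self, theID):
        return 'app-' + theID

_ToyFixer._fix_component_id = _fix_component_id
root = _ToyComponent('root', [_ToyComponent('child')])
_ToyFixer()._fix_component_id(root)
print(root.id, root.children[0].id)  # app-root app-child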
def angle(vec1, vec2):
"""Calculate the angle between two Vector2's"""
dotp = Vector2.dot(vec1, vec2)
mag1 = vec1.length()
mag2 = vec2.length()
result = dotp / (mag1 * mag2)
return math.acos(result)
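# Hypothetical minimal Vector2 with the dot()/length() interface assumed by
# angle() above (the real class is not shown in this snippet). Note that
# math.acos raises a domain error if rounding pushes the ratio past +/-1.
import math

class Vector2:
    def __init__(self, x, y):
        self.x, self.y = x, y
    @staticmethod
    def dot(a, b):
        return a.x * b.x + a.y * b.y
    def length(self):
        return math.hypot(self.x, self.y)

print(angle(Vector2(1, 0), Vector2(0, 1)))  # ~1.5708, i.e. 90 degrees in radians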
def check_config_file(msg):
"""
Checks the config.json file for default settings and auth values.
Args:
:msg: (Message class) an instance of a message class.
"""
with jsonconfig.Config("messages", indent=4) as cfg:
verify_profile_name(msg, cfg)
retrieve_data_from_config(msg, cfg)
if msg._auth is None:
retrieve_pwd_from_config(msg, cfg)
if msg.save:
update_config_data(msg, cfg)
update_config_pwd(msg, cfg)
def load_variant(self, variant_obj):
"""Load a variant object
Args:
variant_obj(dict)
Returns:
inserted_id
"""
# LOG.debug("Loading variant %s", variant_obj['_id'])
try:
result = self.variant_collection.insert_one(variant_obj)
except DuplicateKeyError as err:
raise IntegrityError("Variant %s already exists in database" % variant_obj['_id'])
return result
def save(self, *args, **kwargs):
"""save(filething=None, padding=None)"""
super(MP4, self).save(*args, **kwargs)
def parse(self, line, cell, namespace=None):
"""Parses a line and cell into a dictionary of arguments, expanding variables from a namespace.
For each line parameters beginning with --, it also checks the cell content and see if it exists
there. For example, if "--config1" is a line parameter, it checks to see if cell dict contains
"config1" item, and if so, use the cell value. The "config1" item will also be removed from
cell content.
Args:
line: line content.
cell: cell content.
namespace: user namespace. If None, IPython's user namespace is used.
Returns:
A tuple of: 1. parsed config dict. 2. remaining cell after line parameters are extracted.
"""
if namespace is None:
ipy = IPython.get_ipython()
namespace = ipy.user_ns
# Find which subcommand in the line by comparing line with subcommand progs.
# For example, assuming there are 3 subcommands with their progs
# %bq tables
# %bq tables list
# %bq datasets
# and the line is "tables list --dataset proj.myds"
# it will find the second one --- "tables list" because it matches the prog and
# it is the longest.
args = CommandParser.create_args(line, namespace)
# "prog" is an ArgumentParser's path split by namespace, such as '%bq tables list'.
sub_parsers_progs = [x.prog for x in self._get_subparsers()]
matched_progs = []
for prog in sub_parsers_progs:
# Remove the leading magic such as "%bq".
match = prog.split()[1:]
for i in range(len(args)):
if args[i:i + len(match)] == match:
matched_progs.append(prog)
break
matched_prog = None
if matched_progs:
# Get the longest match.
matched_prog = max(matched_progs, key=lambda x: len(x.split()))
# Line args can be provided in cell too. If they are in cell, move them to line
# so we can parse them all together.
line_args = self._get_subparser_line_args(matched_prog)
if line_args:
cell_config = None
try:
cell_config, cell = google.datalab.utils.commands.parse_config_for_selected_keys(
cell, line_args)
except:
# It is okay --- probably because cell is not in yaml or json format.
pass
if cell_config:
google.datalab.utils.commands.replace_vars(cell_config, namespace)
for arg_name in cell_config:
arg_value = cell_config[arg_name]
if arg_value is None:
continue
if '--' + arg_name in args:
raise ValueError('config item "%s" is specified in both cell and line.' % arg_name)
if isinstance(arg_value, bool):
if arg_value:
line += ' --%s' % arg_name
else:
line += ' --%s %s' % (arg_name, str(cell_config[arg_name]))
# Parse args again with the new line.
args = CommandParser.create_args(line, namespace)
args = vars(self.parse_args(args))
# Parse cell args.
cell_config = None
cell_args = self._get_subparser_cell_args(matched_prog)
if cell_args:
try:
cell_config, _ = google.datalab.utils.commands.parse_config_for_selected_keys(
cell, cell_args)
except:
# It is okay --- probably because cell is not in yaml or json format.
pass
if cell_config:
google.datalab.utils.commands.replace_vars(cell_config, namespace)
for arg in cell_args:
if (cell_args[arg]['required'] and
(cell_config is None or cell_config.get(arg, None) is None)):
raise ValueError('Cell config "%s" is required.' % arg)
if cell_config:
args.update(cell_config)
return args, cell
def minimum_distance2(hull_a, center_a, hull_b, center_b):
'''Return the minimum distance or 0 if overlap between 2 convex hulls
hull_a - list of points in clockwise direction
center_a - a point within the hull
hull_b - list of points in clockwise direction
center_b - a point within the hull
'''
if hull_a.shape[0] < 3 or hull_b.shape[0] < 3:
return slow_minimum_distance2(hull_a, hull_b)
else:
return faster_minimum_distance2(hull_a, center_a, hull_b, center_b)
def get_sketch(self, sketch_id):
"""Get information on the specified sketch.
Args:
sketch_id (int): ID of sketch
Returns:
dict: Dictionary of sketch information
Raises:
ValueError: Sketch is inaccessible
"""
resource_url = '{0:s}/sketches/{1:d}/'.format(self.api_base_url, sketch_id)
response = self.session.get(resource_url)
response_dict = response.json()
try:
response_dict['objects']
except KeyError:
raise ValueError('Sketch does not exist or you have no access')
return response_dict
def visit(spht, node):
"""Append opening tags to document body list.
:param sphinx.writers.html.SmartyPantsHTMLTranslator spht: Object to modify.
:param sphinxcontrib.imgur.nodes.ImgurEmbedNode node: This class' instance.
"""
html_attrs_bq = {'CLASS': 'imgur-embed-pub', 'lang': spht.settings.language_code, 'data-id': node.imgur_id}
if node.hide_post_details:
html_attrs_bq['data-context'] = 'false'
spht.body.append(spht.starttag(node, 'blockquote', '', **html_attrs_bq))
html_attrs_ah = dict(href='https://imgur.com/{}'.format(node.imgur_id), CLASS='reference external')
spht.body.append(spht.starttag(node, 'a', 'Loading...', **html_attrs_ah))
def cleanup_candidates(self, node_ip):
"""
Removes old TCP hole punching candidates for a
designated node if a certain amount of time has passed
since they last connected.
"""
if node_ip in self.factory.candidates:
old_candidates = []
for candidate in self.factory.candidates[node_ip]:
elapsed = int(time.time() - candidate["time"])
if elapsed > self.challege_timeout:
old_candidates.append(candidate)
for candidate in old_candidates:
self.factory.candidates[node_ip].remove(candidate)
def compile_tag_re(self, tags):
"""
Return the regex used to look for Mustache tags compiled to work with
specific opening tags, close tags, and tag types.
"""
return re.compile(self.raw_tag_re % tags, self.re_flags)
def geometricBar(weights, alldistribT):
"""return the weighted geometric mean of distributions"""
assert(len(weights) == alldistribT.shape[1])
return np.exp(np.dot(np.log(alldistribT), weights.T))
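# Quick numeric check: with equal weights this reduces to the elementwise
# geometric mean (distributions are assumed to be the columns of alldistribT,
# matching the assert above).
import numpy as np

alldistribT = np.array([[0.2, 0.8],
                        [0.8, 0.2]])
weights = np.array([0.5, 0.5])
print(geometricBar(weights, alldistribT))  # [0.4 0.4], sqrt(0.2 * 0.8) per row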
def set_table_cb(self, viewer, table):
"""Display the given table object."""
self.clear()
tree_dict = OrderedDict()
# Extract data as astropy table
a_tab = table.get_data()
# Fill masked values, if applicable
try:
a_tab = a_tab.filled()
except Exception: # Just use original table
pass
# This is to get around table widget not sorting numbers properly
i_fmt = '{{0:0{0}d}}'.format(len(str(len(a_tab))))
# Table header with units
columns = [('Row', '_DISPLAY_ROW')]
for c in a_tab.columns.values():
col_str = '{0:^s}\n{1:^s}'.format(c.name, str(c.unit))
columns.append((col_str, c.name))
self.widget.setup_table(columns, 1, '_DISPLAY_ROW')
# Table contents
for i, row in enumerate(a_tab, 1):
bnch = Bunch.Bunch(zip(row.colnames, row.as_void()))
i_str = i_fmt.format(i)
bnch['_DISPLAY_ROW'] = i_str
tree_dict[i_str] = bnch
self.widget.set_tree(tree_dict)
# Resize column widths
n_rows = len(tree_dict)
if n_rows < self.settings.get('max_rows_for_col_resize', 5000):
self.widget.set_optimal_column_widths()
self.logger.debug('Resized columns for {0} row(s)'.format(n_rows))
tablename = table.get('name', 'NoName')
self.logger.debug('Displayed {0}'.format(tablename))
def closestsites(struct_blk, struct_def, pos):
"""
Returns closest site to the input position
for both bulk and defect structures
Args:
struct_blk: Bulk structure
struct_def: Defect structure
pos: Position
Return: (site object, dist, index)
"""
blk_close_sites = struct_blk.get_sites_in_sphere(pos, 5, include_index=True)
blk_close_sites.sort(key=lambda x: x[1])
def_close_sites = struct_def.get_sites_in_sphere(pos, 5, include_index=True)
def_close_sites.sort(key=lambda x: x[1])
return blk_close_sites[0], def_close_sites[0]
def commandify(use_argcomplete=False, exit=True, *args, **kwargs):
'''Turns decorated functions into command line args
Finds the main_command and all commands and generates command line args
from these.'''
parser = CommandifyArgumentParser(*args, **kwargs)
parser.setup_arguments()
if use_argcomplete:
try:
import argcomplete
except ImportError:
print('argcomplete not installed, please install it.')
parser.exit(status=2)
# Must happen between setup_arguments() and parse_args().
argcomplete.autocomplete(parser)
args = parser.parse_args()
if exit:
parser.dispatch_commands()
parser.exit(0)
else:
return parser.dispatch_commands()
def polemap_magic(loc_file="locations.txt", dir_path=".", interactive=False, crd="",
sym='ro', symsize=40, rsym='g^', rsymsize=40,
fmt="pdf", res="c", proj="ortho",
flip=False, anti=False, fancy=False,
ell=False, ages=False, lat_0=90., lon_0=0., save_plots=True):
"""
Use a MagIC format locations table to plot poles.
Parameters
----------
loc_file : str, default "locations.txt"
dir_path : str, default "."
directory name to find loc_file in (if not included in loc_file)
interactive : bool, default False
if True, interactively plot and display
(this is best used on the command line only)
crd : str, default ""
coordinate system [g, t] (geographic, tilt_corrected)
sym : str, default "ro"
symbol color and shape, default red circles
(see matplotlib documentation for more options)
symsize : int, default 40
symbol size
rsym : str, default "g^"
symbol for plotting reverse poles
rsymsize : int, default 40
symbol size for reverse poles
fmt : str, default "pdf"
format for figures, ["svg", "jpg", "pdf", "png"]
res : str, default "c"
resolution [c, l, i, h] (crude, low, intermediate, high)
proj : str, default "ortho"
ortho = orthographic
lcc = lambert conformal
moll = mollweide
merc = mercator
flip : bool, default False
if True, flip reverse poles to normal antipode
anti : bool, default False
if True, plot antipodes for each pole
fancy : bool, default False
if True, plot topography (not yet implemented)
ell : bool, default False
if True, plot ellipses
ages : bool, default False
if True, plot ages
lat_0 : float, default 90.
eyeball latitude
lon_0 : float, default 0.
eyeball longitude
save_plots : bool, default True
if True, create and save all requested plots
"""
# initialize and format variables
saved = []
lats, lons = [], []
Pars = []
dates, rlats, rlons = [], [], []
polarities = []
if interactive:
save_plots = False
full_path = pmag.resolve_file_name(loc_file, dir_path)
dir_path, loc_file = os.path.split(full_path)
# create MagIC contribution
con = cb.Contribution(dir_path, single_file=loc_file)
if not list(con.tables.keys()):
print("-W - Couldn't read in data")
return False, "Couldn't read in data"
FIG = {'map': 1}
pmagplotlib.plot_init(FIG['map'], 6, 6)
pole_container = con.tables['locations']
pole_df = pole_container.df
if 'pole_lat' not in pole_df.columns or 'pole_lon' not in pole_df.columns:
print("-W- pole_lat and pole_lon are required columns to run polemap_magic.py")
return False, "pole_lat and pole_lon are required columns to run polemap_magic.py"
# use records with pole_lat and pole_lon
cond1, cond2 = pole_df['pole_lat'].notnull(), pole_df['pole_lon'].notnull()
Results = pole_df[cond1 & cond2]
# don't plot identical poles twice
Results.drop_duplicates(subset=['pole_lat', 'pole_lon', 'location'], inplace=True)
# use tilt correction if available
# prioritize tilt-corrected poles
if 'dir_tilt_correction' in Results.columns:
if not crd:
coords = Results['dir_tilt_correction'].unique()
if 100. in coords:
crd = 't'
elif 0. in coords:
crd = 'g'
else:
crd = ''
coord_dict = {'g': 0, 't': 100}
coord = coord_dict[crd] if crd else ""
# filter results by dir_tilt_correction if available
if (coord or coord == 0) and 'dir_tilt_correction' in Results.columns:
Results = Results[Results['dir_tilt_correction'] == coord]
# get location name and average ages
loc_list = Results['location'].values
locations = ":".join(Results['location'].unique())
if 'age' not in Results.columns and 'age_low' in Results.columns and 'age_high' in Results.columns:
Results['age'] = Results['age_low']+0.5 * \
(Results['age_high']-Results['age_low'])
if 'age' in Results.columns and ages:
dates = Results['age'].unique()
if not any(Results.index):
print("-W- No poles could be plotted")
return False, "No poles could be plotted"
# go through rows and extract data
for ind, row in Results.iterrows():
lat, lon = float(row['pole_lat']), float(row['pole_lon'])
if 'dir_polarity' in row:
polarities.append(row['dir_polarity'])
if anti:
lats.append(-lat)
lon = lon + 180.
if lon > 360:
lon = lon - 360.
lons.append(lon)
elif not flip:
lats.append(lat)
lons.append(lon)
elif flip:
if lat < 0:
rlats.append(-lat)
lon = lon + 180.
if lon > 360:
lon = lon - 360
rlons.append(lon)
else:
lats.append(lat)
lons.append(lon)
ppars = []
ppars.append(lon)
ppars.append(lat)
ell1, ell2 = "", ""
if 'pole_dm' in list(row.keys()) and row['pole_dm']:
ell1 = float(row['pole_dm'])
if 'pole_dp' in list(row.keys()) and row['pole_dp']:
ell2 = float(row['pole_dp'])
if 'pole_alpha95' in list(row.keys()) and row['pole_alpha95']:
ell1, ell2 = float(row['pole_alpha95']), float(row['pole_alpha95'])
if ell1 and ell2 and lons:
ppars = []
ppars.append(lons[-1])
ppars.append(lats[-1])
ppars.append(ell1)
ppars.append(lons[-1])
try:
isign = abs(lats[-1]) / lats[-1]
except ZeroDivisionError:
isign = 1
ppars.append(lats[-1] - isign * 90.)
ppars.append(ell2)
ppars.append(lons[-1] + 90.)
ppars.append(0.)
Pars.append(ppars)
locations = locations.strip(':')
Opts = {'latmin': -90, 'latmax': 90, 'lonmin': 0., 'lonmax': 360.,
'lat_0': lat_0, 'lon_0': lon_0, 'proj': proj, 'sym': 'b+',
'symsize': 40, 'pltgrid': 0, 'res': res, 'boundinglat': 0.,
'edgecolor': 'face'}
Opts['details'] = {'coasts': 1, 'rivers': 0, 'states': 0,
'countries': 0, 'ocean': 1, 'fancy': fancy}
base_Opts = Opts.copy()
# make the base map with a blue triangle at the pole
pmagplotlib.plot_map(FIG['map'], [90.], [0.], Opts)
#Opts['pltgrid'] = -1
if proj == 'merc':
    Opts['pltgrid'] = 1
Opts['sym'] = sym
Opts['symsize'] = symsize
if len(dates) > 0:
Opts['names'] = dates
if len(lats) > 0:
pole_lats = []
pole_lons = []
for num, lat in enumerate(lats):
lon = lons[num]
if lat > 0:
pole_lats.append(lat)
pole_lons.append(lon)
# plot the lats and lons of the poles
pmagplotlib.plot_map(FIG['map'], pole_lats, pole_lons, Opts)
# do reverse poles
if len(rlats) > 0:
reverse_Opts = Opts.copy()
reverse_Opts['sym'] = rsym
reverse_Opts['symsize'] = rsymsize
reverse_Opts['edgecolor'] = 'black'
# plot the lats and lons of the reverse poles
pmagplotlib.plot_map(FIG['map'], rlats, rlons, reverse_Opts)
Opts['names'] = []
titles = {}
files = {}
if pmagplotlib.isServer:
# plot each indvidual pole for the server
for ind in range(len(lats)):
lat = lats[ind]
lon = lons[ind]
polarity = ""
if 'polarities' in locals():
polarity = polarities[ind]
polarity = "_" + polarity if polarity else ""
location = loc_list[ind]
FIG["map_{}".format(ind)] = ind+2
pmagplotlib.plot_init(FIG['map_{}'.format(ind)], 6, 6)
pmagplotlib.plot_map(FIG['map_{}'.format(ind)], [90.], [0.], base_Opts)
pmagplotlib.plot_map(ind+2, [lat], [lon], Opts)
titles["map_{}".format(ind)] = location
if crd:
fname = "LO:_{}{}_TY:_POLE_map_{}.{}".format(location, polarity, crd, fmt)
fname_short = "LO:_{}{}_TY:_POLE_map_{}".format(location, polarity, crd)
else:
fname = "LO:_{}{}_TY:_POLE_map.{}".format(location, polarity, fmt)
fname_short = "LO:_{}{}_TY:_POLE_map".format(location, polarity)
# don't allow identically named files
if files:
file_values = files.values()
file_values_short = [fname.rsplit('.')[0] for fname in file_values]
if fname_short in file_values_short:
for val in [str(n) for n in range(1, 10)]:
fname = fname_short + "_{}.".format(val) + fmt
if fname not in file_values:
break
files["map_{}".format(ind)] = fname
# truncate location names so that ultra long filenames are not created
if len(locations) > 50:
locations = locations[:50]
if pmagplotlib.isServer:
# use server plot naming convention
con_id = ''
if 'contribution' in con.tables:
# try to get contribution id
if 'id' in con.tables['contribution'].df.columns:
con_id = con.tables['contribution'].df.iloc[0]['id']
files['map'] = 'MC:_{}_TY:_POLE_map_{}.{}'.format(con_id, crd, fmt)
else:
# no contribution id available
files['map'] = 'LO:_' + locations + '_TY:_POLE_map_{}.{}'.format(crd, fmt)
else:
# use readable naming convention for non-database use
files['map'] = '{}_POLE_map_{}.{}'.format(locations, crd, fmt)
#
if interactive and (not set_env.IS_WIN):
pmagplotlib.draw_figs(FIG)
if ell: # add ellipses if desired.
Opts['details'] = {'coasts': 0, 'rivers': 0, 'states': 0,
'countries': 0, 'ocean': 0, 'fancy': fancy}
Opts['pltgrid'] = -1 # turn off meridian replotting
Opts['symsize'] = 2
Opts['sym'] = 'g-'
for ppars in Pars:
if ppars[2] != 0:
PTS = pmagplotlib.plot_ell(FIG['map'], ppars, 'g.', 0, 0)
elats, elons = [], []
for pt in PTS:
elons.append(pt[0])
elats.append(pt[1])
# make the base map with a blue triangle at the pole
pmagplotlib.plot_map(FIG['map'], elats, elons, Opts)
if interactive and (not set_env.IS_WIN):
pmagplotlib.draw_figs(FIG)
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
titles['map'] = 'LO:_' + locations + '_POLE_map'
con_id = ''
if 'contribution' in con.tables:
if 'id' in con.tables['contribution'].df.columns:
con_id = con.tables['contribution'].df.iloc[0]['id']
loc_string = ""
if 'locations' in con.tables:
num_locs = len(con.tables['locations'].df.index.unique())
loc_string = "{} location{}".format(num_locs, 's' if num_locs > 1 else '')
num_lats = len([lat for lat in lats if lat > 0])
num_rlats = len(rlats)
npole_string = ""
rpole_string = ""
if num_lats:
npole_string = "{} normal ".format(num_lats) #, 's' if num_lats > 1 else '')
if num_rlats:
rpole_string = "{} reverse".format(num_rlats)
if num_lats + num_rlats > 1:
pole_string = "poles"
elif num_lats + num_rlats == 0:
pole_string = ""
else:
pole_string = "pole"
title = "MagIC contribution {}\n {} {}{} {}".format(con_id, loc_string, npole_string, rpole_string, pole_string)
titles['map'] = title.replace('  ', ' ')
FIG = pmagplotlib.add_borders(FIG, titles, black, purple, con_id)
saved = pmagplotlib.save_plots(FIG, files)
elif interactive:
pmagplotlib.draw_figs(FIG)
ans = input(" S[a]ve to save plot, Return to quit: ")
if ans == "a":
saved = pmagplotlib.save_plots(FIG, files)
else:
print("Good bye")
elif save_plots:
saved = pmagplotlib.save_plots(FIG, files)
return True, saved
def collect_trajectories(env,
policy_fun,
num_trajectories=1,
policy="greedy",
max_timestep=None,
epsilon=0.1):
"""Collect trajectories with the given policy net and behaviour.
Args:
env: A gym env interface, for now this is not-batched.
policy_fun: observations(B,T+1) -> log-probabs(B,T+1, A) callable.
num_trajectories: int, number of trajectories.
policy: string, "greedy", "epsilon-greedy", or "categorical-sampling" i.e.
how to use the policy_fun to return an action.
max_timestep: int or None, the index of the maximum time-step at which we
return the trajectory, None for ending a trajectory only when env
returns done.
epsilon: float, the epsilon for `epsilon-greedy` policy.
Returns:
trajectory: list of (observation, action, reward) tuples, where each element
`i` is a tuple of numpy arrays with shapes as follows:
observation[i] = (B, T_i + 1)
action[i] = (B, T_i)
reward[i] = (B, T_i)
"""
trajectories = []
for t in range(num_trajectories):
t_start = time.time()
rewards = []
actions = []
done = False
observation = env.reset()
# This is currently shaped (1, 1) + OBS, but new observations will keep
# getting added to it, making it eventually (1, T+1) + OBS
observation_history = observation[np.newaxis, np.newaxis, :]
# Run either till we're done OR if max_timestep is defined only till that
# timestep.
ts = 0
while ((not done) and
(not max_timestep or observation_history.shape[1] < max_timestep)):
ts_start = time.time()
# Run the policy, to pick an action, shape is (1, t, A) because
# observation_history is shaped (1, t) + OBS
predictions = policy_fun(observation_history)
# We need the predictions for the last time-step, so squeeze the batch
# dimension and take the last time-step.
predictions = np.squeeze(predictions, axis=0)[-1]
# Policy can be run in one of the following ways:
# - Greedy
# - Epsilon-Greedy
# - Categorical-Sampling
action = None
if policy == "greedy":
action = np.argmax(predictions)
elif policy == "epsilon-greedy":
# A schedule for epsilon is 1/k where k is the episode number sampled.
if onp.random.random() < epsilon:
# Choose an action at random.
action = onp.random.randint(0, high=len(predictions))
else:
# Return the best action.
action = np.argmax(predictions)
elif policy == "categorical-sampling":
# NOTE: The predictions aren't probabilities but log-probabilities
# instead, since they were computed with LogSoftmax.
# So just np.exp them to make them probabilities.
predictions = np.exp(predictions)
action = onp.argwhere(onp.random.multinomial(1, predictions) == 1)
else:
raise ValueError("Unknown policy: %s" % policy)
# NOTE: Assumption, single batch.
try:
action = int(action)
except TypeError as err:
# Let's dump some information before we die off.
logging.error("Cannot convert action into an integer: [%s]", err)
logging.error("action.shape: [%s]", action.shape)
logging.error("action: [%s]", action)
logging.error("predictions.shape: [%s]", predictions.shape)
logging.error("predictions: [%s]", predictions)
logging.error("observation_history: [%s]", observation_history)
raise err
observation, reward, done, _ = env.step(action)
# observation is of shape OBS, so add extra dims and concatenate on the
# time dimension.
observation_history = np.concatenate(
[observation_history, observation[np.newaxis, np.newaxis, :]], axis=1)
rewards.append(reward)
actions.append(action)
ts += 1
logging.vlog(
2, " Collected time-step[ %5d] of trajectory[ %5d] in [%0.2f] msec.",
ts, t, get_time(ts_start))
logging.vlog(
2, " Collected trajectory[ %5d] in [%0.2f] msec.", t, get_time(t_start))
# This means we are done or we've been terminated early.
assert done or (
max_timestep and max_timestep >= observation_history.shape[1])
# observation_history is (1, T+1) + OBS, lets squeeze out the batch dim.
observation_history = np.squeeze(observation_history, axis=0)
trajectories.append(
(observation_history, np.stack(actions), np.stack(rewards)))
return trajectories
def mme_matches(case_obj, institute_obj, mme_base_url, mme_token):
"""Show Matchmaker submission data for a sample and eventual matches.
Args:
case_obj(dict): a scout case object
institute_obj(dict): an institute object
mme_base_url(str) base url of the MME server
mme_token(str) auth token of the MME server
Returns:
data(dict): data to display in the html template
"""
data = {
'institute' : institute_obj,
'case' : case_obj,
'server_errors' : []
}
matches = {}
# loop over the submitted samples and get matches from the MatchMaker server
if not case_obj.get('mme_submission'):
return None
for patient in case_obj['mme_submission']['patients']:
patient_id = patient['id']
matches[patient_id] = None
url = ''.join([ mme_base_url, '/matches/', patient_id])
server_resp = matchmaker_request(url=url, token=mme_token, method='GET')
if 'status_code' in server_resp: # the server returned a valid response
# and this will be a list of match objects sorted by desc date
pat_matches = []
if server_resp.get('matches'):
pat_matches = parse_matches(patient_id, server_resp['matches'])
matches[patient_id] = pat_matches
else:
LOG.warning('Server returned error message: {}'.format(server_resp['message']))
data['server_errors'].append(server_resp['message'])
data['matches'] = matches
return data
def _in6_getifaddr(ifname):
"""
Returns a list of IPv6 addresses configured on the interface ifname.
"""
# Get the output of ifconfig
try:
f = os.popen("%s %s" % (conf.prog.ifconfig, ifname))
except OSError:
log_interactive.warning("Failed to execute ifconfig.")
return []
# Iterate over lines and extract IPv6 addresses
ret = []
for line in f:
if "inet6" in line:
addr = line.rstrip().split(None, 2)[1] # The second element is the IPv6 address # noqa: E501
else:
continue
if '%' in line: # Remove the interface identifier if present
addr = addr.split("%", 1)[0]
# Check if it is a valid IPv6 address
try:
inet_pton(socket.AF_INET6, addr)
except (socket.error, ValueError):
continue
# Get the scope and keep the address
scope = in6_getscope(addr)
ret.append((addr, scope, ifname))
return ret
def get_events(self, from_=None, to=None):
"""Query a slice of the events.
Events are always returned in the order the were added.
Parameters:
from_ -- if not None, return only events added after the event with
id `from_`. If None, return from the start of history.
to -- if not None, return only events added before, and
including, the event with event id `to`. If None, return up
to, and including, the last added event.
returns -- an iterable of (event id, eventdata) tuples.
"""
if from_ and (from_ not in self.keys or from_ not in self.events):
raise EventStore.EventKeyDoesNotExistError(
"Could not find the from_ key: {0}".format(from_))
if to and (to not in self.keys or to not in self.events):
raise EventStore.EventKeyDoesNotExistError(
"Could not find the from_ key: {0}".format(to))
# +1 here because we have already seen the event we are asking for
fromindex = self.keys.index(from_) + 1 if from_ else 0
toindex = self.keys.index(to) + 1 if to else len(self.events)
if fromindex > toindex:
msg = ("'From' index came after 'To'."
" Keys: ({0}, {1})"
" Indices: ({2}, {3})").format(from_, to, fromindex,
toindex)
raise EventOrderError(msg)
return ((key, self.events[key])
for key in self.keys[fromindex:toindex])
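# Worked example of the index arithmetic above: from_ is exclusive, to is
# inclusive. A minimal stand-in with the keys/events attributes the method
# reads (hypothetical, for illustration only):
class _ToyStore:
    def __init__(self):
        self.keys = ['a', 'b', 'c']
        self.events = {'a': 1, 'b': 2, 'c': 3}

_ToyStore.get_events = get_events
print(list(_ToyStore().get_events(from_='a', to='c')))  # [('b', 2), ('c', 3)]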
def mangleNec(code, freq=40):
"""Convert NEC code to shorthand notation"""
# base time is 550 microseconds
# unit of burst time
# lead in pattern: 214d 10b3
# "1" burst pattern: 0226 0960
# "0" burst pattern: 0226 0258
# lead out pattern: 0226 2000
# there's large disagreement between devices as to a common preamble
# or the "long" off period for the representation of a binary 1
# thus we can't construct a code suitable for transmission
# without more information--but it's good enough for creating
# a shorthand representaiton for use with recv
timings = []
for octet in binascii.unhexlify(code.replace(" ", "")):
burst = lambda x: x and "0226 06AD" or "0226 0258"
for bit in reversed("%08d" % int(bin(ord(octet))[2:])):
bit = int(bit)
timings.append(burst(bit))
return mangleIR("K %0X22 214d 10b3 " % freq + " ".join(timings) + " 0226 2000")
def discrete(self):
"""
A sequence of connected vertices in space, corresponding to
self.paths.
Returns
---------
discrete : (len(self.paths),)
A sequence of (m*, dimension) float
"""
discrete = np.array([self.discretize_path(i)
for i in self.paths])
return discrete
def max(self, axis=None, keepdims=False):
"""
Return the maximum of the array over the given axis.
Parameters
----------
axis : tuple or int, optional, default=None
Axis to compute statistic over, if None
will compute over all axes
keepdims : boolean, optional, default=False
Keep axis remaining after operation with size 1.
"""
from numpy import maximum
return self._stat(axis, func=maximum, keepdims=keepdims)
def game(self):
"""
If the message media is a game, this returns the :tl:`Game`.
"""
if isinstance(self.media, types.MessageMediaGame):
return self.media.game
def set_hflip(self, val):
"""val is True or False that determines if we should horizontally flip the surface or not."""
if self.__horizontal_flip is not val:
self.__horizontal_flip = val
self.image = pygame.transform.flip(self.untransformed_image, val, self.__vertical_flip)
def text(self):
"""Get value of output stream (StringIO).
"""
if self.out:
self.out.close() # pragma: nocover
return self.fp.getvalue()
def _copy_with_dtype(data, dtype):
"""Create a copy of an array with the given dtype.
We use this instead of np.array() to ensure that custom object dtypes end
up on the resulting array.
"""
result = np.empty(data.shape, dtype)
result[...] = data
return result
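# Sketch of why assignment is used instead of np.array(): with an object
# dtype the elements are carried over as-is, e.g. Fraction instances survive.
from fractions import Fraction
import numpy as np

data = np.array([Fraction(1, 3), Fraction(2, 3)], dtype=object)
copied = _copy_with_dtype(data, object)
print(copied[0] + copied[1])  # 1 -- the elements are still Fraction objects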
def delete_objective(self, objective_id=None):
"""Deletes the Objective identified by the given Id.
arg: objectiveId (osid.id.Id): the Id of the Objective to
delete
raise: NotFound - an Objective was not found identified by the
given Id
raise: NullArgument - objectiveId is null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
compliance: mandatory - This method must be implemented.
"""
if objective_id is None:
raise NullArgument()
if not isinstance(objective_id, abc_id):
raise InvalidArgument('argument type is not an osid Id')
url_path = construct_url('objectives',
bank_id=self._catalog_idstr,
obj_id=objective_id)
result = self._delete_request(url_path)
return objects.Objective(result)
def send(self, url, data, headers):
"""
Spawn an async request to a remote webserver.
"""
eventlet.spawn(self._send_payload, (url, data, headers))
def update_attribute_value_items(self):
"""
Returns an iterator of items for an attribute value map to use for
an UPDATE operation.
The iterator ignores collection attributes as these are processed
implicitly by the traversal algorithm.
:returns: iterator yielding tuples with objects implementing
:class:`everest.resources.interfaces.IResourceAttribute` as the
first and the proxied attribute value as the second argument.
"""
for attr in self._attribute_iterator():
if attr.kind != RESOURCE_ATTRIBUTE_KINDS.COLLECTION:
try:
attr_val = self._get_proxied_attribute_value(attr)
except AttributeError:
continue
else:
yield (attr, attr_val)
def git_hash(blob):
"""Return git-hash compatible SHA-1 hexdigits for a blob of data."""
head = str("blob " + str(len(blob)) + "\0").encode("utf-8")
return sha1(head + blob).hexdigest()
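# Sanity check (assumes the module-level `from hashlib import sha1` the
# function above relies on); git itself produces the same digest via
#   printf 'hello\n' | git hash-object --stdin
from hashlib import sha1
print(git_hash(b"hello\n"))  # ce013625030ba8dba906f756967f9e9ca394464a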
def get_crawler(self, crawler, url):
"""
Checks if a crawler supports a website (the website offers e.g. RSS
or sitemap) and falls back to the fallbacks defined in the config if
the site is not supported.
:param str crawler: Crawler-string (from the crawler-module)
:param str url: the url this crawler is supposed to be loaded with
:rtype: crawler-class or None
"""
checked_crawlers = []
while crawler is not None and crawler not in checked_crawlers:
checked_crawlers.append(crawler)
current = self.get_crawler_class(crawler)
if hasattr(current, "supports_site"):
supports_site = getattr(current, "supports_site")
if callable(supports_site):
if supports_site(url):
self.log.debug("Using crawler %s for %s.",
crawler, url)
return current
elif (crawler in self.cfg_crawler["fallbacks"] and
self.cfg_crawler["fallbacks"][crawler] is not None):
self.log.warn("Crawler %s not supported by %s. "
"Trying to fall back.", crawler, url)
crawler = self.cfg_crawler["fallbacks"][crawler]
else:
self.log.error("No crawlers (incl. fallbacks) "
"found for url %s.", url)
raise RuntimeError("No crawler found. Quit.")
else:
self.log.warning("The crawler %s has no "
"supports_site-method defined", crawler)
return current
self.log.error("Could not fall back since you created a fall back "
"loop for %s in the config file.", crawler)
sys.exit(1)
def planSummary(self):
"""Extract planSummary if available (lazy)."""
if not self._counters_calculated:
self._counters_calculated = True
self._extract_counters()
return self._planSummary
def add_membership_listener(self, member_added=None, member_removed=None, fire_for_existing=False):
"""
Helper method for adding membership listeners
:param member_added: (Function), Function to be called when a member is added, in the form of f(member)
(optional).
:param member_removed: (Function), Function to be called when a member is removed, in the form of f(member)
(optional).
:param fire_for_existing: if True, already existing members will fire member_added event (optional).
:return: `self` for cascading configuration
"""
self.membership_listeners.append((member_added, member_removed, fire_for_existing))
return self
def Units(uname):
"""Generate a unit object.
Parameters
----------
uname : str
Wavelength or flux unit name.
Returns
-------
unit : `BaseUnit` or `None`
Unit object. `None` means unitless.
Raises
------
ValueError
Unknown unit name.
"""
if isinstance(uname,BaseUnit):
return uname
else:
try:
if issubclass(uname,BaseUnit):
return uname()
except TypeError:
try:
return factory(uname)
except KeyError:
if uname == str(None):
return None
else:
raise ValueError("Unknown units %s"%uname)
async def send_message():
"""Example of sending a message."""
jar = aiohttp.CookieJar(unsafe=True)
websession = aiohttp.ClientSession(cookie_jar=jar)
modem = eternalegypt.Modem(hostname=sys.argv[1], websession=websession)
await modem.login(password=sys.argv[2])
await modem.sms(phone=sys.argv[3], message=sys.argv[4])
await modem.logout()
await websession.close()
def get_lat_variable(nc):
'''
Returns the variable for latitude
:param netcdf4.dataset nc: an open netcdf dataset object
'''
if 'latitude' in nc.variables:
return 'latitude'
latitudes = nc.get_variables_by_attributes(standard_name="latitude")
if latitudes:
return latitudes[0].name
return None
def get_next_item(iterable):
"""Gets the next item of an iterable.
If the iterable is exhausted, returns None."""
try: x = iterable.next()
except StopIteration: x = None
except AttributeError: x = None
return x
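# Note: iterable.next() is the Python 2 protocol; on Python 3 the built-in
# next() with a default gives the same "None when exhausted" behaviour:
it = iter([1, 2])
print(next(it, None), next(it, None), next(it, None))  # 1 2 None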
def init_object(self, args, kwargs):
"""This method is reponsible for setting :attr:`obj`.
It is called during :meth:`prepare_args`.
"""
self.object_id = kwargs.pop(self.pk, None)
if self.object_id is not None:
self.obj = self.Model.query.get(self.object_id)
actions.context["object"] = self.obj
return args, kwargs
def exists(self, model_class, ID):
'''Check if a record of id==ID exists in table model_class.__name__.lower()'''
assert hasattr(model_class, '_fields'), 'Not a valid model class'
res = self.get(model_class, id=ID, fetchOne=True)
if res:
return True
return False
def query(self, resource):
"""
Given a resource identifier find the domain specifier and then
construct the webfinger request. Implements
http://openid.net/specs/openid-connect-discovery-1_0.html#NormalizationSteps
:param resource:
"""
if resource[0] in ['=', '@', '!']: # Have no process for handling these
raise ValueError('Not allowed resource identifier')
try:
part = urlsplit(resource)
except Exception:
raise ValueError('Unparsable resource')
else:
if not part[SCHEME]:
if not part[NETLOC]:
_path = part[PATH]
if not part[QUERY] and not part[FRAGMENT]:
if '/' in _path or ':' in _path:
resource = "https://{}".format(resource)
part = urlsplit(resource)
authority = part[NETLOC]
else:
if '@' in _path:
authority = _path.split('@')[1]
else:
authority = _path
resource = 'acct:{}'.format(_path)
elif part[QUERY]:
resource = "https://{}?{}".format(_path, part[QUERY])
parts = urlsplit(resource)
authority = parts[NETLOC]
else:
resource = "https://{}".format(_path)
part = urlsplit(resource)
authority = part[NETLOC]
else:
raise ValueError('Missing netloc')
else:
_scheme = part[SCHEME]
if _scheme not in ['http', 'https', 'acct']:
# assume it to be a hostname port combo,
# eg. example.com:8080
resource = 'https://{}'.format(resource)
part = urlsplit(resource)
authority = part[NETLOC]
resource = self.create_url(part, [FRAGMENT])
elif _scheme in ['http', 'https'] and not part[NETLOC]:
raise ValueError(
'No authority part in the resource specification')
elif _scheme == 'acct':
_path = part[PATH]
for c in ['/', '?']:
_path = _path.split(c)[0]
if '@' in _path:
authority = _path.split('@')[1]
else:
raise ValueError(
'No authority part in the resource specification')
authority = authority.split('#')[0]
resource = self.create_url(part, [FRAGMENT])
else:
authority = part[NETLOC]
resource = self.create_url(part, [FRAGMENT])
location = WF_URL.format(authority)
return oidc.WebFingerRequest(
resource=resource, rel=OIC_ISSUER).request(location)
def _local_call(self, call_conf):
'''
Execute local call
'''
try:
ret = self._get_caller(call_conf).call()
except SystemExit:
ret = 'Data is not available at this moment'
self.out.error(ret)
except Exception as ex:
ret = 'Unhandled exception occurred: {}'.format(ex)
log.debug(ex, exc_info=True)
self.out.error(ret)
return ret
def get_domain(self, name):
"""
Get information about a domain. Docs: https://developer.qiniu.com/fusion/api/4246/the-domain-name
Args:
    name: the domain name; a wildcard domain must start with a dot (.)
Returns:
    a tuple (<result>, <ResponseInfo>):
    - result: a dict on success, or {"error": "<errMsg string>"} on failure
    - ResponseInfo: the response information for the request
"""
url = '{0}/domain/{1}'.format(self.server, name)
return self.__post(url)
def attach(self):
"""Attach strategy to its sensor and send initial update."""
s = self._sensor
self.update(s, s.read())
self._sensor.attach(self)
def _siftup_max(heap, pos):
'Maxheap variant of _siftup'
endpos = len(heap)
startpos = pos
newitem = heap[pos]
# Bubble up the larger child until hitting a leaf.
childpos = 2*pos + 1 # leftmost child position
while childpos < endpos:
# Set childpos to index of larger child.
rightpos = childpos + 1
if rightpos < endpos and not heap[rightpos] < heap[childpos]:
childpos = rightpos
# Move the larger child up.
heap[pos] = heap[childpos]
pos = childpos
childpos = 2*pos + 1
# The leaf at pos is empty now. Put newitem there, and bubble it up
# to its final resting place (by sifting its parents down).
heap[pos] = newitem
_siftdown_max(heap, startpos, pos)
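# Building a complete max-heap with the sift helper above (this assumes the
# companion _siftdown_max, referenced on the last line, is also defined, as
# in CPython's heapq). Standard bottom-up heapify:
def _heapify_max_toy(heap):
    for i in reversed(range(len(heap) // 2)):
        _siftup_max(heap, i)

_toy = [3, 1, 4, 1, 5, 9, 2, 6]
_heapify_max_toy(_toy)
print(_toy[0])  # 9 -- the maximum ends up at the root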
def extract(data, items, out_dir=None):
"""Extract germline calls for the given sample, if tumor only.
"""
if vcfutils.get_paired_phenotype(data):
if len(items) == 1:
germline_vcf = _remove_prioritization(data["vrn_file"], data, out_dir)
germline_vcf = vcfutils.bgzip_and_index(germline_vcf, data["config"])
data["vrn_file_plus"] = {"germline": germline_vcf}
return data
def to_jupyter(graph: BELGraph, chart: Optional[str] = None) -> Javascript:
"""Render the graph as JavaScript in a Jupyter Notebook."""
with open(os.path.join(HERE, 'render_with_javascript.js'), 'rt') as f:
js_template = Template(f.read())
return Javascript(js_template.render(**_get_context(graph, chart=chart)))
def build_conflict_dict(key_list, val_list):
"""
Builds dict where a list of values is associated with more than one key
Args:
key_list (list):
val_list (list):
Returns:
dict: key_to_vals
CommandLine:
python -m utool.util_dict --test-build_conflict_dict
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_dict import * # NOQA
>>> import utool as ut
>>> key_list = [ 1, 2, 2, 3, 1]
>>> val_list = ['a', 'b', 'c', 'd', 'e']
>>> key_to_vals = build_conflict_dict(key_list, val_list)
>>> result = ut.repr4(key_to_vals)
>>> print(result)
{
1: ['a', 'e'],
2: ['b', 'c'],
3: ['d'],
}
"""
key_to_vals = defaultdict(list)
for key, val in zip(key_list, val_list):
key_to_vals[key].append(val)
return key_to_vals
def run_once_per_node(func):
"""
Decorator preventing wrapped function from running more than
once per host (not just interpreter session).
Using env.patch = True will allow the wrapped function to be run
if it has been previously executed, but not otherwise
Stores the result of a function as server state
"""
@wraps(func)
def decorated(*args, **kwargs):
if not hasattr(env,'patch'): env.patch = False
state = version_state(func.__name__)
if not env.patch and state:
verbose = " ".join([env.host,func.__name__,"completed. Skipping..."])
elif env.patch and not state:
verbose = " ".join([env.host,func.__name__,"not previously completed. Skipping..."])
else:
results = func(*args, **kwargs)
verbose =''
if results: set_version_state(func.__name__,object=results)
else: set_version_state(func.__name__)
return results
if env.verbosity and verbose: print(verbose)
return
return decorated
def sde(self):
"""
Return the state space representation of the standard periodic covariance.
! Note: one must constrain the lengthscale not to drop below 0.2
(independently of the approximation order). Below this, Bessel functions
of the first kind become NaN. Rescaling the time variable might help.
! Note: one must also keep the period from being very low, because then
the gradients w.r.t. the wavelength become unstable.
However, this might depend on the data. For a test example with
300 data points the lower limit is 0.15.
#import pdb; pdb.set_trace()
# Params to use: (in that order)
#self.variance
#self.period
#self.lengthscale
if self.approx_order is not None:
N = int(self.approx_order)
else:
N = 7 # approximation order
p_period = float(self.period)
p_lengthscale = 2*float(self.lengthscale)
p_variance = float(self.variance)
w0 = 2*np.pi/p_period # frequency
# lengthscale is multiplied by 2 because of different definition of lengthscale
[q2,dq2l] = seriescoeff(N, p_lengthscale, p_variance)
dq2l = 2*dq2l # This is because the lengthscale is multiplied by 2.
eps = 1e-12
if np.any( np.isfinite(q2) == False) or np.any( np.abs(q2) > 1.0/eps) or np.any( np.abs(q2) < eps):
warnings.warn("sde_Periodic: Infinite, too small, or too large (eps={0:e}) values in q2 :".format(eps) + q2.__format__("") )
if np.any( np.isfinite(dq2l) == False) or np.any( np.abs(dq2l) > 1.0/eps) or np.any( np.abs(dq2l) < eps):
warnings.warn("sde_Periodic: Infinite, too small, or too large (eps={0:e}) values in dq2l :".format(eps) + q2.__format__("") )
F = np.kron(np.diag(range(0,N+1)),np.array( ((0, -w0), (w0, 0)) ) )
L = np.eye(2*(N+1))
Qc = np.zeros((2*(N+1), 2*(N+1)))
P_inf = np.kron(np.diag(q2),np.eye(2))
H = np.kron(np.ones((1,N+1)),np.array((1,0)) )
P0 = P_inf.copy()
# Derivatives
dF = np.empty((F.shape[0], F.shape[1], 3))
dQc = np.empty((Qc.shape[0], Qc.shape[1], 3))
dP_inf = np.empty((P_inf.shape[0], P_inf.shape[1], 3))
# Derivatives wrt self.variance
dF[:,:,0] = np.zeros(F.shape)
dQc[:,:,0] = np.zeros(Qc.shape)
dP_inf[:,:,0] = P_inf / p_variance
# Derivatives self.period
dF[:,:,1] = np.kron(np.diag(range(0,N+1)),np.array( ((0, w0), (-w0, 0)) ) / p_period );
dQc[:,:,1] = np.zeros(Qc.shape)
dP_inf[:,:,1] = np.zeros(P_inf.shape)
# Derivatives self.lengthscales
dF[:,:,2] = np.zeros(F.shape)
dQc[:,:,2] = np.zeros(Qc.shape)
dP_inf[:,:,2] = np.kron(np.diag(dq2l),np.eye(2))
dP0 = dP_inf.copy()
if self.balance:
# Benefits of this are not very sound.
import GPy.models.state_space_main as ssm
(F, L, Qc, H, P_inf, P0, dF, dQc, dP_inf,dP0) = ssm.balance_ss_model(F, L, Qc, H, P_inf, P0, dF, dQc, dP_inf, dP0 )
return (F, L, Qc, H, P_inf, P0, dF, dQc, dP_inf, dP0)
def method_args(self, context, **kwargs):
"""
Collect the set of arguments that should be used by a set of methods
:param context: Which service we're working for
:param kwargs: A set of keyword arguments that are added at run-time.
:return: A set of keyword arguments
"""
try:
_args = self.conf[context].copy()
except KeyError:
_args = kwargs
else:
_args.update(kwargs)
return _args
def wait_for(self, timeout=None):
""" When a timeout should be applied for awaiting use this method.
:param timeout: optional timeout in seconds.
:returns: a future returning the emitted value
"""
from broqer.op import OnEmitFuture  # due to circular dependency
return self | OnEmitFuture(timeout=timeout)
def multiqc(institute_id, case_name):
"""Load multiqc report for the case."""
data = controllers.multiqc(store, institute_id, case_name)
if data['case'].get('multiqc') is None:
return abort(404)
out_dir = os.path.abspath(os.path.dirname(data['case']['multiqc']))
filename = os.path.basename(data['case']['multiqc'])
return send_from_directory(out_dir, filename)
def get_f_code_idx():
"""
How many pointers into PyFrame is the ``f_code`` variable?
"""
frame = sys._getframe()
frame_ptr = id(frame)
LARGE_ENOUGH = 20
# Look through the frame object until we find the f_code pointer, whose
# value we know from above.
ptrs = [ctypes.c_voidp.from_address(frame_ptr+i*svp)
for i in range(LARGE_ENOUGH)]
# Find its index into the structure
ptrs = [p.value for p in ptrs]
fcode_ptr = id(frame.f_code)
try:
threadstate_idx = ptrs.index(fcode_ptr)
except ValueError:
log.critical("rootpy bug! Please report this.")
raise
return threadstate_idx
def console_get_height(con: tcod.console.Console) -> int:
"""Return the height of a console.
Args:
con (Console): Any Console instance.
Returns:
int: The height of a Console.
.. deprecated:: 2.0
Use `Console.height` instead.
"""
return int(lib.TCOD_console_get_height(_console(con)))
def should_use_ephemeral_cache(
req, # type: InstallRequirement
format_control, # type: FormatControl
autobuilding, # type: bool
cache_available # type: bool
):
# type: (...) -> Optional[bool]
"""
Return whether to build an InstallRequirement object using the
ephemeral cache.
:param cache_available: whether a cache directory is available for the
autobuilding=True case.
:return: True or False to build the requirement with ephem_cache=True
or False, respectively; or None not to build the requirement.
"""
if req.constraint:
return None
if req.is_wheel:
if not autobuilding:
logger.info(
'Skipping %s, due to already being wheel.', req.name,
)
return None
if not autobuilding:
return False
if req.editable or not req.source_dir:
return None
if req.link and not req.link.is_artifact:
# VCS checkout. Build wheel just for this run.
return True
if "binary" not in format_control.get_allowed_formats(
canonicalize_name(req.name)):
logger.info(
"Skipping bdist_wheel for %s, due to binaries "
"being disabled for it.", req.name,
)
return None
link = req.link
base, ext = link.splitext()
if cache_available and _contains_egg_info(base):
return False
# Otherwise, build the wheel just for this run using the ephemeral
# cache since we are either in the case of e.g. a local directory, or
# no cache directory is available to use.
return True
def _evaluate_barycentric_multi(nodes, degree, param_vals, dimension):
r"""Compute multiple points on the surface.
.. note::
There is also a Fortran implementation of this function, which
will be used if it can be built.
Args:
nodes (numpy.ndarray): Control point nodes that define the surface.
degree (int): The degree of the surface define by ``nodes``.
param_vals (numpy.ndarray): Array of parameter values (as a
``N x 3`` array).
dimension (int): The dimension the surface lives in.
Returns:
numpy.ndarray: The evaluated points, where columns correspond to
rows of ``param_vals`` and the rows to the dimension of the
underlying surface.
"""
num_vals, _ = param_vals.shape
result = np.empty((dimension, num_vals), order="F")
for index, (lambda1, lambda2, lambda3) in enumerate(param_vals):
result[:, index] = evaluate_barycentric(
nodes, degree, lambda1, lambda2, lambda3
)[:, 0]
return result
def encode_hdr(self, boundary):
"""Returns the header of the encoding of this parameter"""
boundary = encode_and_quote(boundary)
headers = ["--%s" % boundary]
if self.filename:
disposition = 'form-data; name="%s"; filename="%s"' % (self.name,
self.filename)
else:
disposition = 'form-data; name="%s"' % self.name
headers.append("Content-Disposition: %s" % disposition)
if self.filetype:
filetype = self.filetype
else:
filetype = "text/plain; charset=utf-8"
headers.append("Content-Type: %s" % filetype)
headers.append("")
headers.append("")
return "\r\n".join(headers)
def set_coordsys(self):
"""
Mapping to astropy's coordinate system name
# TODO: needs expert attention (Most reference systems are not mapped)
"""
if self.coordsys.lower() in self.coordsys_mapping:
self.coordsys = self.coordsys_mapping[self.coordsys.lower()]
def _handle_is_dag_stopped(self, request):
""" The handler for the dag_stopped request.
The dag_stopped request checks whether a dag is flagged to be terminated.
Args:
request (Request): Reference to a request object containing the
incoming request. The payload has to contain the
following fields:
'dag_name': the name of the dag that should be checked
Returns:
Response: A response object containing the following fields:
- is_stopped: True if the dag is flagged to be stopped.
"""
return Response(success=True,
uid=request.uid,
payload={
'is_stopped': request.payload['dag_name'] in self._stop_dags
}) | 0.006674 |
def pause_unit(assess_status_func, services=None, ports=None,
charm_func=None):
"""Pause a unit by stopping the services and setting 'unit-paused'
in the local kv() store.
Also checks that the services have stopped and ports are no longer
being listened to.
    An optional charm_func() can be called that can either raise an
    Exception or return a non-None message to indicate that the unit
    didn't pause cleanly.
The signature for charm_func is:
charm_func() -> message: string
charm_func() is executed after any services are stopped, if supplied.
The services object can either be:
- None : no services were passed (an empty dict is returned)
- a list of strings
- A dictionary (optionally OrderedDict) {service_name: {'service': ..}}
- An array of [{'service': service_name, ...}, ...]
@param assess_status_func: (f() -> message: string | None) or None
@param services: OPTIONAL see above
@param ports: OPTIONAL list of port
@param charm_func: function to run for custom charm pausing.
@returns None
@raises Exception(message) on an error for action_fail().
"""
_, messages = manage_payload_services(
'pause',
services=services,
charm_func=charm_func)
set_unit_paused()
if assess_status_func:
message = assess_status_func()
if message:
messages.append(message)
if messages and not is_unit_upgrading_set():
raise Exception("Couldn't pause: {}".format("; ".join(messages))) | 0.000642 |
def search(self, **kwargs):
"""
Method to search Virtual Interfaces based on extends search.
:param search: Dict containing QuerySets to find Virtual Interfaces.
:param include: Array containing fields to include on response.
:param exclude: Array containing fields to exclude on response.
:param fields: Array containing fields to override default fields.
:param kind: Determine if result will be detailed ('detail') or basic ('basic').
:return: Dict containing Virtual Interfaces
"""
return super(ApiV4VirtualInterface, self).get(self.prepare_url(
'api/v4/virtual-interface/', kwargs)) | 0.004399 |
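A hypothetical call on an existing ``api`` instance; the QuerySet-style keys inside ``search`` are illustrative assumptions, not the documented schema:

api.search(search={'name': 'vi-1'}, kind='basic', fields=['id', 'name'])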
def __put_buttons_in_buttonframe(choices):
"""Put the buttons in the buttons frame"""
global __widgetTexts, __firstWidget, buttonsFrame
__firstWidget = None
__widgetTexts = {}
i = 0
for buttonText in choices:
tempButton = tk.Button(buttonsFrame, takefocus=1, text=buttonText)
_bindArrows(tempButton)
tempButton.pack(expand=tk.YES, side=tk.LEFT, padx='1m', pady='1m', ipadx='2m', ipady='1m')
# remember the text associated with this widget
__widgetTexts[tempButton] = buttonText
# remember the first widget, so we can put the focus there
if i == 0:
__firstWidget = tempButton
i = 1
# for the commandButton, bind activation events to the activation event handler
commandButton = tempButton
handler = __buttonEvent
for selectionEvent in STANDARD_SELECTION_EVENTS:
commandButton.bind('<%s>' % selectionEvent, handler)
if CANCEL_TEXT in choices:
commandButton.bind('<Escape>', __cancelButtonEvent) | 0.003745 |
def render_indirect(self, program: moderngl.Program, buffer, mode=None, count=-1, *, first=0):
"""
The render primitive (mode) must be the same as the input primitive of the GeometryShader.
The draw commands are 5 integers: (count, instanceCount, firstIndex, baseVertex, baseInstance).
Args:
program: The ``moderngl.Program``
buffer: The ``moderngl.Buffer`` containing indirect draw commands
Keyword Args:
mode (int): By default :py:data:`TRIANGLES` will be used.
count (int): The number of draws.
first (int): The index of the first indirect draw command.
"""
vao = self.instance(program)
if mode is None:
mode = self.mode
vao.render_indirect(buffer, mode=mode, count=count, first=first) | 0.005981 |
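A hedged sketch of filling such a buffer with the five int32 fields the docstring names; ``ctx``, ``program`` and ``vao`` are assumed pre-existing moderngl objects:

import struct

# Two indirect commands: (count, instanceCount, firstIndex, baseVertex, baseInstance).
commands = struct.pack('5i', 36, 10, 0, 0, 0) + struct.pack('5i', 36, 5, 36, 0, 0)
indirect = ctx.buffer(commands)          # ctx: an existing moderngl.Context
vao.render_indirect(program, indirect, count=2)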
def btc_tx_sign_multisig(tx, idx, redeem_script, private_keys, hashcode=SIGHASH_ALL):
"""
Sign a p2sh multisig input (not segwit!).
@tx must be a hex-encoded tx
Return the signed transaction
"""
from .multisig import parse_multisig_redeemscript
# sign in the right order. map all possible public keys to their private key
txobj = btc_tx_deserialize(str(tx))
privs = {}
for pk in private_keys:
pubk = ecdsalib.ecdsa_private_key(pk).public_key().to_hex()
compressed_pubkey = keylib.key_formatting.compress(pubk)
uncompressed_pubkey = keylib.key_formatting.decompress(pubk)
privs[compressed_pubkey] = pk
privs[uncompressed_pubkey] = pk
m, public_keys = parse_multisig_redeemscript(str(redeem_script))
used_keys, sigs = [], []
for public_key in public_keys:
if public_key not in privs:
continue
if len(used_keys) == m:
break
if public_key in used_keys:
raise ValueError('Tried to reuse key in redeem script: {}'.format(public_key))
pk_str = privs[public_key]
used_keys.append(public_key)
sig = btc_tx_make_input_signature(tx, idx, redeem_script, pk_str, hashcode)
sigs.append(sig)
if len(used_keys) != m:
raise ValueError('Missing private keys (used {}, required {})'.format(len(used_keys), m))
txobj["ins"][idx]["script"] = btc_script_serialize([None] + sigs + [redeem_script])
return btc_tx_serialize(txobj) | 0.005232 |
def register(self):
"""Proxy method to register the device with the parent.
"""
if not self.registered:
self.registered = True
if self.parent:
self.parent.register(self) | 0.008584 |
def getRaDecs(self, mods):
"""Internal function converting cartesian coords to
ra dec"""
        raDecOut = np.empty((len(mods), 5))
raDecOut[:,0:3] = mods[:,0:3]
for i, row in enumerate(mods):
raDecOut[i, 3:5] = r.raDecFromVec(row[3:6])
return raDecOut | 0.016287 |
def handle_500(self, request, exception):
# type: (BaseHttpRequest, BaseException) -> Resource
"""
Handle an *un-handled* exception.
"""
# Let middleware attempt to handle exception
try:
for middleware in self.middleware.handle_500:
resource = middleware(request, exception)
if resource:
return resource
except Exception as ex: # noqa - This is a top level handler
exception = ex
# Fallback to generic error
logger.exception('Internal Server Error: %s', exception, extra={
'status_code': 500,
'request': request
})
return Error.from_status(HTTPStatus.INTERNAL_SERVER_ERROR, 0,
"An unhandled error has been caught.") | 0.003576 |
def get_message_state_scope(self, msgid, line=None, confidence=UNDEFINED):
"""Returns the scope at which a message was enabled/disabled."""
if self.config.confidence and confidence.name not in self.config.confidence:
return MSG_STATE_CONFIDENCE
try:
if line in self.file_state._module_msgs_state[msgid]:
return MSG_STATE_SCOPE_MODULE
except (KeyError, TypeError):
return MSG_STATE_SCOPE_CONFIG
return None | 0.006036 |
def draw(self, img, pixmapper, bounds):
'''draw the trail'''
for p in self.points:
(px,py) = pixmapper(p)
if px >= 0 and py >= 0 and px < img.width and py < img.height:
cv.Circle(img, (px,py), 1, self.colour) | 0.015152 |
def get_all_conditions(self):
"""
Returns a generator which yields groups of lists of conditions.
>>> for set_id, label, field in gargoyle.get_all_conditions(): #doctest: +SKIP
>>> print "%(label)s: %(field)s" % (label, field.label) #doctest: +SKIP
"""
for condition_set in sorted(self.get_condition_sets(), key=lambda x: x.get_group_label()):
group = unicode(condition_set.get_group_label())
for field in condition_set.fields.itervalues():
yield condition_set.get_id(), group, field | 0.008681 |
def parseAcceptHeader(value):
"""Parse an accept header, ignoring any accept-extensions
    returns a list of tuples containing main MIME type, MIME subtype,
    and quality (q) value.
    str -> [(str, str, float)]
"""
chunks = [chunk.strip() for chunk in value.split(',')]
accept = []
for chunk in chunks:
parts = [s.strip() for s in chunk.split(';')]
mtype = parts.pop(0)
if '/' not in mtype:
# This is not a MIME type, so ignore the bad data
continue
main, sub = mtype.split('/', 1)
for ext in parts:
if '=' in ext:
k, v = ext.split('=', 1)
if k == 'q':
try:
q = float(v)
break
except ValueError:
# Ignore poorly formed q-values
pass
else:
q = 1.0
accept.append((q, main, sub))
accept.sort()
accept.reverse()
return [(main, sub, q) for (q, main, sub) in accept] | 0.00093 |
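Assuming the function above is in scope, a quick demonstration of the q-value ordering:

header = 'text/html;q=0.5, application/xhtml+xml, application/xml;q=0.9'
print(parseAcceptHeader(header))
# -> [('application', 'xhtml+xml', 1.0), ('application', 'xml', 0.9),
#     ('text', 'html', 0.5)]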
def getFirstUrn(self, reference=None):
""" Get the first children URN for a given resource
:param reference: CtsReference from which to find child (If None, find first reference)
:type reference: CtsReference, str
:return: Children URN
:rtype: URN
"""
if reference is not None:
if ":" in reference:
urn = reference
else:
urn = "{}:{}".format(
str(URN(str(self.urn)).upTo(URN.NO_PASSAGE)),
str(reference)
)
else:
urn = str(self.urn)
_first = _SharedMethod.firstUrn(
self.retriever.getFirstUrn(
urn
)
)
return _first | 0.003911 |
def google_register(username:str, email:str, full_name:str, google_id:int, bio:str, token:str=None):
"""
Register a new user from google.
    This can raise `exc.IntegrityError` exceptions in
    case of conflicts.
:returns: User
"""
auth_data_model = apps.get_model("users", "AuthData")
user_model = apps.get_model("users", "User")
try:
        # Does a google user association exist?
auth_data = auth_data_model.objects.get(key="google", value=google_id)
user = auth_data.user
except auth_data_model.DoesNotExist:
try:
            # Is there a user with the same email as the google user?
user = user_model.objects.get(email=email)
auth_data_model.objects.create(user=user, key="google", value=google_id, extra={})
except user_model.DoesNotExist:
# Create a new user
username_unique = slugify_uniquely(username, user_model, slugfield="username")
user = user_model.objects.create(email=email,
username=username_unique,
full_name=full_name,
bio=bio)
auth_data_model.objects.create(user=user, key="google", value=google_id, extra={})
send_register_email(user)
user_registered_signal.send(sender=user.__class__, user=user)
if token:
membership = get_membership_by_token(token)
membership.user = user
membership.save(update_fields=["user"])
return user | 0.00827 |
def calc_temp(Data_ref, Data):
"""
Calculates the temperature of a data set relative to a reference.
The reference is assumed to be at 300K.
Parameters
----------
Data_ref : DataObject
Reference data set, assumed to be 300K
Data : DataObject
Data object to have the temperature calculated for
Returns
-------
T : uncertainties.ufloat
The temperature of the data set
"""
T = 300 * ((Data.A * Data_ref.Gamma) / (Data_ref.A * Data.Gamma))
Data.T = T
return T | 0.001862 |
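A quick numeric check of the formula, with plain floats standing in for the ufloats and SimpleNamespace standing in for DataObject (only .A and .Gamma are read, .T is written):

from types import SimpleNamespace

ref = SimpleNamespace(A=2.0, Gamma=1.0)    # reference, assumed to be at 300 K
data = SimpleNamespace(A=2.0, Gamma=2.0)   # damping doubled -> half the temperature
print(calc_temp(ref, data))                # 300 * (2*1)/(2*2) = 150.0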
def slh_associate(a_features, b_features, max_sigma=5):
"""
An implementation of the Scott and Longuet-Higgins algorithm for feature
association.
This function takes two lists of features. Each feature is a
:py:class:`MultivariateNormal` instance representing a feature
location and its associated uncertainty.
Args:
a_features (list of MultivariateNormal)
b_features (list of MultivariateNormal)
max_sigma (float or int): maximum number of standard deviations two
features can be separated and still considered "associated".
Returns:
(array): A Nx2 array of feature associations. Column 0 is the index into
the a_features list, column 1 is the index into the b_features list.
"""
# Compute proximity matrix
proximity = _weighted_proximity(a_features, b_features)
# Compute association matrix
association_matrix = _proximity_to_association(proximity)
# Now build up list of associations
associations = []
if association_matrix.shape[0] == 0:
return np.zeros((0, 2))
# Compute column-wise maxima
col_max_idxs = np.argmax(association_matrix, axis=0)
prox_threshold = np.exp(-0.5*max_sigma*max_sigma)
# Find associations
for row_idx, row in enumerate(association_matrix):
if row.shape[0] == 0:
continue
# ... compute index of maximum element
col_idx = np.argmax(row)
# Is this row also the maximum in that column?
if col_max_idxs[col_idx] == row_idx:
prox = proximity[row_idx, col_idx]
if prox > prox_threshold:
associations.append((row_idx, col_idx))
if len(associations) == 0:
return np.zeros((0, 2))
return np.vstack(associations) | 0.001114 |
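The only role of max_sigma is the proximity cutoff exp(-max_sigma**2 / 2) applied above; a quick look at how permissive the default is:

import numpy as np

# max_sigma only enters through this threshold on the proximity matrix:
for sigma in (1, 2, 5):
    print(sigma, np.exp(-0.5 * sigma * sigma))
# 1 -> ~0.607, 2 -> ~0.135, 5 -> ~3.7e-06 (the default: very permissive)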
def on_key_release(self, symbol, modifiers):
"""
Pyglet specific key release callback.
Forwards and translates the events to :py:func:`keyboard_event`
"""
self.keyboard_event(symbol, self.keys.ACTION_RELEASE, modifiers) | 0.007576 |
def watch(self, path, action, *args, **kwargs):
"""
Called by the Server instance when a new watch task is requested.
"""
if action is None:
action = _set_changed
event_handler = _WatchdogHandler(self, action)
self._observer.schedule(event_handler, path=path, recursive=True) | 0.00597 |
def encrypt(self, data):
'''
encrypt data with AES-CBC and sign it with HMAC-SHA256
'''
aes_key, hmac_key = self.keys
pad = self.AES_BLOCK_SIZE - len(data) % self.AES_BLOCK_SIZE
if six.PY2:
data = data + pad * chr(pad)
else:
data = data + salt.utils.stringutils.to_bytes(pad * chr(pad))
iv_bytes = os.urandom(self.AES_BLOCK_SIZE)
if HAS_M2:
cypher = EVP.Cipher(alg='aes_192_cbc', key=aes_key, iv=iv_bytes, op=1, padding=False)
encr = cypher.update(data)
encr += cypher.final()
else:
cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes)
encr = cypher.encrypt(data)
data = iv_bytes + encr
sig = hmac.new(hmac_key, data, hashlib.sha256).digest()
return data + sig | 0.00355 |
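Since encrypt() returns iv_bytes + ciphertext + a 32-byte HMAC-SHA256 tag over iv + ciphertext, the matching verify-then-decrypt step looks roughly like this — a sketch assuming the PyCrypto branch, a 16-byte AES_BLOCK_SIZE, and Python 3, not the library's actual API:

import hashlib
import hmac

from Crypto.Cipher import AES   # PyCrypto path; the M2Crypto branch mirrors it

def decrypt(keys, blob, block_size=16):
    # blob layout: iv || ciphertext || 32-byte HMAC-SHA256 tag.
    aes_key, hmac_key = keys
    data, sig = blob[:-32], blob[-32:]
    expected = hmac.new(hmac_key, data, hashlib.sha256).digest()
    if not hmac.compare_digest(expected, sig):
        raise ValueError('HMAC verification failed')
    iv_bytes, encr = data[:block_size], data[block_size:]
    plain = AES.new(aes_key, AES.MODE_CBC, iv_bytes).decrypt(encr)
    return plain[:-plain[-1]]   # strip the pad-count padding encrypt() appended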
def subjects_create(self, filename):
"""Create subject from given data files. Expects the file to be a
Freesurfer archive.
Raises ValueError if given file is not a valid subject file.
Parameters
----------
filename : File-type object
Freesurfer archive file
Returns
-------
SubjectHandle
Handle for created subject in database
"""
# Ensure that the file name has a valid archive suffix
if get_filename_suffix(filename, ARCHIVE_SUFFIXES) is None:
raise ValueError('invalid file suffix: ' + os.path.basename(os.path.normpath(filename)))
# Create subject from archive. Raises exception if file is not a valid
# subject archive
return self.subjects.upload_file(filename) | 0.003636 |
def tuple_replace(tup, *pairs):
"""Return a copy of a tuple with some elements replaced.
:param tup: The tuple to be copied.
:param pairs: Any number of (index, value) tuples where index is the index
of the item to replace and value is the new value of the item.
"""
tuple_list = list(tup)
for index, value in pairs:
tuple_list[index] = value
return tuple(tuple_list) | 0.002427 |
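A quick demonstration (pairs are applied in order, so later pairs win when indices repeat):

print(tuple_replace(('a', 'b', 'c'), (0, 'X'), (2, 'Z')))   # ('X', 'b', 'Z')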
def compose_receipt(message):
"""
Compose a :xep:`184` delivery receipt for a :class:`~aioxmpp.Message`.
:param message: The message to compose the receipt for.
:type message: :class:`~aioxmpp.Message`
:raises ValueError: if the input message is of type
:attr:`~aioxmpp.MessageType.ERROR`
:raises ValueError: if the input message is a message receipt itself
:return: A message which serves as a receipt for the input message.
:rtype: :class:`~aioxmpp.Message`
"""
if message.type_ == aioxmpp.MessageType.ERROR:
raise ValueError("receipts cannot be generated for error messages")
if message.xep0184_received:
raise ValueError("receipts cannot be generated for receipts")
if message.id_ is None:
raise ValueError("receipts cannot be generated for id-less messages")
reply = message.make_reply()
reply.to = reply.to.bare()
reply.xep0184_received = xso.Received(message.id_)
return reply | 0.001018 |
def meth_new(args):
""" Submit a new workflow (or update) to the methods repository. """
r = fapi.update_repository_method(args.namespace, args.method,
args.synopsis, args.wdl, args.doc,
args.comment)
fapi._check_response_code(r, 201)
if fcconfig.verbosity:
print("Method %s installed to project %s" % (args.method,
args.namespace))
return 0 | 0.002008 |
def add_vertex(self, v):
""" Add a vertex to the graph
:param v: The vertex name.
"""
self.graph.add_vertex(v)
self.vs.add(v) | 0.012048 |
def purge(gandi, email, background, force, alias):
"""Purge a mailbox."""
login, domain = email
if alias:
if not force:
proceed = click.confirm('Are you sure to purge all aliases for '
'mailbox %s@%s ?' % (login, domain))
if not proceed:
return
result = gandi.mail.set_alias(domain, login, [])
else:
if not force:
proceed = click.confirm('Are you sure to purge mailbox %s@%s ?'
% (login, domain))
if not proceed:
return
result = gandi.mail.purge(domain, login, background)
return result | 0.001447 |
def stop_volume(name, force=False):
'''
Stop a gluster volume
name
Volume name
force
Force stop the volume
.. versionadded:: 2015.8.4
CLI Example:
.. code-block:: bash
salt '*' glusterfs.stop_volume mycluster
'''
volinfo = info()
if name not in volinfo:
log.error('Cannot stop non-existing volume %s', name)
return False
if int(volinfo[name]['status']) != 1:
log.warning('Attempt to stop already stopped volume %s', name)
return True
cmd = 'volume stop {0}'.format(name)
if force:
cmd += ' force'
return _gluster(cmd) | 0.001541 |
def fit(self, X, C):
"""
Fit one weighted classifier per class
Parameters
----------
X : array (n_samples, n_features)
The data on which to fit a cost-sensitive classifier.
C : array (n_samples, n_classes)
The cost of predicting each label for each observation (more means worse).
"""
X, C = _check_fit_input(X, C)
C = np.asfortranarray(C)
self.nclasses = C.shape[1]
self.classifiers = [deepcopy(self.base_classifier) for i in range(self.nclasses)]
if not self.weight_simple_diff:
C = WeightedAllPairs._calculate_v(self, C)
Parallel(n_jobs=self.njobs, verbose=0, require="sharedmem")(delayed(self._fit)(c, X, C) for c in range(self.nclasses))
return self | 0.007389 |
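A shape sketch for the expected inputs; the ``model`` call is hypothetical:

import numpy as np

# Three samples, four classes: C[i, j] is the cost of predicting class j
# for sample i (higher is worse), matching the docstring.
X = np.random.rand(3, 5)
C = np.array([[0., 2., 1., 2.],
              [2., 0., 2., 1.],
              [1., 2., 0., 2.]])
# model.fit(X, C)   # fits one weighted binary classifier per class/column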
def generate_take(out_f, steps, line_prefix):
"""Generate the take function"""
out_f.write(
'{0}constexpr inline int take(int n_)\n'
'{0}{{\n'
'{0} return {1} 0 {2};\n'
'{0}}}\n'
'\n'.format(
line_prefix,
''.join('n_ >= {0} ? {0} : ('.format(s) for s in steps),
')' * len(steps)
)
) | 0.002639 |
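Rendering the generated C++ for a concrete steps list shows the chained conditional; steps are tested in the order given, so callers presumably pass them largest-first:

import io

buf = io.StringIO()
generate_take(buf, [4, 2, 1], '  ')
print(buf.getvalue())
#   constexpr inline int take(int n_)
#   {
#    return n_ >= 4 ? 4 : (n_ >= 2 ? 2 : (n_ >= 1 ? 1 : ( 0 )));
#   }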
def prefix_items(self, prefix, strip_prefix=False):
"""Get all (key, value) pairs with keys that begin with ``prefix``.
:param prefix: Lexical prefix for keys to search.
:type prefix: bytes
:param strip_prefix: True to strip the prefix from yielded items.
:type strip_prefix: bool
:yields: All (key, value) pairs in the store where the keys
begin with the ``prefix``.
"""
items = self.items(key_from=prefix)
start = 0
if strip_prefix:
start = len(prefix)
for key, value in items:
if not key.startswith(prefix):
break
yield key[start:], value | 0.002861 |
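The same early-stopping scan, as a standalone sketch over any key-sorted (key, value) iterable that already starts at the prefix:

def prefix_scan(items, prefix, strip_prefix=False):
    start = len(prefix) if strip_prefix else 0
    for key, value in items:
        if not key.startswith(prefix):
            break                      # keys are sorted, so we can stop early
        yield key[start:], value

kv = [(b'app:1', 1), (b'app:2', 2), (b'zzz', 3)]
print(list(prefix_scan(kv, b'app:', strip_prefix=True)))   # [(b'1', 1), (b'2', 2)]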
def get_evernote_notes(self, evernote_filter):
"""
get the notes related to the filter
:param evernote_filter: filtering
:return: notes
"""
data = []
note_store = self.client.get_note_store()
our_note_list = note_store.findNotesMetadata(self.token, evernote_filter, 0, 100,
EvernoteMgr.set_evernote_spec())
for note in our_note_list.notes:
whole_note = note_store.getNote(self.token, note.guid, True, True, False, False)
content = self._cleaning_content(whole_note.content)
data.append({'title': note.title, 'my_date': arrow.get(note.created),
'link': whole_note.attributes.sourceURL, 'content': content})
return data | 0.008475 |
def draw(self):
"""Draws the checkbox."""
if not self.visible:
return
# Blit the current checkbox's image.
if self.isEnabled:
if self.mouseIsDown and self.lastMouseDownOverButton and self.mouseOverButton:
if self.value:
self.window.blit(self.surfaceOnDown, self.loc)
else:
self.window.blit(self.surfaceOffDown, self.loc)
else:
if self.value:
self.window.blit(self.surfaceOn, self.loc)
else:
self.window.blit(self.surfaceOff, self.loc)
else:
if self.value:
self.window.blit(self.surfaceOnDisabled, self.loc)
else:
self.window.blit(self.surfaceOffDisabled, self.loc) | 0.004608 |
def _vertex_data_to_sframe(data, vid_field):
"""
Convert data into a vertex data sframe. Using vid_field to identify the id
column. The returned sframe will have id column name '__id'.
"""
if isinstance(data, SFrame):
# '__id' already in the sframe, and it is ok to not specify vid_field
if vid_field is None and _VID_COLUMN in data.column_names():
return data
if vid_field is None:
raise ValueError("vid_field must be specified for SFrame input")
data_copy = copy.copy(data)
data_copy.rename({vid_field: _VID_COLUMN}, inplace=True)
return data_copy
if type(data) == Vertex or type(data) == list:
return _vertex_list_to_sframe(data, '__id')
elif HAS_PANDAS and type(data) == pd.DataFrame:
if vid_field is None:
# using the dataframe index as vertex id
if data.index.is_unique:
if not ("index" in data.columns):
# pandas reset_index() will insert a new column of name "index".
sf = SFrame(data.reset_index()) # "index"
sf.rename({'index': _VID_COLUMN}, inplace=True)
return sf
else:
# pandas reset_index() will insert a new column of name "level_0" if there exists a column named "index".
sf = SFrame(data.reset_index()) # "level_0"
sf.rename({'level_0': _VID_COLUMN}, inplace=True)
return sf
else:
raise ValueError("Index of the vertices dataframe is not unique, \
try specifying vid_field name to use a column for vertex ids.")
else:
sf = SFrame(data)
if _VID_COLUMN in sf.column_names():
raise ValueError('%s reserved vid column name already exists in the SFrame' % _VID_COLUMN)
sf.rename({vid_field: _VID_COLUMN}, inplace=True)
return sf
else:
raise TypeError('Vertices type %s is Not supported.' % str(type(data))) | 0.002871 |
def auth_user_oid(self, email):
"""
OpenID user Authentication
:param email: user's email to authenticate
:type self: User model
"""
user = self.find_user(email=email)
if user is None or (not user.is_active):
log.info(LOGMSG_WAR_SEC_LOGIN_FAILED.format(email))
return None
else:
self.update_user_auth_stat(user)
return user | 0.004464 |
def resize_max(img, max_side):
"""
    Resize the image, preserving aspect ratio, so that its larger dimension
    equals max_side.
    :param img: Input image
    :param max_side: Length of the maximum height or width
    :return: The resized image
    """
    h, w = img.shape[:2]
    if h > w:
        nh = max_side
        nw = int(round(w * nh / h))   # int cast: cv.resize needs integer dims
    else:
        nw = max_side
        nh = int(round(h * nw / w))
    return cv.resize(img, (nw, nh)) | 0.002577 |
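A quick shape check, assuming ``cv`` above is OpenCV (cv2):

import numpy as np

# 1000 x 500 input -> height capped at 200, width scaled to 100 to keep aspect.
img = np.zeros((1000, 500, 3), dtype=np.uint8)
print(resize_max(img, 200).shape[:2])   # (200, 100)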
def remove_props(self,*args):
"""
Removes desired properties from ``self.properties``.
"""
for arg in args:
if arg in self.properties:
del self.properties[arg] | 0.017544 |