Columns: code (string, lengths 4 to 4.48k), docstring (string, lengths 1 to 6.45k), _id (string, length 24)
def __init__(self, func, inputs, batch=False):
    def enable_grads(inputs):
        if isinstance(inputs, (list, tuple)):
            for x in inputs:
                x.stop_gradient = False
        else:
            assert isinstance(inputs, paddle.fluid.framework.Variable), (
                f"Expecting {inputs} to be paddle.fluid.framework.Variable,"
                f" however it's found to be a(n) {type(inputs)}.")
            inputs.stop_gradient = False
        return inputs
    self.batch = batch
    self.xs = enable_grads(inputs)
    ys = func(inputs)
    if not isinstance(ys, list):
        ys = [ys]
    self.y = self.flatten_all(ys)
    self.ydim = self.y.shape[-1]
    self.xdim = self.flatten_all(inputs).shape[-1]
    self.bdim = self.y.shape[0]
    self.jacobian = {}
Constructing a Jacobian matrix.

Parameters:
    func (Callable): a Python function that takes as input a Tensor or a Tensor list and outputs a Tensor or a Tensor list.
    inputs (Tensor|list[Tensor]): a Tensor or a list of Tensors as `func`'s input.
    batch (bool): if True, the 0th axis is considered the batch dimension, on both input and output.
625941b45166f23b2e1a4f21
def test_int_input(self):
    info = 25
    with self.assertRaises(phishingpage.ArgumentIsNotAString):
        phishingpage.is_type_string(info)
" Tests is_type_string with a integer as an input.
625941b48a43f66fc4b53e33
def _BuildMediaBody(self):
    if self.get('mimeType') is None:
        self['mimeType'] = 'application/octet-stream'
    return MediaIoBaseUpload(self.content, self['mimeType'], resumable=True)
Build MediaIoBaseUpload to prepare for uploading the content of the file. Sets mimeType to 'application/octet-stream' if not specified.

:returns: MediaIoBaseUpload -- instance that will be used to upload content.
625941b44e4d5625662d41a7
def marble_noise_3d(octaves, persistence, scale, x, y, z):
    return math.cos(float(x) * scale + simplexnoise.octave_noise_3d(
        octaves, persistence, float(scale) / 3.0, x, y, z))
3D Marble Noise on the x-axis.
625941b4236d856c2ad445a8
def test_create_snapshot_snapshot(self):
    pass
Test case for create_snapshot_snapshot
625941b410dbd63aa1bd297a
def run_event_loop():
    print("what do you want to do with your journal?")
    cmd = 'EMPTY'
    journal_name = 'default'
    journal_data = journal_load(journal_name)
    while cmd != 'x' and cmd:
        cmd = input('[L]ist entries, [A]dd an entry. E[x]it: ')
        cmd = cmd.lower().strip()
        if cmd == 'l':
            list_entries(journal_data)
        elif cmd == 'a':
            add_entry(journal_data)
        elif cmd != 'x' and cmd:
            print("Sorry, we don't understand '{}'.".format(cmd))
    print('Done, goodbye.')
    journal.save(journal_name, journal_data)
doc of method
625941b426068e7796caeaa1
def sflow_collector_collector_port_number(self, **kwargs):
    config = ET.Element("config")
    sflow = ET.SubElement(config, "sflow", xmlns="urn:brocade.com:mgmt:brocade-sflow")
    collector = ET.SubElement(sflow, "collector")
    collector_ip_address_key = ET.SubElement(collector, "collector-ip-address")
    collector_ip_address_key.text = kwargs.pop('collector_ip_address')
    use_vrf_key = ET.SubElement(collector, "use-vrf")
    use_vrf_key.text = kwargs.pop('use_vrf')
    collector_port_number = ET.SubElement(collector, "collector-port-number")
    collector_port_number.text = kwargs.pop('collector_port_number')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
625941b4bf627c535bc12fa0
def upload_data(self, excludes=['.localcache', 'entrypoint']):
    dbx_root, local_path = self._syncable_local_subfolders.get('root')
    responses = []
    for dn, dirs, files in os.walk(local_path):
        dirs[:] = [d for d in dirs if d not in excludes]
        for f in files:
            fpath = dn + '/' + f
            dbx_path = fpath.replace(str(local_path), dbx_root)
            print('uploading...', fpath, ' to ', dbx_path)
            with open(fpath, 'rb') as fh:
                res = self._dbx.files_upload(fh.read(), dbx_path)
                responses.append(res)
    return responses
Uploads any data from the root of the data folder into the Dropbox Apps/project/ directory. Will exclude any directories given to upload_data.
625941b44a966d76dd550dd4
def make_index(self, batch_size):
    batch_idx = np.random.randint(self._curr_size, size=batch_size)
    return batch_idx
Sample a batch of indexes.

Args:
    batch_size (int): batch size

Returns:
    batch of indexes
625941b47cff6e4e81117750
def run_migrations_offline():
    url = os.environ["POSTGRES_URL"]
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        compare_server_default=True,
    )
    with context.begin_transaction():
        context.run_migrations()
Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output.
625941b49f2886367277a663
def set_startstop_enabled(self, value):
    leap_assert_type(value, bool)
    self.ui.btnEipStartStop.setEnabled(value)
    self._action_eip_startstop.setEnabled(value)
Enable or disable btnEipStartStop and _action_eip_startstop based on value.

:param value: True for enabled, False otherwise
:type value: bool
625941b46aa9bd52df036b6b
def main():
    global CONFIG, DEV_CONFIG
    parser = argparse.ArgumentParser(
        description='Gerencia canais no Tvheadend.')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--no-update', action='store_true',
                       help='não verifica por atualização')
    group.add_argument('--force-update', action='store_true',
                       help='força atualização')
    group_debug = parser.add_mutually_exclusive_group()
    group_debug.add_argument('--dev', action='store_true',
                             help='modo de testes - desenvolvimento')
    group_desativar = parser.add_mutually_exclusive_group(required=True)
    group_desativar.add_argument(
        '--desativar-canais-sd', action='store_true',
        help='desativar canais sd quando hover em hd')
    group_desativar.add_argument(
        '--desativar-canais-adultos', action='store_true',
        help='desativar canais adultos')
    group_desativar.add_argument(
        '--desativar-canais-internos', action='store_true',
        help='desativar canais internos da operadora')
    group_desativar.add_argument(
        '--ativar-todos-canais', action='store_true',
        help='Ativa todos os canais')
    group_desativar.add_argument(
        '--reorganizar-numeracao-canais', action='store_true',
        help='Reorganiza numeracao dos canais - sd junto com hd (irreversível - somente excluindo e mapeando)')
    args = parser.parse_args()
    CONFIG['tvheadendAddress'] = get_ip()
    if args.dev:
        print(args)
        CONFIG = DEV_CONFIG
    logging.basicConfig(format='%(asctime)s %(message)s', level=logging.DEBUG)
    logging.info("version %s", __version__)
    ck_updates = True
    if args.no_update or args.dev:
        ck_updates = False
    if args.force_update:
        update(CONFIG['updateurl'], True)
        print_line("alert", "Pronto.")
        sys.exit()
    if ck_updates:
        update_return = update(CONFIG['updateurl'])
        if update_return:
            print_line("alert", "Reiniciando script")
            python = sys.executable
            os.execl(python, python, *sys.argv)
    has_tvh = check_for_tvh(CONFIG)
    if args.desativar_canais_sd and has_tvh:
        desativar_canais_duplicados(CONFIG)
    if args.desativar_canais_adultos and has_tvh:
        desativar_canais_adultos(CONFIG)
    if args.desativar_canais_internos and has_tvh:
        desativar_canais_internos(CONFIG)
    if args.ativar_todos_canais and has_tvh:
        ativar_todos_canais(CONFIG)
    if args.reorganizar_numeracao_canais and has_tvh:
        reorganizar_numeracao_canais(CONFIG)
Main function.
625941b4a8ecb033257d2e9f
def write_sex(image_manager, bsex_file, sex_file):
    # The original bound this to `bsex_configs` but used `sex_configs` below;
    # bind it under the name it is actually used as.
    sex_configs = io.get_configs(bsex_file)
    # `spec`, `Instrument`, `Filter`, and `var_changes` are not defined in this
    # function and are assumed to come from the enclosing scope.
    telescope = spec["telescope"][0]
    Filter = Instrument + "_" + Filter
    sex_file = "Astromatic/Configs/" + Filter + ".sex"
    cat_file = "Astromatic/Catalogs/" + Filter + ".cat"
    flag_file = "Data/" + telescope + "/" + Filter + ".flg.fits"
    check_file = "Data/" + telescope + "/" + Filter + ".chk.fits"
    weight_file = "Data/" + telescope + "/" + Filter + ".wht.fits"
    var_file = "Data/" + telescope + "/" + Filter + ".var.fits"
    bg_file = "Data/" + telescope + "/" + Filter + ".bg.fits"
    if os.path.isfile(flag_file):
        sex_configs["FLAG_IMAGE"] = flag_file
    else:
        sex_configs.pop("FLAG_IMAGE")
        sex_configs.pop("FLAG_TYPE")
    if os.path.isfile(weight_file):
        sex_configs["WEIGHT_IMAGE"] = weight_file
        sex_configs["WEIGHT_TYPE"] = "MAP_WEIGHT"
    elif os.path.isfile(var_file):
        sex_configs["WEIGHT_IMAGE"] = var_file
        sex_configs["WEIGHT_TYPE"] = "MAP_VAR"
    elif os.path.isfile(bg_file):
        sex_configs["WEIGHT_IMAGE"] = bg_file
        sex_configs["WEIGHT_TYPE"] = "BACKGROUND"
    else:
        sex_configs.pop("WEIGHT_IMAGE")
        sex_configs.pop("WEIGHT_TYPE")
        sex_configs.pop("WEIGHT_GAIN")
    for config in sex_configs:
        if config in var_changes:
            sex_configs[config] = spec[var_changes[config]][0]
    sex_configs["CATALOG_NAME"] = cat_file
    sex_configs["CHECKIMAGE_NAME"] = check_file
    sex_configs["FLAG_IMAGE"] = flag_file
    print("Writing %s..." % sex_file)
    io.write_configs(sex_file, sex_configs)
This function reads in a .bsex file to write a .sex file for an image with details provided by the image manager.
625941b40383005118ecf3ae
def New(*args, **kargs):
    obj = itkVectorCastImageFilterICVF33IVF33_Superclass.__New_orig__()
    import itkTemplate
    itkTemplate.New(obj, *args, **kargs)
    return obj
New() -> itkVectorCastImageFilterICVF33IVF33_Superclass

Create a new object of the class itkVectorCastImageFilterICVF33IVF33_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method.

New() tries to assign all the non named parameters to the input of the new objects - the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'.

Ex:
    itkVectorCastImageFilterICVF33IVF33_Superclass.New(reader, Threshold=10)
is (most of the time) equivalent to:
    obj = itkVectorCastImageFilterICVF33IVF33_Superclass.New()
    obj.SetInput(0, reader.GetOutput())
    obj.SetThreshold(10)
625941b4377c676e91271f74
def __str__(self):
    return "PostQuery: {}, page {}, limit {}".format(self._tags, self._page, self._limit)
String representation with tags, page, and limit
625941b450485f2cf553cb62
def _get_score(self, feat):
    if torch.cuda.is_available():
        feat = feat.cuda()
    if feat.dim() == 2:
        feat = feat.unsqueeze(0).unsqueeze(0)
    dvector = self.model.forward(feat).detach().cpu().numpy()
    return dvector
Returns the d-vector as an ndarray.
625941b430bbd722463cbb8d
@app.route('/api/uploadfile/<file>', methods=['POST'])
@auto.doc()
def uploadfile(file):
    ok_fileid, is_logfile, filename = _get_filepath_from_file_id(file)
    if ok_fileid:
        if is_logfile:
            log('uploading logfile {} [{}]!!!'.format(file, filename), 'error', True)
            return redirect(url_for('index'), code=404)
        f = request.files['file']
        alert = check_uploaded_config_file(file, f, dest_filepath=filename)
        if alert:
            alert = json.dumps(alert)
        return redirect(url_for('editfile', file=file, alerta=alert))
    return abort(500)
POST method for interesting config files upload & replacement.

:param str file: uploaded file_id
625941b48e71fb1e9831d57f
def test_beta_features(self):
    pass
Test case for beta_features

Returns beta features active in tenant.  # noqa: E501
625941b4596a897236089895
def plot_arrow(x, y, yaw, length=0.5, width=0.1):
    plt.arrow(x, y, length * math.cos(yaw), length * math.sin(yaw),
              head_length=1.5 * width, head_width=width)
    plt.plot(x, y)
plot arrow

:param x: current position x
:param y: current position y
:param yaw: current yaw
:param length: arrow length
:param width: arrow width
:return: None

length_includes_head: determines whether the arrow head length is included, default: False
head_width: the width of the arrow head, default: 3*width
head_length: the length of the arrow head, default: 1.5*head_width
shape: 'full', 'left', or 'right'; the shape of the arrow, default: 'full'
overhang: sets the relation between the base of the arrow-head triangle and the arrow tail, which changes the arrow's shape. Default is 0, giving a triangular head; values below 0 give a diamond-shaped head and values above 0 a fishtail-shaped head.
625941b4aad79263cf390804
def cancel(self):
    with self._condition:
        if self._state in ['RUNNING', 'FINISHED']:
            return False
        if self._state in ['PENDING']:
            self._state = 'CANCELLED'
            self._condition.notify_all()
        return True
Cancel the operation that retrieves the future.

:return: False if the future is already running or finished, True otherwise
625941b485dfad0860c3ac22
def rms(varray=[]):
    squares = map(lambda x: x * x, varray)
    return pow(sum(squares), 0.5)
Root mean squared velocity. Returns square root of sum of squares of velocities
625941b4cdde0d52a9e52df8
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
    if request.method == "GET":
        print(session["user_id"])
        return render_template("buy.html")
    else:
        symbol = request.form.get("symbol")
        if not symbol:
            # `return` was missing here, so the apology page was never sent
            return render_template("apology.html", message="Please enter a stock symbol.")
        get_quote = lookup(symbol)
        if not get_quote:
            return render_template("apology.html", message="No symbol found.")
        shares = request.form.get("shares")
        if not shares:
            return render_template("apology.html", message="Please enter a number of shares.")
        if int(shares) < 0:
            return render_template("apology.html", message="Invalid number of shares.")
        current_price = get_quote["price"]
        amount_due = current_price * int(shares)
        balance = db.execute("SELECT cash FROM users WHERE id == :id", id=session["user_id"])[0]
        if amount_due <= float(balance["cash"]):
            can_buy = True
        else:
            return render_template("apology.html", message="Insufficient funds")
        if can_buy:
            new_cash = float(balance["cash"]) - amount_due
            now = datetime.datetime.now()
            db.execute("UPDATE users SET cash = :cash WHERE id == :id",
                       cash=new_cash, id=session["user_id"])
            db.execute("INSERT INTO transactions (customer_id, date, type, symbol, shares, PPS, Total_Amount) "
                       "VALUES (:customer_id, :date, :type, :symbol, :shares, :PPS, :Total_Amount)",
                       customer_id=session["user_id"], date=now, type="Buy", symbol=symbol,
                       shares=shares, PPS=current_price, Total_Amount=amount_due)
            return render_template("success.html")
Buy shares of stock
625941b40c0af96317bb7fb3
def login_user(self, password, my_name):
    from courier_app.send_it_apis.v1.models import SystemUsers
    for item in SystemUsers.send_it_users.keys():
        if my_name == SystemUsers.send_it_users[item]['email'] or \
                my_name == SystemUsers.send_it_users[item]['username']:
            pass_code = SystemUsers.send_it_users[item]['password']
            if check_password_hash(pass_code, password):
                s = Serializer(Config.SECRET_KEY, expires_in=21600)
                token = (s.dumps({'user_id': item})).decode("ascii")
                return token
            else:
                return False
    return False
This method logs in a user by username or email and returns an auth token.
625941b485dfad0860c3ac23
def test_str_with_no_content(self):
    payload = payloads.GetAttributeListResponsePayload(
        None,
        None
    )
    expected = str({
        'unique_identifier': None,
        'attribute_names': list()
    })
    observed = str(payload)
    self.assertEqual(expected, observed)
Test that str can be applied to a GetAttributeList response payload with no ID or attribute names.
625941b415fb5d323cde08d2
def __init__(self, end_number, distance, arrows_number: int, max_scoring_per_arrow: int):
    self.end_number = end_number
    self.distance = distance
    self.arrows_number = arrows_number
    self.max_scoring_per_arrow = max_scoring_per_arrow
Constituent of ArcheryGame, representing one end (round) of the game.

:param end_number: int representing the end's number
:param distance: str representing distance from archer to target (e.g. '15 m')
:param arrows_number: int representing number of arrows shot at one target during one end
:param max_scoring_per_arrow: int representing the number of points for hitting the most awarded target zone
625941b4dc8b845886cb52fe
def test_view_validates_permission(self):
    response = self.client.get(self.thread.get_unapproved_post_url())
    self.assertContains(response, "You need permission to approve content", status_code=403)
    self.grant_permission()
    response = self.client.get(self.thread.get_unapproved_post_url())
    self.assertEqual(response.status_code, 302)
view validates permission to see unapproved posts
625941b438b623060ff0abc0
def _get_clipper(self, layer_bounds, out_bounds, scalar=None):
    if not self.clip or bounding.covers(out_bounds, layer_bounds):
        return None
    scalar = scalar or self.scalar
    if not self.clipper:
        padded_bounds = bounding.pad(self.projected_bounds, 1000)
        self.clipper = transform.clipper([c * scalar for c in padded_bounds])
    return self.clipper
Get a clipping function for the given input crs and bounds.

Args:
    layer_bounds (tuple): The bounds of the layer.
    out_bounds (tuple): The desired output bounds (in layer coordinates).
    scalar (float): Map scale.

Returns:
    ``None`` if layer_bounds are inside out_bounds or clipping is off.
625941b445492302aab5e08a
def test_parallel_beta_diversity(self):
    params = {'metrics': 'weighted_unifrac,unweighted_unifrac',
              'tree_path': self.tree_fp,
              'jobs_to_start': 3,
              'full_tree': False}
    app = ParallelBetaDiversitySingle()
    r = app(self.input1_fp,
            self.test_out,
            params,
            job_prefix='BTEST',
            poll_directly=True,
            suppress_submit_jobs=False)
    input_sample_ids = parse_biom_table(open(self.input1_fp, 'U')).ids()
    dm_fps = glob(join(self.test_out, '*weighted_unifrac*'))
    for dm_fp in dm_fps:
        dm_sample_ids = parse_distmat(open(dm_fp))[0]
        self.assertItemsEqual(dm_sample_ids, input_sample_ids)
parallel beta diversity functions in single file mode
625941b4460517430c393f5a
def close(self):
    self.disconnect()
Close the SSH connection opened by `open` or `connect`.
625941b4adb09d7d5db6c55e
def mavg(x, y, span=SPAN):
    x, y = map(_plot_friendly, [x, y])
    if _isdate(x[0]):
        x = np.array([i.toordinal() for i in x])
    std_err = pd.expanding_std(y, span)
    y = pd.rolling_mean(y, span)
    y1 = y - std_err
    y2 = y + std_err
    return (y, y1, y2)
compute moving average
625941b4d6c5a10208143e10
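A hedged side note on the row above: pd.expanding_std and pd.rolling_mean were removed in later pandas releases. A minimal sketch of the same rolling-statistics core with today's Series API (the sample data is illustrative, not from the original):

import numpy as np
import pandas as pd

y = pd.Series(np.random.randn(100))
span = 10
std_err = y.expanding(min_periods=span).std()  # replaces pd.expanding_std(y, span)
mean = y.rolling(window=span).mean()           # replaces pd.rolling_mean(y, span)
lower, upper = mean - std_err, mean + std_err  # the (y1, y2) band returned by mavg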
def plot_spectrogram_image(image, title="MelSpectrogram", figsize=(20, 7)):
    plt.figure(figsize=figsize)
    plt.title(title)
    plt.grid()
    plt.imshow(image)
    plt.show()
Plots (mel)spectrogram-like image.
625941b41f5feb6acb0c4924
def validate_get_params(self):
    if hasattr(self, 'params_serializer_class'):
        data = self.request.query_params
        ser = self.params_serializer_class(data=data)
        try:
            ser.is_valid(raise_exception=True)
        except Exception as e:
            return None, e.detail if hasattr(e, 'detail') else None
        return ser.validated_data, None
    return None, None
This function validates the query params with the serializer specified in the view's params_serializer_class attribute.

:return: a dictionary of validated items
625941b48e05c05ec3eea13b
def test_stamp(self):
    self.assertEqual(Stamp(None), self.item.stamp())
Verify an unknown item has no stamp.
625941b4d486a94d0b98df19
@api.task
@api.roles('frontend')
def restart_haproxy():
    frontend.restart_haproxy()
Restart the HA-Proxy load balancer component.
625941b455399d3f0558847d
def body_insertion(content, insertion, end=False):
    insertion = BeautifulSoup(insertion)
    soup = BeautifulSoup(content)
    if soup.body and end:
        soup.body.append(insertion)
    elif soup.body:
        soup.body.insert(0, insertion)
    elif not soup.body and end:
        soup.append(insertion)
    elif not soup.body:
        soup.insert(0, insertion)
    if USE_PRETTIFY:
        return soup.prettify()
    else:
        return soup.renderContents()
Insert HTML content into the body HTML node
625941b4004d5f362079a102
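A short usage sketch for the body_insertion row above, assuming USE_PRETTIFY is False in the surrounding module:

html = "<html><body><p>existing</p></body></html>"
print(body_insertion(html, "<div>banner</div>"))            # inserted at the top of <body>
print(body_insertion(html, "<div>footer</div>", end=True))  # appended to the end of <body>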
def fetch_permissions_list(src_root) -> dict:
    automation_files = os.listdir(os.path.join(src_root, "actions"))
    automations = [os.path.splitext(f)[0] for f in automation_files if f.endswith('.py')]
    permissions = {}
    for index, automation in enumerate(automations):
        automation_module = importlib.import_module(''.join(['actions.', automation]), package=None)
        for permission in automation_module.info()['permissions']:
            permissions[''.join([permission])] = permission
    return permissions
Generates the HyperglanceAutomations.json file

Returns
-------
list
    A json formatted list containing the available automations
625941b430dc7b7665901735
def send_text(self, text: str, quoted_msg_id: Optional[str] = None,
              mentions: Optional[List[str]] = None, link_desc=None) -> Result[str]:
    params: Dict[str, Any] = {'text': text}
    if quoted_msg_id:
        params['quotedMsgId'] = quoted_msg_id
    if mentions:
        params['mentions'] = mentions
    if link_desc:
        params['linkDesc'] = link_desc
    return self._execute_command('sendText', params)
Send text message to current chat.

:param text: Message to send.
:param quoted_msg_id: Quoted message's identifier.
:param mentions: List of user ids mentioned.
:param link_desc: Link description.
:return: New message's identifier
625941b463d6d428bbe442c1
def test_create_attribute_csv_table(self):
    from natcap.invest.wave_energy import wave_energy
    temp_dir = self.workspace_dir
    table_uri = os.path.join(temp_dir, 'att_csv_file.csv')
    fields = ['id', 'height', 'length']
    data = {1: {'id': 1, 'height': 10, 'length': 15},
            0: {'id': 0, 'height': 10, 'length': 15},
            2: {'id': 2, 'height': 10, 'length': 15}}
    wave_energy.create_attribute_csv_table(table_uri, fields, data)
    exp_rows = [{'id': '0', 'height': '10', 'length': '15'},
                {'id': '1', 'height': '10', 'length': '15'},
                {'id': '2', 'height': '10', 'length': '15'}]
    result_file = open(table_uri, 'rU')
    csv_reader = csv.DictReader(result_file)
    for row, exp_row in zip(csv_reader, exp_rows):
        self.assertDictEqual(row, exp_row)
    result_file.close()
WaveEnergy: testing 'create_attribute_csv_table' function.
625941b430c21e258bdfa268
def get_move(self, game, time_left):
    self.time_left = time_left
    best_move = (-1, -1)
    try:
        search_depth = 0
        while self.time_left() > 0:
            best_move = self.alphabeta(game, search_depth)
            search_depth += 1
    except SearchTimeout:
        pass
    return best_move
Search for the best move from the available legal moves and return a result before the time limit expires.

Modify the get_move() method from the MinimaxPlayer class to implement iterative deepening search instead of fixed-depth search.

**********************************************************************
NOTE: If time_left() < 0 when this function returns, the agent will forfeit the game due to timeout. You must return _before_ the timer reaches 0.
**********************************************************************

Parameters
----------
game : `isolation.Board`
    An instance of `isolation.Board` encoding the current state of the game (e.g., player locations and blocked cells).
time_left : callable
    A function that returns the number of milliseconds left in the current turn. Returning with any less than 0 ms remaining forfeits the game.

Returns
-------
(int, int)
    Board coordinates corresponding to a legal move; may return (-1, -1) if there are no available legal moves.
625941b4ad47b63b2c509d56
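The docstring above describes iterative deepening under a time budget. A generic sketch of that pattern, where `search` and TimeoutError are stand-ins for the alphabeta() method and SearchTimeout exception of the original class:

def iterative_deepening(search, time_left, threshold_ms=10):
    best = None
    depth = 0
    try:
        # keep the result of the deepest fully completed search
        while time_left() > threshold_ms:
            best = search(depth)  # may raise when the budget runs out
            depth += 1
    except TimeoutError:
        pass  # fall through with the last completed answer
    return best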
def fetch_emojis(message_content):
    emojis = re.findall(r"<.:[a-zA-Z0-9_]{2,}:\d{18}>", message_content)
    emojis_objects = []
    for emoji in emojis:
        emoji_id = int(emoji[-19:-1])
        emojis_objects.append(client_object.get_emoji(emoji_id))
    return emojis_objects
Function that fetches all custom emojis referenced in a message and returns them as emoji objects
625941b407f4c71912b11250
def put_dataset(self, name, value, overwrite=True):
    if name in self.dataset_names():
        if overwrite == False:
            raise NameError(f"the dataset '{name}' already exists")
        else:
            self.delete_dataset(name)
    self._store_child_dataset(name, value)
    locked = self.attrs.lock()
    self.attrs[f"{name}/dtype"] = str(value.dtype)
    self.attrs[f"{name}/shape"] = value.shape
    self.attrs[f"{name}/byteorder"] = self._byteorders[value.dtype.byteorder]
    if locked == True:
        self.attrs.commit()
Puts `value` into this entry under `name`.
625941b4f8510a7c17cf94d0
def get_edit_mapping_value(self, edit_value):
    return edit_value.edit_text
Get value from edit_mapping text. Overload to perform automatic value conversion.
625941b4d7e4931a7ee9dce6
def __init__(self, key=None, secret=None, verify_certs=True):
    self.session = requests.Session()
    if key is not None:
        self.session.auth = (key, secret)
    self.verify_certs = verify_certs
Initialize object

:param key: Service key
:type key: None | str
:param secret: Service secret
:type secret: None | str
:param verify_certs: Verify the certificates when making a request
:type verify_certs: bool
:rtype: None
625941b4b830903b967e96e3
def move_file(self, destpath: str) -> None:
    workspace = self.content.workspace
    parent = self.content.parent
    with new_revision(
        content=self.content,
        tm=transaction.manager,
        session=self.session,
    ):
        if basename(destpath) != self.getDisplayName():
            new_given_file_name = transform_to_bdd(basename(destpath))
            new_file_name, new_file_extension = os.path.splitext(new_given_file_name)
            self.content_api.update_content(
                self.content,
                new_file_name,
            )
            self.content.file_extension = new_file_extension
            self.content_api.save(self.content)
        workspace_api = WorkspaceApi(
            current_user=self.user,
            session=self.session,
            config=self.provider.app_config,
        )
        content_api = ContentApi(
            current_user=self.user,
            session=self.session,
            config=self.provider.app_config
        )
        destination_workspace = self.provider.get_workspace_from_path(
            destpath,
            workspace_api,
        )
        destination_parent = self.provider.get_parent_from_path(
            destpath,
            content_api,
            destination_workspace,
        )
        if destination_parent != parent or destination_workspace != workspace:
            self.content_api.move(
                item=self.content,
                new_parent=destination_parent,
                must_stay_in_same_workspace=False,
                new_workspace=destination_workspace
            )
    transaction.commit()
Moving a file means changing the path used to access it. This can mean simple renaming (1), moving the file from one directory to another (2), or renaming and moving the file at the same time (3).

(1): move /dir1/file1 -> /dir1/file2
(2): move /dir1/file1 -> /dir2/file1
(3): move /dir1/file1 -> /dir2/file2

:param destpath: destination path of webdav move
:return: nothing
625941b4a934411ee3751466
def get_damping_param(
    method: str,
    defaults: Optional[list] = None,
    data_file: Optional[str] = None,
    keep_meta=False,
) -> dict:
    global _data_base
    if _data_base is None:
        if data_file is None:
            data_file = get_data_file_name()
        _data_base = load_data_base(data_file)
    if "default" not in _data_base or "parameter" not in _data_base:
        raise KeyError("No default correct scheme provided")
    if defaults is None:
        defaults = _data_base["default"]["d4"]
    _base = _data_base["default"]["parameter"]["d4"]
    _entry = _data_base["parameter"][method.lower()]["d4"]
    return _get_params(_entry, _base, defaults, keep_meta)
Obtain damping parameters from a data base file.
625941b432920d7e50b27f96
def parse_spec(spec, db=None):
    items = list(spec.items())
    if not items:
        return None
    name, coll = items[0]
    cmd = Command(name, db or spec.get('$db'), coll)
    if 'ordered' in spec:
        cmd.tags['mongodb.ordered'] = spec['ordered']
    if cmd.name == 'insert':
        if 'documents' in spec:
            cmd.metrics['mongodb.documents'] = len(spec['documents'])
    elif cmd.name == 'update':
        updates = spec.get('updates')
        if updates:
            cmd.query = updates[0].get('q')
    elif cmd.name == 'delete':
        dels = spec.get('deletes')
        if dels:
            cmd.query = dels[0].get('q')
    return cmd
Return a Command that has parsed the relevant detail for the given pymongo SON spec.
625941b416aa5153ce362242
def test_persist_dag(self):
    test_course = persistent_factories.PersistentCourseFactory.create(
        offering='tempcourse', org='testx',
        display_name='fun test course', user_id='testbot'
    )
    test_chapter = self.split_store.create_xblock(
        test_course.system, 'chapter', {'display_name': 'chapter n'},
        parent_xblock=test_course
    )
    self.assertEqual(test_chapter.display_name, 'chapter n')
    test_def_content = '<problem>boo</problem>'
    new_block = self.split_store.create_xblock(
        test_course.system,
        'problem',
        fields={
            'data': test_def_content,
            'display_name': 'problem'
        },
        parent_xblock=test_chapter
    )
    self.assertIsNotNone(new_block.definition_locator)
    self.assertTrue(isinstance(new_block.definition_locator.definition_id, LocalId))
    persisted_course = self.split_store.persist_xblock_dag(test_course, 'testbot')
    self.assertEqual(len(persisted_course.children), 1)
    persisted_chapter = persisted_course.get_children()[0]
    self.assertEqual(persisted_chapter.category, 'chapter')
    self.assertEqual(persisted_chapter.display_name, 'chapter n')
    self.assertEqual(len(persisted_chapter.children), 1)
    persisted_problem = persisted_chapter.get_children()[0]
    self.assertEqual(persisted_problem.category, 'problem')
    self.assertEqual(persisted_problem.data, test_def_content)
    persisted_problem.display_name = 'altered problem'
    persisted_problem = self.split_store.persist_xblock_dag(persisted_problem, 'testbot')
    self.assertEqual(persisted_problem.display_name, 'altered problem')
try saving temporary xblocks
625941b473bcbd0ca4b2be47
def __init__(self):
    self.login = None
    self.config = OrderedDict()
    self.data_schema = OrderedDict(
        [
            (vol.Required(CONF_EMAIL), str),
            (vol.Required(CONF_PASSWORD), str),
            (vol.Required(CONF_URL, default="amazon.com"), str),
            (vol.Optional(CONF_DEBUG, default=False), bool),
            (vol.Optional(CONF_INCLUDE_DEVICES, default=""), str),
            (vol.Optional(CONF_EXCLUDE_DEVICES, default=""), str),
            (vol.Optional(CONF_SCAN_INTERVAL, default=60), int),
        ]
    )
    self.captcha_schema = OrderedDict(
        [(vol.Required(CONF_PASSWORD), str), (vol.Required("captcha"), str)]
    )
    self.twofactor_schema = OrderedDict([(vol.Required("securitycode"), str)])
    self.claimspicker_schema = OrderedDict([(vol.Required("claimsoption"), str)])
    self.authselect_schema = OrderedDict(
        [
            (
                vol.Required("authselectoption", default=0),
                vol.All(cv.positive_int, vol.Clamp(min=0)),
            )
        ]
    )
    self.verificationcode_schema = OrderedDict(
        [(vol.Required("verificationcode"), str)]
    )
Initialize the config flow.
625941b41f037a2d8b945fc9
def setStretch(self, p_int):
    pass
QFont.setStretch(int)
625941b44f6381625f114811
def compute_distances_no_loops(self, X):
    num_test = X.shape[0]
    num_train = self.X_train.shape[0]
    dists = np.zeros((num_test, num_train))
    # pairwise squared distances via ||a - b||^2 = ||a||^2 - 2 a.b + ||b||^2
    train_square = sum(np.transpose(self.X_train ** 2))
    test_square = sum(np.transpose(X ** 2))
    train_test = np.matmul(X, np.transpose(self.X_train))
    test_square = np.reshape(test_square, (num_test, 1))
    train_square = np.reshape(train_square, (num_train, 1))
    dists = (np.transpose(train_square) - 2 * train_test + test_square) ** (1 / 2)
    return dists
Compute the distance between each test point in X and each training point in self.X_train using no explicit loops. Input / Output: Same as compute_distances_two_loops
625941b4f548e778e58cd346
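The no-loop version above relies on broadcasting the expansion ||a - b||^2 = ||a||^2 - 2 a.b + ||b||^2 over all pairs. A small self-contained NumPy check of that identity against an explicit loop (the random data is illustrative):

import numpy as np

X = np.random.rand(3, 5)        # test points
X_train = np.random.rand(4, 5)  # training points
d2 = (X ** 2).sum(1)[:, None] - 2 * X @ X_train.T + (X_train ** 2).sum(1)[None, :]
dists = np.sqrt(np.maximum(d2, 0))  # clamp tiny negatives from round-off
ref = np.array([[np.linalg.norm(a - b) for b in X_train] for a in X])
assert np.allclose(dists, ref)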
def test_genomeGenes():
    geneLimit = 4
    version = 'GCF_000302455.1'
    genome_instance = Genome(version)
    totalGenes = 0
    for gene in genome_instance.iterateGenes(geneLimit):
        totalGenes += 1
    assert totalGenes == geneLimit
Test genomic gene iterations
625941b431939e2706e4cc3c
def visited(self, times=1):
    t = int(time.time())
    # `in` replaces the Python-2-only dict.has_key()
    if t in self.visites:
        self.visites[t] = self.visites[t] + times
    else:
        self.visites[t] = times
The browser visits the site, so log it
625941b4956e5f7376d70c47
def __init__(self, status=200, summary=None, request_time=0):
    self.status = status
    self.summary = summary
    self.request = MockHandler.Request(request_time)
Initialise the mock handler with appropriate fields.
625941b4eab8aa0e5d26d929
def get_cost(self):
    return self.cost_of_labelling * self.queried
Gets the total cost of labelling.

:return: the cost of labelling
625941b41f037a2d8b945fca
def get_pane_syntax(session: str, window: int, pane: int = None):
    return "{0}:{1}".format(session, window) + (
        ".{0}".format(pane) if pane is not None else ""
    )
Format a session, window, and optional pane into tmux formatting

Parameters
----------
session : str
    Name of the session
window : int
    Window number of pane
pane : int
    Pane index in the window
625941b4d58c6744b4257a2c
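A quick usage sketch of the helper above, showing the tmux target strings it produces:

print(get_pane_syntax("work", 2))     # "work:2"
print(get_pane_syntax("work", 2, 1))  # "work:2.1"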
def wait_for_ip(update_callback,
                update_args=None,
                update_kwargs=None,
                timeout=5 * 60,
                interval=5,
                interval_multiplier=1,
                max_failures=10):
    if update_args is None:
        update_args = ()
    if update_kwargs is None:
        update_kwargs = {}
    duration = timeout
    while True:
        log.debug(
            'Waiting for VM IP. Giving up in 00:{0:02d}:{1:02d}'.format(
                int(timeout // 60), int(timeout % 60)
            )
        )
        data = update_callback(*update_args, **update_kwargs)
        if data is False:
            log.debug(
                'update_callback has returned False which is considered a '
                'failure. Remaining Failures: {0}'.format(max_failures)
            )
            max_failures -= 1
            if max_failures <= 0:
                raise SaltCloudExecutionFailure(
                    'Too much failures occurred while waiting for '
                    'the IP address'
                )
        elif data is not None:
            return data
        if timeout < 0:
            raise SaltCloudExecutionTimeout(
                'Unable to get IP for 00:{0:02d}:{1:02d}'.format(
                    int(duration // 60), int(duration % 60)
                )
            )
        time.sleep(interval)
        timeout -= interval
        if interval_multiplier > 1:
            interval *= interval_multiplier
            if interval > timeout:
                interval = timeout + 1
            log.info('Interval multiplier in effect; interval is '
                     'now {0}s'.format(interval))
Helper function that waits for an IP address for a specific maximum amount of time.

:param update_callback: callback function which queries the cloud provider for the VM ip address. It must return None if the required data, IP included, is not available yet.
:param update_args: Arguments to pass to update_callback
:param update_kwargs: Keyword arguments to pass to update_callback
:param timeout: The maximum amount of time (in seconds) to wait for the IP address.
:param interval: The looping interval, i.e., the amount of time to sleep before the next iteration.
:param interval_multiplier: Increase the interval by this multiplier after each request; helps with throttling
:param max_failures: If update_callback returns ``False`` it's considered query failure. This value is the amount of failures accepted before giving up.
:returns: The update_callback returned data
:raises: SaltCloudExecutionTimeout
625941b46fb2d068a760ee6c
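A minimal usage sketch for wait_for_ip above; `query_vm_ip` is a hypothetical callback for illustration (returning None means "not ready yet", so the helper keeps polling):

attempts = {'n': 0}

def query_vm_ip(vm_name):
    attempts['n'] += 1
    return '10.0.0.5' if attempts['n'] >= 3 else None  # ready on the third poll

ip = wait_for_ip(query_vm_ip, update_args=('my-vm',), timeout=60, interval=1)
print(ip)  # '10.0.0.5'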
def save_csv():
    url_dict = get_url()
    for client in url_dict:
        print('Saving {} csv file...'.format(client))
        response = requests.get(url_dict[client], stream=True)
        filename = os.getcwd() + "/" + 'clients/' + client + '/' + client + '.csv'
        # use a context manager so the file handle is closed after writing
        with open(filename, "wb") as handle:
            for chunk in response.iter_content(chunk_size=512):
                if chunk:
                    handle.write(chunk)
Saves a .csv file for each organisation in the org_list. Saved filename is short_name + month/year + .csv (e.g. orgname_march17.csv)
625941b45166f23b2e1a4f23
@app.route("/tickets/<ticket_id>", methods=["GET"])
def ticket_details(ticket_id):
    ticket_url = zd.format_ticket(session["subdomain"], ticket_id)
    resp = zd.request(ticket_url, session["email"], session["password"])
    ticket = resp["ticket"]
    return render_template("ticket_details.html", ticket=ticket)
Shows a ticket's details.
625941b4e8904600ed9f1cf4
def youngest():
    def ages(p):
        return p['age']
    return sorted(PEOPLE_LIST, key=ages)
sort by age in ascending order
625941b415fb5d323cde08d3
def register(self, sim, *args, **kwargs):
    kwargs.update(zip(self.meta_names, args))
    super(SimRegistry, self).register(sim, **kwargs)
Register simulation and metadata.

* ``commands`` - list of methods callable from model

:param sim: new simulation
625941b47c178a314d6ef222
def CalcPsychrometricsFromRelHum(TDryBulb: float, RelHum: float, Pressure: float) -> tuple:
    HumRatio = GetHumRatioFromRelHum(TDryBulb, RelHum, Pressure)
    TWetBulb = GetTWetBulbFromHumRatio(TDryBulb, HumRatio, Pressure)
    TDewPoint = GetTDewPointFromHumRatio(TDryBulb, HumRatio, Pressure)
    VapPres = GetVapPresFromHumRatio(HumRatio, Pressure)
    MoistAirEnthalpy = GetMoistAirEnthalpy(TDryBulb, HumRatio)
    MoistAirVolume = GetMoistAirVolume(TDryBulb, HumRatio, Pressure)
    DegreeOfSaturation = GetDegreeOfSaturation(TDryBulb, HumRatio, Pressure)
    return HumRatio, TWetBulb, TDewPoint, VapPres, MoistAirEnthalpy, MoistAirVolume, DegreeOfSaturation
Utility function to calculate humidity ratio, wet-bulb temperature, dew-point temperature, vapour pressure, moist air enthalpy, moist air volume, and degree of saturation of air given dry-bulb temperature, relative humidity and pressure.

Args:
    TDryBulb : Dry-bulb temperature in °F [IP] or °C [SI]
    RelHum : Relative humidity in range [0, 1]
    Pressure : Atmospheric pressure in Psi [IP] or Pa [SI]

Returns:
    Humidity ratio in lb_H₂O lb_Air⁻¹ [IP] or kg_H₂O kg_Air⁻¹ [SI]
    Wet-bulb temperature in °F [IP] or °C [SI]
    Dew-point temperature in °F [IP] or °C [SI]
    Partial pressure of water vapor in moist air in Psi [IP] or Pa [SI]
    Moist air enthalpy in Btu lb⁻¹ [IP] or J kg⁻¹ [SI]
    Specific volume of moist air in ft³ lb⁻¹ [IP] or in m³ kg⁻¹ [SI]
    Degree of saturation [unitless]
625941b4925a0f43d2549c3e
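A hedged usage sketch of CalcPsychrometricsFromRelHum with SI inputs, assuming the Get* helpers above are configured for SI units (in PsychroLib this is done with SetUnitSystem(SI)); the numeric values are illustrative:

(HumRatio, TWetBulb, TDewPoint, VapPres,
 MoistAirEnthalpy, MoistAirVolume, DegSat) = CalcPsychrometricsFromRelHum(
    TDryBulb=25.0,      # °C
    RelHum=0.50,        # 50 % relative humidity
    Pressure=101325.0,  # Pa (standard atmosphere)
)
print(HumRatio)  # humidity ratio in kg_H2O per kg of dry air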
def parse_object(response, infotype):
    if infotype in ("idletime", "refcount"):
        return int_or_none(response)
    return response
Parse the results of an OBJECT command
625941b4cb5e8a47e48b787c
def traverse_tree_dnds(self, source_node, target_node, storage_index):
    try:
        full_target_seq = self.alndict[target_node.name]
    except KeyError:
        print("\n\nTree node names do not have matches in alignment file. Make sure that the provided alignment file *includes ancestral sequences*.")
        sys.exit()
    full_source_seq = self.alndict[source_node.name]
    bl = target_node.branch_length
    for s in range(0, self.alnlen * 3, 3):
        source_seq = full_source_seq[s:s+3]
        target_seq = full_target_seq[s:s+3]
        calcer = CalcOverBranch(source_seq, target_seq, bl, self.B)
        n_changes, s_changes, n_sites, s_sites = calcer.compute_branch_dnds()
        # integer division so the codon index is a valid array index in Python 3
        self.n_changes[s // 3][storage_index] = n_changes
        self.s_changes[s // 3][storage_index] = s_changes
        self.n_sites[s // 3][storage_index] = n_sites
        self.s_sites[s // 3][storage_index] = s_sites
    storage_index += 1
    if len(target_node.children) > 0:
        for child in target_node.children:
            storage_index = self.traverse_tree_dnds(target_node, child, storage_index)
    return storage_index
Traverse the tree to compute and store dN, dS quantities (sites and changes) at each edge.
625941b4283ffb24f3c556d9
def playGame(wordList):
    hand = {}
    while True:
        user_input = input('Enter n to deal a new hand, r to replay the last hand, or e to end game:')
        if user_input == 'n':
            hand = dealHand(HAND_SIZE)
            b_dict = dict(hand)
            displayHand(hand)
            playHand(hand, wordList, HAND_SIZE)
        elif user_input == 'r':
            if hand == {}:
                print('You have not played a hand yet. Please play a new hand first!')
            else:
                playHand(b_dict, wordList, HAND_SIZE)
        elif user_input == 'e':
            print('exit game')
            return
        else:
            print('invalid input')
Allow the user to play an arbitrary number of hands.

1) Asks the user to input 'n' or 'r' or 'e'.
   * If the user inputs 'n', let the user play a new (random) hand.
   * If the user inputs 'r', let the user play the last hand again.
   * If the user inputs 'e', exit the game.
   * If the user inputs anything else, tell them their input was invalid.
2) When done playing the hand, repeat from step 1
625941b456ac1b37e6263fad
def save(self, filename):
    with open(filename, 'w') as pbs_file:
        pbs_file.write(str(self))
Saves this PBS job to a file.

Parameters
----------
filename : str
    specifies where to save this PBS file
625941b410dbd63aa1bd297c
@app.post('/api/query')
def handler_api_query():
    entry = None
    try:
        obj = request.json
        buffer_id = obj['buffer_id']
        timestamp = str(obj['timestamp'])
        entry = RenderedMarkupCache.instance().get_entry(buffer_id)
    except:
        return None
    if entry is None or entry.disconnected:
        return {'status': 'DISCONNECTED'}
    if entry.timestamp == timestamp:
        return {'status': 'UNCHANGED'}
    result = {
        'status': 'OK',
        'timestamp': entry.timestamp,
        'revivable_key': entry.revivable_key,
        'filename': entry.filename,
        'dirname': entry.dirname,
        'html_part': entry.html_part
    }
    return result
Querying for updates.
625941b45f7d997b87174867
def set_crypto_pragmas(db_handle, sqlcipher_opts):
    opts = sqlcipher_opts
    _set_key(db_handle, opts.key, opts.is_raw_key)
    _set_cipher(db_handle, opts.cipher)
    _set_kdf_iter(db_handle, opts.kdf_iter)
    _set_cipher_page_size(db_handle, opts.cipher_page_size)
Set cryptographic params (key, cipher, KDF number of iterations and cipher page size).

:param db_handle:
:type db_handle:
:param sqlcipher_opts: options for the SQLCipherDatabase
:type sqlcipher_opts: SQLCipherOpts instance
625941b44a966d76dd550dd7
def get(self, id_, type_):
    if get_authentication():
        if request.authorization is None:
            return failed_authentication()
        else:
            try:
                auth = check_authorization(request, get_session())
                if auth is False:
                    return failed_authentication()
            except Exception as e:
                status_code, message = e.get_HTTP()
                return set_response_headers(jsonify(message), status_code=status_code)
    class_type = get_doc().collections[type_]["collection"].class_.title
    if checkClassOp(class_type, "GET"):
        try:
            response = crud.get(id_, class_type, api_name=get_api_name(), session=get_session())
            return set_response_headers(jsonify(hydrafy(response)))
        except Exception as e:
            status_code, message = e.get_HTTP()
            return set_response_headers(jsonify(message), status_code=status_code)
    abort(405)
GET object with id = id_ from the database.
625941b476d4e153a657e8fb
def test_write_gff_file(self, seqprop_with_i, tmpdir):
    outpath = tmpdir.join('test_seqprop_with_i_write_gff_file.gff').strpath
    seqprop_with_i.write_gff_file(outfile=outpath, force_rerun=True)
    assert op.exists(outpath)
    assert op.getsize(outpath) > 0
    assert seqprop_with_i.feature_path == outpath
    assert seqprop_with_i.feature_file == 'test_seqprop_with_i_write_gff_file.gff'
    assert seqprop_with_i.feature_dir == tmpdir
    with pytest.raises(ValueError):
        seqprop_with_i.features = ['NOFEATURES']
Test writing the features, and that features are now loaded from a file
625941b48e05c05ec3eea13c
def p_expr_name(self, p):
    p[0] = ast.Name(
        self.position(p, 1),
        s=p[1])
expr : NAME
625941b499cbb53fe67929b3
def exporter_text(self, test_result_ext):
    success_code = 0
    pt = PrettyTable(["Result", "Target", "Toolchain", "Test ID",
                      "Test Description", "Elapsed Time", "Timeout"])
    pt.align["Result"] = "l"
    pt.align["Target"] = "l"
    pt.align["Toolchain"] = "l"
    pt.align["Test ID"] = "l"
    pt.align["Test Description"] = "l"
    pt.padding_width = 1
    result_dict = {"OK": 0, "FAIL": 0, "ERROR": 0, "UNDEF": 0,
                   "IOERR_COPY": 0, "IOERR_DISK": 0, "IOERR_SERIAL": 0,
                   "TIMEOUT": 0, "NO_IMAGE": 0, "MBED_ASSERT": 0,
                   "BUILD_FAILED": 0, "NOT_SUPPORTED": 0}
    unique_test_ids = self.get_all_unique_test_ids(test_result_ext)
    targets = sorted(test_result_ext.keys())
    for target in targets:
        toolchains = sorted(test_result_ext[target].keys())
        for toolchain in toolchains:
            test_cases = []
            tests = sorted(test_result_ext[target][toolchain].keys())
            for test in tests:
                test_results = test_result_ext[target][toolchain][test]
                for test_res in test_results:
                    test_ids = sorted(test_res.keys())
                    for test_no in test_ids:
                        test_result = test_res[test_no]
                        result_dict[test_result['result']] += 1
                        pt.add_row([test_result['result'],
                                    test_result['target_name'],
                                    test_result['toolchain_name'],
                                    test_result['id'],
                                    test_result['description'],
                                    test_result['elapsed_time'],
                                    test_result['duration']])
    result = pt.get_string()
    result += "\n"
    result += "Result: " + ' / '.join(
        ['%s %s' % (value, key)
         for (key, value) in {k: v for k, v in result_dict.items() if v != 0}.items()])
    return result
Prints a well-formed summary with results (SQL-table-like); the table shows a target x test results matrix.
625941b4a79ad161976cbf12
def test_check_random_state():
    rns = check_random_state(seed=None)
    assert isinstance(rns, np.random.RandomState)
    rns = check_random_state(seed=10)
    assert isinstance(rns, np.random.RandomState)
    cpy_rns = check_random_state(seed=rns)
    assert cpy_rns is rns
    T.assert_raises(ValueError, check_random_state, seed='bs')
Test for check_random_state
625941b4293b9510aa2c3065
def generate_sudoku(puzzle_file_name, choice=2):
    try:
        with open('empty_doku.txt', 'r') as f:
            generate_complete_input(f)
    except IOError:
        print("The empty sudoku file was not found. Please provide one in the correct format")
    lts = time.time()
    exec_cmd = '/bin/minisat -rnd-init -rnd-seed={} cnf_propositions.txt output.txt > /dev/null'.format(lts)
    exit_status = os.system(exec_cmd)
    with open('output.txt', 'r') as f:
        f.readline()
        print_sudoku(f, 'answer.txt', 2)
    if choice == '3':
        print("Here's some random fun - ")
        with open('answer.txt', 'r') as final:
            L = [i.split() for i in final.read().split('\n')]
            for i in range(9):
                for j in range(8):
                    print("{} ".format(L[i][j]), end='', flush=True)
                print("{}\n".format(L[i][8]), end='', flush=True)
        exit()
    fp = open('answer.txt', 'r')
    T = fp.read()
    R = [i.split() for i in T.split('\n')]
    status_map = [i for i in range(0, 81)]
    while status_map != []:
        rand_pos = random.SystemRandom().choice(status_map)
        status_map.remove(rand_pos)
        puzzle_fp = open(puzzle_file_name, 'w+')
        for i in range(9):
            for j in range(8):
                # != replaces the original identity checks (`is not`) on ints
                if i != int(rand_pos / 9) or j != (rand_pos % 9):
                    puzzle_fp.write("{} ".format(R[i][j]))
                else:
                    puzzle_fp.write(". ")
            # the original used bitwise `&`, which binds tighter than `==`;
            # `and` is what the surrounding logic intends
            if i == int(rand_pos / 9) and (rand_pos % 9) == 8:
                puzzle_fp.write(".\n")
            else:
                puzzle_fp.write("{}\n".format(R[i][8]))
        puzzle_fp.flush()
        puzzle_fp.seek(0)
        return_status = solve_sudoku(puzzle_fp, 2)
        if return_status == 0:
            R[int(rand_pos / 9)][rand_pos % 9] = '.'
        else:
            continue
    print("Here you go. Happy Sudoku Hours. :)")
    with open(puzzle_file_name, 'r') as final:
        L = [i.split() for i in final.read().split('\n')]
        for i in range(9):
            for j in range(8):
                print("{} ".format(L[i][j]), end='', flush=True)
            print("{}\n".format(L[i][8]), end='', flush=True)
    print("You can always look at the answer at answer.txt")
Generates a random sudoku using -rnd-init, setting the Linux time stamp as seed to minisat.

Args:
    puzzle_file_name: File name where the user wants an incomplete grid
625941b40a50d4780f666c5b
def notify(self, info):
    data = info.copy()
    data["project"] = self.project
    data["service"] = self.service
    key = self.namespace + data["base_id"] + "_" + data["trace_id"] + "_" + data["timestamp"]
    self.db.set(key, jsonutils.dumps(data))
Send notifications to Redis.

:param info: Contains information about trace element. In payload dict there are always 3 ids:
    "base_id" - uuid that is common for all notifications related to one trace. Used to simplify retrieving of all trace elements from Redis.
    "parent_id" - uuid of parent element in trace
    "trace_id" - uuid of current element in trace
    With parent_id and trace_id it's quite simple to build a tree of trace elements, which simplifies analysis of the trace.
625941b430bbd722463cbb8f
def test_image_get_all_marker_null_disk_format_asc(self):
    TENANT1 = str(uuid.uuid4())
    ctxt1 = context.RequestContext(is_admin=False, tenant=TENANT1,
                                   auth_tok='user:%s:user' % TENANT1)
    UUIDX = str(uuid.uuid4())
    self.db_api.image_create(ctxt1, {'id': UUIDX,
                                     'status': 'queued',
                                     'disk_format': None,
                                     'owner': TENANT1})
    images = self.db_api.image_get_all(ctxt1, marker=UUIDX,
                                       sort_key='disk_format',
                                       sort_dir='asc')
    image_ids = [image['id'] for image in images]
    expected = [UUID3, UUID2, UUID1]
    self.assertEqual(sorted(expected), sorted(image_ids))
Check that an image with a null disk_format is handled when a marker is specified and the sort order is ascending
625941b41d351010ab8558f1
def assert_file_count(self, pattern, count): <NEW_LINE> <INDENT> files = sorted(glob.glob(pattern)) <NEW_LINE> msg = "There should be {} files matching {!r}, but there are these: {}" <NEW_LINE> msg = msg.format(count, pattern, files) <NEW_LINE> assert len(files) == count, msg
Assert that there are `count` files matching `pattern`.
625941b423849d37ff7b2e5e
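Usage sketch inside a test case (pattern and count are illustrative):

    self.assert_file_count('.coverage.*', 2)
    # on failure, the assertion message lists every file that actually matched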
def IsRecording(self): <NEW_LINE> <INDENT> if self.whichCam() == constants.Camera.Interface.GPControl: <NEW_LINE> <INDENT> return self.getStatus(constants.Status.Status, constants.Status.STATUS.IsRecording) <NEW_LINE> <DEDENT> elif self.whichCam() == constants.Camera.Interface.Auth: <NEW_LINE> <INDENT> if self.getStatus(constants.Hero3Status.IsRecording) == "00": <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 1
Returns 1 if the camera is recording, 0 if it is not.
625941b48e71fb1e9831d580
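A polling sketch, assuming `camera` is an instance of this GoPro wrapper class:

    import time
    while camera.IsRecording():  # 1 while recording, 0 once stopped
        time.sleep(1)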
def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(Box, self).__init__(*args, **kwargs) <NEW_LINE> if 'box_extents' in kwargs: <NEW_LINE> <INDENT> self.box_extents = kwargs['box_extents'] <NEW_LINE> <DEDENT> if 'box_transform' in kwargs: <NEW_LINE> <INDENT> self.box_transform = kwargs['box_transform'] <NEW_LINE> <DEDENT> if 'box_center' in kwargs: <NEW_LINE> <INDENT> self.box_center = kwargs['box_center'] <NEW_LINE> <DEDENT> self._unit_box = creation.box()
Create a Box primitive, which is a subclass of Trimesh Arguments ---------- box_extents: (3,) float, size of box box_transform: (4,4) float, transformation matrix for box box_center: (3,) float, convenience parameter which updates box_transform with a translation-only matrix
625941b43c8af77a43ae3572
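A construction sketch using the documented keyword arguments; the extent and center values are illustrative:

    box = Box(box_extents=[1.0, 2.0, 3.0],
              box_center=[0.0, 0.0, 1.5])  # applies a translation-only box_transform
    print(box.volume)                      # inherited from Trimesh; 1*2*3 = 6.0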
def access_database(db_name): <NEW_LINE> <INDENT> path = os.path.dirname(os.path.abspath(__file__)) <NEW_LINE> conn = sqlite3.connect(path + '/' + db_name) <NEW_LINE> cur = conn.cursor() <NEW_LINE> return cur, conn
This function takes in a database file name (db_name) and opens the SQLite database at that path next to this module, e.g. crime.db, which is used to store all the data. It returns a cursor and connection for accessing the data.
625941b4aad79263cf390806
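Usage sketch; the table name below is hypothetical, the database file lives next to the module:

    cur, conn = access_database('crime.db')
    cur.execute('SELECT COUNT(*) FROM crimes')  # 'crimes' is an illustrative table name
    print(cur.fetchone()[0])
    conn.close()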
def read(self, output_data_filename): <NEW_LINE> <INDENT> self._update_from_liggghts(output_data_filename)
Read from file. Parameters ---------- output_data_filename : name of the data file the info is read from (i.e. LIGGGHTS's output).
625941b46fece00bbac2d506
@timer <NEW_LINE> def test_gsparams(): <NEW_LINE> <INDENT> rng = galsim.BaseDeviate(1234) <NEW_LINE> ucn = galsim.UncorrelatedNoise(rng=rng, variance=1.e3) <NEW_LINE> gsp = galsim.GSParams(folding_threshold=1.e-4, maxk_threshold=1.e-4, maximum_fft_size=1.e4) <NEW_LINE> ucn1 = ucn.withGSParams(gsp) <NEW_LINE> ucn2 = galsim.UncorrelatedNoise(rng=rng, variance=1.e3, gsparams=gsp) <NEW_LINE> ucn3 = ucn.withGSParams(folding_threshold=1.e-4, maxk_threshold=1.e-4, maximum_fft_size=1.e4) <NEW_LINE> print('ucn1 = ',repr(ucn1)) <NEW_LINE> print('ucn2 = ',repr(ucn2)) <NEW_LINE> print('ucn3 = ',repr(ucn3)) <NEW_LINE> assert ucn != ucn1 <NEW_LINE> assert ucn1 == ucn2 <NEW_LINE> assert ucn1 == ucn3 <NEW_LINE> assert ucn2 == ucn3 <NEW_LINE> assert ucn.withGSParams(ucn.gsparams) is ucn <NEW_LINE> assert ucn1.withGSParams(ucn.gsparams) is not ucn <NEW_LINE> assert ucn1.withGSParams(ucn.gsparams) == ucn <NEW_LINE> ccn = galsim.getCOSMOSNoise(rng=rng) <NEW_LINE> ccn1 = ccn.withGSParams(gsp) <NEW_LINE> ccn2 = galsim.getCOSMOSNoise(rng=rng, gsparams=gsp) <NEW_LINE> ccn3 = ccn.withGSParams(folding_threshold=1.e-4, maxk_threshold=1.e-4, maximum_fft_size=1.e4) <NEW_LINE> assert ccn != ccn1 <NEW_LINE> assert ccn1 == ccn2 <NEW_LINE> assert ccn1 == ccn3 <NEW_LINE> assert ccn2 == ccn3 <NEW_LINE> assert ccn.withGSParams(ccn.gsparams) is ccn <NEW_LINE> assert ccn1.withGSParams(ccn.gsparams) is not ccn <NEW_LINE> assert ccn1.withGSParams(ccn.gsparams) == ccn
Test withGSParams
625941b44f88993c3716be41
def export_csv(self): <NEW_LINE> <INDENT> db_query = sql() <NEW_LINE> result, numrows = db_query.get_all_books() <NEW_LINE> with open('books.csv', 'wb') as csvfile: <NEW_LINE> <INDENT> csvwriter = csv.writer(csvfile, delimiter=',', quotechar='"') <NEW_LINE> for row in result: <NEW_LINE> <INDENT> csvwriter.writerow([row['mtype'],row['author'], row['title']]) <NEW_LINE> <DEDENT> <DEDENT> return
Export the entire database to CSV when called from the command line.
625941b47d847024c06be08c
def get_conn(): <NEW_LINE> <INDENT> if not hasattr(g, 'redis'): <NEW_LINE> <INDENT> g.redis = RedisClient() <NEW_LINE> <DEDENT> return g.redis
Get the Redis client object for the current Flask application context, creating it on first use and caching it on flask.g. :return: RedisClient instance
625941b4cb5e8a47e48b787d
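Typical use inside a Flask view, relying on flask.g so one RedisClient is shared per application context (the route and the count() method are hypothetical):

    @app.route('/stats')
    def stats():
        redis = get_conn()  # first call creates the client, later calls reuse it
        return str(redis.count())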
def RefreshItems(self): <NEW_LINE> <INDENT> selected_text = None <NEW_LINE> selected_index = self.GetFirstSelected() <NEW_LINE> if selected_index >= 0: <NEW_LINE> <INDENT> selected_text = self.GetItemText(selected_index) <NEW_LINE> <DEDENT> selected_index = -1 <NEW_LINE> header_lengths = {} <NEW_LINE> col_lengths = {} <NEW_LINE> for col_name, col_label in self._column_map: <NEW_LINE> <INDENT> col_lengths[col_name] = 0 <NEW_LINE> header_lengths[col_name] = len(col_label + " ") <NEW_LINE> <DEDENT> self.Freeze() <NEW_LINE> for row_index in range(self.ItemCount): <NEW_LINE> <INDENT> row = self._data_map[self.GetItemData(row_index)] <NEW_LINE> col_index = 0 <NEW_LINE> for col_name, col_label in self._column_map: <NEW_LINE> <INDENT> col_value = "" if (col_name not in row or row[col_name] is None ) else unicode(row[col_name]) <NEW_LINE> col_lengths[col_name] = max(col_lengths[col_name], len(col_value)) <NEW_LINE> self.SetStringItem(row_index, col_index, col_value) <NEW_LINE> self.itemDataMap[self.GetItemData(row_index)][col_index] = row[col_name] if col_name in row else None <NEW_LINE> if selected_text == col_value: <NEW_LINE> <INDENT> selected_index = row_index <NEW_LINE> <DEDENT> col_index += 1 <NEW_LINE> <DEDENT> <DEDENT> col_index = 0 <NEW_LINE> for col_name, col_label in self._column_map: <NEW_LINE> <INDENT> self.SetColumnWidth(col_index, wx.LIST_AUTOSIZE if ( not col_index or (col_lengths[col_name] > header_lengths[col_name]) ) else header_lengths[col_name] * self.GetTextExtent("w")[0] ) <NEW_LINE> col_index += 1 <NEW_LINE> <DEDENT> if selected_index >= 0: <NEW_LINE> <INDENT> self.Select(selected_index) <NEW_LINE> <DEDENT> self.Thaw()
Refreshes the content of all rows. Re-selects the previously selected row, if any.
625941b463f4b57ef0000ef1
def iden(string): <NEW_LINE> <INDENT> return string
Identity function. Used as the no-op "after modification" step for a generated string.
625941b415fb5d323cde08d4
def featurize(self) -> Tuple[str, str]: <NEW_LINE> <INDENT> name = self.data <NEW_LINE> first, last = name[0], name[-1] <NEW_LINE> return first, last
Converts raw text into a feature. Returns: Tuple[str, str]: the first and last letters of the name.
625941b40fa83653e4656d8b
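A standalone restatement of the first/last-letter feature, assuming self.data holds the raw name:

    def first_last(name: str) -> tuple:
        return name[0], name[-1]

    assert first_last('maria') == ('m', 'a')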
def __init__(self, pair: Tuple[str, str], pair_id: int = None): <NEW_LINE> <INDENT> self.pair = pair <NEW_LINE> self.id = pair_id <NEW_LINE> self.freq = 0 <NEW_LINE> self.token, self.lemma, self.pos, self.dep = (collections.Counter() for _ in range(4))
A __slots__ class representing a pattern and its frequency information. Attributes: self.pair: The pair of words. self.id: Id of the pair. self.freq: The frequency of the pair. self.token, self.lemma, self.pos, self.dep: The items between the words in the pair, represented in token, lemma, pos or dep form, with their frequencies. self.fields: Yields self.token, self.lemma, self.pos and self.dep and their names.
625941b4d10714528d5ffaaa
def init_train_model(self, dir_path, model_name, current_iter, iters_num=None, topics_num=10, twords_num=200, alpha=-1.0, beta=0.01, data_file="", prior_file=""): <NEW_LINE> <INDENT> if current_iter == 0: <NEW_LINE> <INDENT> logging.debug("init a new train model") <NEW_LINE> self.init_corpus_with_file(data_file) <NEW_LINE> self.dir_path = dir_path <NEW_LINE> self.model_name = model_name <NEW_LINE> self.current_iter = current_iter <NEW_LINE> self.iters_num = iters_num <NEW_LINE> self.topics_num = topics_num <NEW_LINE> self.K = topics_num <NEW_LINE> self.twords_num = twords_num <NEW_LINE> self.alpha = numpy.array([alpha if alpha > 0 else (50.0/self.K) for k in range(self.K)]) <NEW_LINE> self.beta = numpy.array([beta if beta > 0 else 0.01 for w in range(self.V)]) <NEW_LINE> self.Z = [[numpy.random.randint(self.K) for n in range(len(self.arts_Z[m]))] for m in range(self.M)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.debug("init an existed model") <NEW_LINE> self.dir_path = dir_path <NEW_LINE> self.model_name = model_name <NEW_LINE> self.current_iter = current_iter <NEW_LINE> self.iters_num = iters_num <NEW_LINE> self.twords_num = twords_num <NEW_LINE> self.load_model() <NEW_LINE> <DEDENT> self.init_statistics() <NEW_LINE> self.sum_alpha_beta() <NEW_LINE> if prior_file: <NEW_LINE> <INDENT> self.load_twords(prior_file) <NEW_LINE> <DEDENT> return self
:key: Initialize the training model. Depending on whether current_iter equals 0, either initialize a new model or load an existing one :key: When initializing a new model, all parameters except the prior_file prior file are required, and current_iter must equal 0 :key: When loading an existing model, only dir_path, model_name, current_iter (non-zero), iters_num and twords_num are needed :param iters_num: may be an integer value or "auto"
625941b4aad79263cf390807
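The two documented entry points, sketched with illustrative paths; `model` is assumed to be an instance of the owning LDA class:

    # fresh run: current_iter == 0, so corpus and hyperparameters are required
    model.init_train_model('out/', 'news', current_iter=0, iters_num=1000,
                           topics_num=10, data_file='corpus.txt')
    # resume: current_iter != 0, so only the bookkeeping arguments are needed
    model.init_train_model('out/', 'news', current_iter=500, iters_num=1000)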
def register_names_to_del(self, names): <NEW_LINE> <INDENT> if isinstance(names, basestring): <NEW_LINE> <INDENT> names = [names] <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> assert all(isinstance(n, basestring) for n in iter(names)) <NEW_LINE> <DEDENT> except (TypeError, AssertionError): <NEW_LINE> <INDENT> raise ValueError('Invalid names argument') <NEW_LINE> <DEDENT> self.names_to_del = self.names_to_del.union(names)
Register names of fields that should not be pickled. Parameters ---------- names : iterable A collection of strings indicating names of fields on this object that should not be pickled. Notes ----- All names registered will be deleted from the dictionary returned by the model's `__getstate__` method (unless a particular model overrides this method).
625941b4d18da76e2353229d
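Intended pattern, sketched: fields named here are dropped from the dict returned by __getstate__, so unpicklable handles do not break a pickle round-trip (field names below are illustrative):

    model.register_names_to_del('rng')               # a single string is accepted
    model.register_names_to_del(['_lock', '_file'])  # or any iterable of strings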
def turn_away_mode_off(self): <NEW_LINE> <INDENT> self._away = False
Turn away mode off.
625941b4cc40096d61595721
def register_ajax_handler(self, request, function): <NEW_LINE> <INDENT> if request in self.ajax_handlers: <NEW_LINE> <INDENT> L.error("Error: request:" + request + " is already registered") <NEW_LINE> return False <NEW_LINE> <DEDENT> self.ajax_handlers[request] = function <NEW_LINE> L.info("registered:"+request) <NEW_LINE> return True
A component registers its AJAX request handlers via this method
625941b48e05c05ec3eea13d
def sizeof(self, vertex_slice): <NEW_LINE> <INDENT> indices_bytes = calc_slice_bitfield_words(vertex_slice) * 4 <NEW_LINE> vertex_indices = self.indices_to_record[vertex_slice.python_slice] <NEW_LINE> sample_bytes = calc_bitfield_words(vertex_indices.count()) * 4 <NEW_LINE> return 4 + indices_bytes + (sample_bytes * self.simulation_ticks)
Get the size requirements of the region in bytes. Parameters ---------- vertex_slice : :py:func:`slice` A slice object which indicates which rows, columns or other elements of the region should be included. Returns ------- int The number of bytes required to store the data in the given slice of the region.
625941b455399d3f0558847f
def _get_default_config(self): <NEW_LINE> <INDENT> return {'bandwidth': 0, 'ubandwidth': "OFF", 'quota': 0, 'uquota': "OFF", 'vdomains': 0, 'uvdomains': "OFF", 'nsubdomains': 0, 'unsubdomains': "OFF", 'nemails': 0, 'unemails': "OFF", 'nemailf': 0, 'unemailf': "OFF", 'nemailml': 0, 'unemailml': "OFF", 'nemailr': 0, 'unemailr': "OFF", 'mysql': 0, 'umysql': "OFF", 'domainptr': 0, 'udomainptr': "OFF", 'ftp': 0, 'uftp': "OFF", 'aftp': "OFF", 'cgi': "ON", 'php': "ON", 'spam': "ON", 'cron': "ON", 'catchall': "OFF", 'ssl': "OFF", 'ssh': "OFF", 'sysinfo': "OFF", 'dnscontrol': "OFF"}
Get default config Returns a dictionary with the default configuration for a reseller user
625941b40a366e3fb873e5e2
def open_and_run(self, when_opened, *args, **kwargs): <NEW_LINE> <INDENT> condition_timer = QtCore.QTimer() <NEW_LINE> def handler(): <NEW_LINE> <INDENT> if self.dialog_opened(): <NEW_LINE> <INDENT> self._gui.invoke_later(when_opened, self) <NEW_LINE> self.dialog_was_opened = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> condition_timer.start() <NEW_LINE> <DEDENT> <DEDENT> condition_timer.setInterval(100) <NEW_LINE> condition_timer.setSingleShot(True) <NEW_LINE> condition_timer.timeout.connect(handler) <NEW_LINE> condition_timer.start() <NEW_LINE> self._assigned = False <NEW_LINE> try: <NEW_LINE> <INDENT> self._gui.invoke_later(self.open, *args, **kwargs) <NEW_LINE> self._helper.event_loop_until_condition( condition=self.value_assigned, timeout=15 ) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> condition_timer.stop() <NEW_LINE> condition_timer.timeout.disconnect(handler) <NEW_LINE> self._helper.event_loop() <NEW_LINE> self.assert_no_errors_collected()
Execute the function to open the dialog and run ``when_opened``. Parameters ---------- when_opened : callable A callable to be called when the dialog has been created and opened. The callable will be called with the tester instance as argument. *args, **kwargs : Additional arguments to be passed to the `function` attribute of the tester. Raises ------ AssertionError if an assertion error was captured during the deferred calls that open and close the dialog. RuntimeError if a result value has not been assigned within 15 seconds after calling `self.function` Any other exception that was captured during the deferred calls that open and close the dialog. .. note:: This method is synchronous
625941b463d6d428bbe442c3
def irlist_len(irlist): <NEW_LINE> <INDENT> if irlist == empty_irlist: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return 1 + irlist_len(irlist_rest(irlist))
Return the length of irlist. >>> irlist_len(empty_irlist) 0 >>> irlist_len(irlist_populate(1, 2, 3, 4, 5)) 5
625941b4c432627299f04a10
def createrawtransaction( self, outpoints: List[Dict[str, Any]], send_to: Dict[str, float], locktime: Optional[int] = None, ) -> str: <NEW_LINE> <INDENT> assert type(outpoints) == list <NEW_LINE> assert type(send_to) == dict <NEW_LINE> assert locktime is None or type(locktime) == int <NEW_LINE> return self.rpc_call("createrawtransaction", outpoints, send_to, locktime)
Creates a raw, unsigned transaction without any formal verification. see https://hsd-dev.org/api-docs/index.html#createrawtransaction outpoints ex: [{ "txid": "'$txhash'", "vout": '$txindex' }] send_to ex: { "'$address'": '$amount', "data": "'$data'" }
625941b4e5267d203edcda6e
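Call sketch against a Handshake node; the txid, address and amount below are placeholders:

    raw_tx = client.createrawtransaction(
        [{"txid": "00" * 32, "vout": 0}],
        {"hs1q0aaa...": 1.5},  # hypothetical address -> amount
        locktime=0,
    )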
def test_Nominal_Case(self): <NEW_LINE> <INDENT> log.D(self.test_Nominal_Case.__doc__) <NEW_LINE> log.I("UINT32_Max parameter in nominal case = 50") <NEW_LINE> value = "50" <NEW_LINE> hex_value = "0x32" <NEW_LINE> out, err = self.pfw.sendCmd("setParameter", self.param_name, value) <NEW_LINE> assert err == None, log.E("when setting parameter %s : %s" % (self.param_name, err)) <NEW_LINE> assert out == "Done", log.F("when setting parameter %s : %s" % (self.param_name, out)) <NEW_LINE> out, err = self.pfw.sendCmd("getParameter", self.param_name, "") <NEW_LINE> assert err == None, log.E("when setting parameter %s : %s" % (self.param_name, err)) <NEW_LINE> assert out == value, log.F("BLACKBOARD : Incorrect value for %s, expected: %s, found: %s" % (self.param_name, value, out)) <NEW_LINE> assert commands.getoutput('cat $PFW_RESULT/UINT32_Max') == hex_value, log.F("FILESYSTEM : parameter update error") <NEW_LINE> log.I("test OK")
Testing UINT32_Max in nominal case = 50 --------------------------------------- Test case description : ~~~~~~~~~~~~~~~~~~~~~~~ - set UINT32_Max parameter in nominal case = 50 Tested commands : ~~~~~~~~~~~~~~~~~ - [setParameter] function Used commands : ~~~~~~~~~~~~~~~ - [getParameter] function Expected result : ~~~~~~~~~~~~~~~~~ - UINT32_Max parameter set to 50 - Blackboard and filesystem values checked
625941b4796e427e537b038e
@app.route('/solve', methods=['GET']) <NEW_LINE> def solve(): <NEW_LINE> <INDENT> aalpha = request.args.getlist('alpha[]') <NEW_LINE> print(f'aalpha: {aalpha}') <NEW_LINE> alpha = [int(x) for x in aalpha] <NEW_LINE> qkt = QKTableaux(alpha) <NEW_LINE> mult = Mult() <NEW_LINE> nnum_samples = request.args.get('num_samples') <NEW_LINE> num_samples = int(nnum_samples) if nnum_samples is not None else 0 <NEW_LINE> status, num_solutions, samples = qkt.findAllSolutions( callbackFn=mult.addT, num_samples=num_samples, ) <NEW_LINE> report = { 'status': status, 'num_solutions': num_solutions, 'sample_solutions': samples, 'same_weight_samples': mult.same_weight_samples, } <NEW_LINE> return jsonify(report)
Finds all qkT for a given composition. Query Parameters: - alpha: a list representing the composition. - num_samples: the number of sample qkTs to be returned. Default is 0. Returns (JSON): { status: OPTIMAL/FEASIBLE/INFEASIBLE, num_solutions: the number of solutions, sample_solutions: a list of sample qkTs of length `num_samples`, same_weight_samples: null or a list of two sample qkT with the same wt, }
625941b407f4c71912b11252
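An illustrative client request for this endpoint; the host and port are assumptions:

    import requests
    r = requests.get('http://localhost:5000/solve',
                     params={'alpha[]': [2, 1], 'num_samples': 3})
    print(r.json()['num_solutions'])  # plus status, sample and same-weight solutions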
def test_tag_ner_list_list_greek(self): <NEW_LINE> <INDENT> text_list = ['τὰ', 'Σίλαριν', 'Σιννᾶν'] <NEW_LINE> tokens = ner.tag_ner('greek', input_text=text_list, output_type=list) <NEW_LINE> target = [('τὰ',), ('Σίλαριν', 'Entity'), ('Σιννᾶν', 'Entity')] <NEW_LINE> self.assertEqual(tokens, target)
Test tag_ner() with list input and list output, for Greek.
625941b46fece00bbac2d507
@index_blue.route("/news_list") <NEW_LINE> def news_list(): <NEW_LINE> <INDENT> cid = request.args.get("cid", "1") <NEW_LINE> page = request.args.get("page", "1") <NEW_LINE> per_page = request.args.get("per_page", 10) <NEW_LINE> try: <NEW_LINE> <INDENT> cid = int(cid) <NEW_LINE> page = int(page) <NEW_LINE> per_page = int(per_page) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> current_app.logger.error(e) <NEW_LINE> return jsonify(errno=RET.PARAMERR, errmsg="参数错误") <NEW_LINE> <DEDENT> filters = [News.status == 0] <NEW_LINE> if cid != 1: <NEW_LINE> <INDENT> filters.append(News.category_id == cid) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> paginate = News.query.filter(*filters).order_by(News.create_time.desc()).paginate(page, per_page, False) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> current_app.logger.error(e) <NEW_LINE> return jsonify(errno=RET.DBERR, errmsg="数据库查询错误") <NEW_LINE> <DEDENT> news_list = paginate.items <NEW_LINE> current_page = paginate.page <NEW_LINE> total_pages = paginate.pages <NEW_LINE> news_dict_li = [] <NEW_LINE> for news in news_list: <NEW_LINE> <INDENT> news_dict_li.append(news.to_basic_dict()) <NEW_LINE> <DEDENT> data = { "total_pages": total_pages, "current_page": current_page, "news_dict_li": news_dict_li } <NEW_LINE> return jsonify(errno=RET.OK, errmsg="OK", data=data)
Get the homepage news list data :return:
625941b4a17c0f6771cbde21