code (string, lengths 4–4.48k) | docstring (string, lengths 1–6.45k) | _id (string, length 24) |
---|---|---|
def CASE193( self, main ): <NEW_LINE> <INDENT> from tests.USECASE.SegmentRouting.SRDynamicConf.dependencies.SRDynamicConfTest import SRDynamicConfTest <NEW_LINE> SRDynamicConfTest.runTest( main, testIndex=193, topology='2x2', onosNodes=3, description='Changing port configuration from tagged 20 and native 10 to ' 'untagged 20', vlan=( 20, 0, 20, 20 ) ) | Tests connectivity after changing vlan configuration of port P1 from tagged 20 and native 10 to
untagged 20.
Port P2 is configured as untagged 10, and ports P3 and P4 are configured as tagged 20.
Hosts h1, h3 and h4 are configured with VLAN ID 20 and h2 is not configured.
Sets up 3 ONOS instances
Starts 2x2 leaf-spine topology
Pingall
Changes interface vlan configuration
Pingall | 625941b491af0d3eaac9b7f5 |
def __init__(self, url=None): <NEW_LINE> <INDENT> self._url = None <NEW_LINE> self.discriminator = None <NEW_LINE> if url is not None: <NEW_LINE> <INDENT> self.url = url | MattermostCallback - a model defined in Swagger | 625941b4d18da76e235322b2 |
def _prepend_name_prefix(self, name): <NEW_LINE> <INDENT> if not self.name_prefix: <NEW_LINE> <INDENT> return name <NEW_LINE> <DEDENT> base = self.name_prefix <NEW_LINE> if base[0] != '/': <NEW_LINE> <INDENT> base = '/' + base <NEW_LINE> <DEDENT> if name[0] != '/': <NEW_LINE> <INDENT> name = '/' + name <NEW_LINE> <DEDENT> return base + name | Return file name (i.e. path) with the prefix directory prepended | 625941b44e4d5625662d41bf
def __call__(self, func: FuncType) -> FuncType: <NEW_LINE> <INDENT> func = self.wrap_dependent(func) <NEW_LINE> func.__dependencies__.inspect_dependencies() <NEW_LINE> return func | Wrap a function and attempt to discover its dependencies by
inspecting the annotations on kwarg-only arguments.
>>>
>>> @injector
>>> def my_func(*, a_frob: Frob):
>>> assert isinstance(a_frob, Frob)
>>> | 625941b4377c676e91271f8c |
def log(manager, notice_uid): <NEW_LINE> <INDENT> action = game_define.GM_ACTION_DELETE_NOTICE <NEW_LINE> log_lst = action_base_gm.log_base(manager) <NEW_LINE> log_lst.append(str(action)) <NEW_LINE> log_lst.append(str(notice_uid)) <NEW_LINE> log_str = '$$'.join(log_lst) <NEW_LINE> return log_str | Output a log string | 625941b40c0af96317bb7fcb
def gradX_Y(self, X, Y, dim): <NEW_LINE> <INDENT> diffs = -X[:, [dim]] + Y[:, [dim]].T <NEW_LINE> exps = np.exp(diffs[np.newaxis, :, :] ** 2 / (-2 * self.sigma2s[:, np.newaxis, np.newaxis])) <NEW_LINE> return np.einsum('w,wij,ij->ij', self.wts / self.sigma2s, exps, diffs) | Compute the gradient with respect to the dimension dim of X in k(X, Y).
X: nx x d
Y: ny x d
Return a numpy array of size nx x ny. | 625941b42eb69b55b151c68b |
def mapper_linear_final(self): <NEW_LINE> <INDENT> yield 1,("x_t_x", [list(row) for row in self.x_t_x]) <NEW_LINE> yield 1,("x_t_y", [xy for xy in self.x_t_y]) <NEW_LINE> yield 1,("counts", self.counts) | Transforms numpy arrays x_t_x and x_t_y into json-encodable list format
and sends to reducer | 625941b426238365f5f0ec4a |
def _dict_helper(desc, row): <NEW_LINE> <INDENT> return dict([(desc[col[0]][0], col[1]) for col in enumerate(row)]) | Returns a dictionary for the given cursor.description and result row. | 625941b4283ffb24f3c556ef |
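A minimal standalone check of this DB-API cursor pattern (the function is copied from the row; sqlite3 serves here only as an example cursor source):

```python
import sqlite3

def _dict_helper(desc, row):
    # desc[i][0] is the i-th column name from cursor.description
    return dict([(desc[col[0]][0], col[1]) for col in enumerate(row)])

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("SELECT 1 AS id, 'alice' AS name")
print(_dict_helper(cur.description, cur.fetchone()))  # {'id': 1, 'name': 'alice'}
```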
def _create_test_message(): <NEW_LINE> <INDENT> sqs = SQSConnection() <NEW_LINE> sqs_q = sqs.get_queue(g.sitemap_sqs_queue) <NEW_LINE> assert sqs_q, "failed to connect to queue" <NEW_LINE> message = sqs_q.new_message(body=json.dumps({ 'job_name': 'daily-sr-sitemap-reporting', 'location': ('s3://reddit-data-analysis/big-data/r2/prod/' + 'daily_sr_sitemap_reporting/dt=2016-06-14'), 'timestamp': _current_timestamp(), })) <NEW_LINE> sqs_q.write(message) | A dev only function that drops a new message on the sqs queue. | 625941b4498bea3a759b9894 |
def test(self): <NEW_LINE> <INDENT> connection = self._connect() <NEW_LINE> ch = connection.channel() <NEW_LINE> q_name = 'TestQueueBindAndUnbindAndPurge_q' + uuid.uuid1().hex <NEW_LINE> exg_name = 'TestQueueBindAndUnbindAndPurge_exg_' + uuid.uuid1().hex <NEW_LINE> routing_key = 'TestQueueBindAndUnbindAndPurge' <NEW_LINE> res = ch.confirm_delivery() <NEW_LINE> self.assertIsNone(res) <NEW_LINE> ch.exchange_declare(exg_name, exchange_type='direct') <NEW_LINE> self.addCleanup(connection.channel().exchange_delete, exg_name) <NEW_LINE> ch.queue_declare(q_name, auto_delete=True) <NEW_LINE> self.addCleanup(lambda: self._connect().channel().queue_delete(q_name)) <NEW_LINE> frame = ch.queue_bind(q_name, exchange=exg_name, routing_key=routing_key) <NEW_LINE> self.assertIsInstance(frame.method, pika.spec.Queue.BindOk) <NEW_LINE> frame = ch.queue_declare(q_name, passive=True) <NEW_LINE> self.assertEqual(frame.method.message_count, 0) <NEW_LINE> ch.basic_publish(exg_name, routing_key, body='TestQueueBindAndUnbindAndPurge', mandatory=True) <NEW_LINE> frame = ch.queue_declare(q_name, passive=True) <NEW_LINE> self.assertEqual(frame.method.message_count, 1) <NEW_LINE> frame = ch.queue_unbind(queue=q_name, exchange=exg_name, routing_key=routing_key) <NEW_LINE> self.assertIsInstance(frame.method, pika.spec.Queue.UnbindOk) <NEW_LINE> with self.assertRaises(pika.exceptions.UnroutableError): <NEW_LINE> <INDENT> ch.basic_publish(exg_name, routing_key, body='TestQueueBindAndUnbindAndPurge-2', mandatory=True) <NEW_LINE> <DEDENT> frame = ch.queue_purge(q_name) <NEW_LINE> self.assertIsInstance(frame.method, pika.spec.Queue.PurgeOk) <NEW_LINE> self.assertEqual(frame.method.message_count, 1) <NEW_LINE> frame = ch.queue_declare(q_name, passive=True) <NEW_LINE> self.assertEqual(frame.method.message_count, 0) | BlockingChannel: Test queue_bind and queue_unbind | 625941b432920d7e50b27fad |
def get_name(self): <NEW_LINE> <INDENT> return self.module_alias | Get name of module
:return: Name of module
:rtype: str | 625941b44527f215b584c23d |
def getLv(self, vgName, lvName=None): <NEW_LINE> <INDENT> if lvName: <NEW_LINE> <INDENT> lv = self._lvs.get((vgName, lvName)) <NEW_LINE> if not lv or lv.is_stale(): <NEW_LINE> <INDENT> self.stats.miss() <NEW_LINE> lvs = self._reloadlvs(vgName) <NEW_LINE> lv = lvs.get((vgName, lvName)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.stats.hit() <NEW_LINE> <DEDENT> return lv <NEW_LINE> <DEDENT> if self._lvs_needs_reload(vgName): <NEW_LINE> <INDENT> self.stats.miss() <NEW_LINE> lvs = self._reloadlvs(vgName) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.stats.hit() <NEW_LINE> lvs = self._lvs.copy() <NEW_LINE> <DEDENT> lvs = [lv for lv in lvs.values() if not lv.is_stale() and (lv.vg_name == vgName)] <NEW_LINE> return lvs | Get specific LV or all LVs in specified VG.
If there are any stale LVs, reload the whole VG, since it would
cost us about the same effort anyhow, and these stale LVs can
be in the vg.
We never return Stale or Unreadable LVs when
getting all LVs for a VG, but may return a Stale or an Unreadable
LV when LV name is specified as argument.
Arguments:
vgName (str): VG name to query.
lvName (str): Optional LV name.
Returns:
LV namedtuple if lvName is specified, otherwise a list of LV
namedtuples for all LVs in VG vgName. | 625941b4c432627299f04a26
def add_test_run_listener(self, test_run_listener): <NEW_LINE> <INDENT> self._listeners.append(test_run_listener) | Registers the given TestRunListener. | 625941b415baa723493c3d54 |
def _display_layers(circ: Circuit, qubits: Qubits) -> Circuit: <NEW_LINE> <INDENT> N = len(qubits) <NEW_LINE> qubit_idx = dict(zip(qubits, range(N))) <NEW_LINE> gate_layers = DAGCircuit(circ).layers() <NEW_LINE> layers = [] <NEW_LINE> lcirc = Circuit() <NEW_LINE> layers.append(lcirc) <NEW_LINE> unused = [True] * N <NEW_LINE> for gl in gate_layers: <NEW_LINE> <INDENT> assert isinstance(gl, Circuit) <NEW_LINE> for gate in gl: <NEW_LINE> <INDENT> indices = [qubit_idx[q] for q in gate.qubits] <NEW_LINE> if not all(unused[min(indices):max(indices)+1]): <NEW_LINE> <INDENT> lcirc = Circuit() <NEW_LINE> layers.append(lcirc) <NEW_LINE> unused = [True] * N <NEW_LINE> <DEDENT> unused[min(indices):max(indices)+1] = [False] * (max(indices) - min(indices) + 1) <NEW_LINE> lcirc += gate <NEW_LINE> <DEDENT> <DEDENT> return Circuit(layers) | Separate a circuit into groups of gates that do not visually overlap | 625941b4be383301e01b5275 |
def handle_order_placement(request, checkout): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> order, redirect_url = create_order(checkout) <NEW_LINE> <DEDENT> except InsufficientStock: <NEW_LINE> <INDENT> return redirect('cart:index') <NEW_LINE> <DEDENT> if not order: <NEW_LINE> <INDENT> msg = pgettext('Checkout warning', 'Please review your checkout.') <NEW_LINE> messages.warning(request, msg) <NEW_LINE> <DEDENT> return redirect_url | Try to create an order and redirect the user as necessary.
This is a helper function. | 625941b4711fe17d8254215e |
def remove(self, item): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> row = self.item_list.index(item) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.log.debug("Attempted to remove item {0!r} but it is not in the list".format(item)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.beginRemoveRows(QModelIndex(), row, row) <NEW_LINE> del self.item_list[row] <NEW_LINE> self.endRemoveRows() | Remove the first instance of the specified item from the list. | 625941b4bf627c535bc12fb8 |
def get_mem_static_max(self): <NEW_LINE> <INDENT> if self.app.vmm.offline_mode: <NEW_LINE> <INDENT> return 4096 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.app.vmm.libvirt_conn.getInfo()[1] <NEW_LINE> <DEDENT> except libvirt.libvirtError as e: <NEW_LINE> <INDENT> self.log.warning('Failed to get memory limit for dom0: %s', e) <NEW_LINE> return 4096 | Get maximum memory available to Dom0.
.. seealso::
:py:meth:`qubes.vm.qubesvm.QubesVM.get_mem_static_max` | 625941b40a50d4780f666c71 |
def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return Team( can_public_join = True, created_by = '0', created_on = '0', description = '0', etag = '0', icon = '0', id = '0', modified_by = '0', modified_on = '0', name = '0' ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return Team( ) | Test Team
include_optional is a boolean; when False only required
params are included, when True both required and
optional params are included | 625941b426068e7796caeaba |
def set_baud_rate(self, baud_rate): <NEW_LINE> <INDENT> assert isinstance(baud_rate, int) and baud_rate > 0, 'Baud rate MUST be a positive integer' <NEW_LINE> self.baud_rate = baud_rate <NEW_LINE> self.project.logger.info('Set baud rate to %d', self.baud_rate) | Set serial baud rate
:param baud_rate: the serial baud rate, a positive integer
:return: | 625941b47c178a314d6ef239 |
def Alarm(self): <NEW_LINE> <INDENT> if self.need_to_pick_dir: <NEW_LINE> <INDENT> dirname = self.AskDirectory( title='Choose Directory For Nose Tests', initialdir=".") <NEW_LINE> self.try_change_to_new_dir( dirname ) <NEW_LINE> <DEDENT> elif numberOfChangedFiles( self.dirname ) > 0: <NEW_LINE> <INDENT> self.callNosy() <NEW_LINE> <DEDENT> self.SetAlarm() | Look for changed files every second, then reset alarm | 625941b4cb5e8a47e48b7892 |
def status(self, compute_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> amphora, fault = self.get_amphora(compute_id) <NEW_LINE> if amphora and amphora.status == 'ACTIVE': <NEW_LINE> <INDENT> return constants.UP <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> LOG.exception("Error retrieving nova virtual machine status.") <NEW_LINE> raise exceptions.ComputeStatusException() from e <NEW_LINE> <DEDENT> return constants.DOWN | Retrieve the status of a virtual machine.
:param compute_id: virtual machine UUID
:returns: constant of amphora status | 625941b4adb09d7d5db6c576 |
def _add_dummy_placeholder_item(self, parent_item, refreshing): <NEW_LINE> <INDENT> parent_uid = parent_item.data(self._SG_ITEM_UNIQUE_ID) <NEW_LINE> self._deferred_cache.add_item( parent_uid=None, sg_data={}, field_name="", is_leaf=False, uid=parent_uid ) <NEW_LINE> uid = self._dummy_placeholder_item_uid(parent_item) <NEW_LINE> display_name = shotgun_globals.get_type_display_name( self._deferred_query["entity_type"] ) <NEW_LINE> if refreshing: <NEW_LINE> <INDENT> text = "Retrieving %ss..." % display_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> text = "No %ss found" % display_name <NEW_LINE> <DEDENT> exists = self._deferred_cache.item_exists(uid) <NEW_LINE> self._deferred_cache.add_item( parent_uid=parent_uid, sg_data={"text": text, "type": ""}, field_name="text", is_leaf=True, uid=uid, ) <NEW_LINE> if not exists: <NEW_LINE> <INDENT> sub_item = self._create_item( parent=parent_item, data_item=self._deferred_cache.get_entry_by_uid(uid) ) <NEW_LINE> sub_item.setData(True, self._SG_ITEM_FETCHED_MORE) <NEW_LINE> sub_item.setSelectable(False) <NEW_LINE> sub_item.setEnabled(False) <NEW_LINE> sub_item.setIcon(QtGui.QIcon()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sub_item = self._get_item_by_unique_id(uid) <NEW_LINE> if sub_item: <NEW_LINE> <INDENT> self._update_item(sub_item, self._deferred_cache.get_entry_by_uid(uid)) <NEW_LINE> sub_item.setIcon(QtGui.QIcon()) <NEW_LINE> <DEDENT> <DEDENT> return uid | Create a dummy child item under the given item.
These items are used in tree views to show that a deferred query didn't
return any Shotgun record or that the data is being refreshed from Shotgun.
:param parent_item: A :class:`ShotgunStandardItem` instance.
:param refreshing: Whether the deferred data is being refreshed from Shotgun.
:returns: A string, the unique id for the item. | 625941b499fddb7c1c9de175 |
def plot_logistic_map(survival_factors: np.ndarray) -> None: <NEW_LINE> <INDENT> for it, sf in enumerate(survival_factors): <NEW_LINE> <INDENT> vals = logistic_map(sf) <NEW_LINE> plt.scatter(len(vals) * [sf], vals, color="black", s=0.05) <NEW_LINE> if it % 100 == 0: <NEW_LINE> <INDENT> print("{:.2f}%".format(100 * it / survival_factors.size)) <NEW_LINE> <DEDENT> <DEDENT> plt.title("Logistic map") <NEW_LINE> plt.show() | Plot the logistic map. | 625941b463d6d428bbe442d1 |
def __init__(self, size, default_value=None): <NEW_LINE> <INDENT> self.size = size <NEW_LINE> self.resistors = None <NEW_LINE> self.fill(default_value) | Creates a new circuit of `size` nodes and resistors initialized at `default_value`
:param size: the number of nodes in the circuit
:param default_value: the initial value of each resistor | 625941b499cbb53fe67929c9
def update(self, population): <NEW_LINE> <INDENT> added = 0 <NEW_LINE> removed = 0 <NEW_LINE> e_prog = 0 <NEW_LINE> for ind in population: <NEW_LINE> <INDENT> ind_rem, ind_add, ind_e_prog = self.sort_individual(ind) <NEW_LINE> added += ind_add <NEW_LINE> removed += ind_rem <NEW_LINE> e_prog += ind_e_prog <NEW_LINE> <DEDENT> return added, removed, e_prog | Update the epsilon Pareto front hall of fame with the *population* by adding
the individuals from the population that are not dominated by the hall
of fame. If any individual in the hall of fame is dominated it is
removed.
:param population: A list of individual with a fitness attribute to
update the hall of fame with. | 625941b49b70327d1c4e0bb6 |
def onDisconnectionByPeer(self, msg): <NEW_LINE> <INDENT> self.sendNotify(data={'ssh-event': 'disconnected-by-peer', 'more': str(msg)} ) <NEW_LINE> self.stop() | On disconnection by peer | 625941b426238365f5f0ec4b |
def imagenet_resnet_v2_generator(block_fn, layers, num_classes, data_format=None): <NEW_LINE> <INDENT> if data_format is None: <NEW_LINE> <INDENT> data_format = ( 'channels_first' if tf.test.is_built_with_cuda() else 'channels_last') <NEW_LINE> <DEDENT> def model(inputs, is_training): <NEW_LINE> <INDENT> if data_format == 'channels_first': <NEW_LINE> <INDENT> inputs = tf.transpose(inputs, [0, 3, 1, 2]) <NEW_LINE> <DEDENT> inputs = conv2d_fixed_padding( inputs=inputs, filters=64, kernel_size=7, strides=2, data_format=data_format) <NEW_LINE> inputs = tf.identity(inputs, 'initial_conv') <NEW_LINE> inputs = tf.layers.max_pooling2d( inputs=inputs, pool_size=3, strides=2, padding='SAME', data_format=data_format) <NEW_LINE> inputs = tf.identity(inputs, 'initial_max_pool') <NEW_LINE> inputs = block_layer( inputs=inputs, filters=64, block_fn=block_fn, blocks=layers[0], strides=1, is_training=is_training, name='block_layer1', data_format=data_format) <NEW_LINE> block_layer1 = tf.layers.flatten(inputs) <NEW_LINE> inputs = block_layer( inputs=inputs, filters=128, block_fn=block_fn, blocks=layers[1], strides=2, is_training=is_training, name='block_layer2', data_format=data_format) <NEW_LINE> block_layer2 = tf.layers.flatten(inputs) <NEW_LINE> inputs = block_layer( inputs=inputs, filters=256, block_fn=block_fn, blocks=layers[2], strides=2, is_training=is_training, name='block_layer3', data_format=data_format) <NEW_LINE> block_layer3 = tf.layers.flatten(inputs) <NEW_LINE> inputs = block_layer( inputs=inputs, filters=512, block_fn=block_fn, blocks=layers[3], strides=2, is_training=is_training, name='block_layer4', data_format=data_format) <NEW_LINE> block_layer4 = tf.layers.flatten(inputs) <NEW_LINE> inputs = tf.concat([block_layer3, block_layer4], axis=1) <NEW_LINE> return inputs <NEW_LINE> <DEDENT> return model | Generator for ImageNet ResNet v2 models.
Args:
block_fn: The block to use within the model, either `building_block` or
`bottleneck_block`.
layers: A length-4 array denoting the number of blocks to include in each
layer. Each layer consists of blocks that take inputs of the same size.
num_classes: The number of possible classes for image classification.
data_format: The input format ('channels_last', 'channels_first', or None).
If set to None, the format is dependent on whether a GPU is available.
Returns:
The model function that takes in `inputs` and `is_training` and
returns the output tensor of the ResNet model. | 625941b4091ae35668666d48 |
def SetInput1(self, *args): <NEW_LINE> <INDENT> return _itkMaskNegatedImageFilterPython.itkMaskNegatedImageFilterIUC3IUS3IUC3_Superclass_SetInput1(self, *args) | SetInput1(self, itkImageUC3 image1) | 625941b40fa83653e4656da1 |
def testApplyMissingChangeId(self): <NEW_LINE> <INDENT> series = self.GetPatchSeries() <NEW_LINE> patch1, patch2 = patches = self.GetPatches(2) <NEW_LINE> git_repo = os.path.join(self.build_root, patch1.project) <NEW_LINE> patch1.Fetch(git_repo) <NEW_LINE> patch1.GerritDependencies( git_repo, 'refs/remotes/cros/master').AndRaise( cros_patch.BrokenChangeID(patch1, 'Could not find changeid')) <NEW_LINE> self.SetPatchDeps(patch2) <NEW_LINE> self.SetPatchApply(patch2) <NEW_LINE> self.mox.ReplayAll() <NEW_LINE> self.assertResults(series, patches, [patch2], [patch1], []) <NEW_LINE> self.mox.VerifyAll() | Test that applies changes correctly with a dep with missing changeid. | 625941b482261d6c526ab285 |
def project_on_base(state, base): <NEW_LINE> <INDENT> if not isinstance(base, ApproximationBasis): <NEW_LINE> <INDENT> raise TypeError("Projection only possible on approximation bases.") <NEW_LINE> <DEDENT> projections = calculate_scalar_product_matrix(base.__class__(state), base) <NEW_LINE> scale_mat = calculate_scalar_product_matrix(base, base) <NEW_LINE> res = np.linalg.inv(scale_mat) @ projections.T <NEW_LINE> return np.reshape(res, (scale_mat.shape[0],)) | Projects a *state* on a basis given by *base*.
Args:
state (array_like): List of functions to approximate.
base (:py:class:`.ApproximationBasis`): Basis to project onto.
Return:
numpy.ndarray: Weight vector in the given *base* | 625941b438b623060ff0abd1 |
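In matrix form the row above is a standard Galerkin projection; a sketch of the computation using the code's own quantities, where $\phi_i$ are the fragments of *base* and $x$ is the state:

$$M_{ij} = \langle \phi_i, \phi_j \rangle, \qquad p_j = \langle x, \phi_j \rangle, \qquad w = M^{-1} p,$$

so the returned weights $w$ reproduce $x \approx \sum_i w_i \phi_i$ in the least-squares sense.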
def get_attribute_group(self): <NEW_LINE> <INDENT> return self._group | Get attribute group name | 625941b4e64d504609d74622 |
def get_artists_by_letter(first_letter): <NEW_LINE> <INDENT> pass | Return the list of artists with the given first letter | 625941b421a7993f00bc7aca
def to_js(self): <NEW_LINE> <INDENT> properties = "" <NEW_LINE> length = len(self.properties().values()) <NEW_LINE> for index, prop in enumerate(self.properties().values()): <NEW_LINE> <INDENT> properties += '%s:%s' % (prop.name, prop.to_js()) <NEW_LINE> if index != length - 1: <NEW_LINE> <INDENT> properties += "," <NEW_LINE> <DEDENT> <DEDENT> vars = {'properties': properties, 'class_name': "%s.%s" % (self.__class__.__module__, self.__class__.__name__)} <NEW_LINE> path = os.path.join(os.path.dirname(__file__), 'templates/command.js') <NEW_LINE> return template.render(path, vars) | Generates dojo class | 625941b4d10714528d5ffac1 |
def get_filetype(path): <NEW_LINE> <INDENT> output = str(subprocess.check_output(["file", path])) <NEW_LINE> filetype = output.split(':')[1].strip()[0:19].strip() <NEW_LINE> return filetype | Get the file type from the given binary
Obtained by taking the first 20 characters of the 'file' command's output
def test_main(capsys): <NEW_LINE> <INDENT> main = options('O')().main <NEW_LINE> pArgv = patch.object(sys, 'argv', ('o',)) <NEW_LINE> with pArgv: <NEW_LINE> <INDENT> argv = shlex.split("this is a sandwich") <NEW_LINE> exitCode = main(argv) <NEW_LINE> assert exitCode == 0 <NEW_LINE> out, err = [f.strip() for f in capsys.readouterr()] <NEW_LINE> assert out, err == ('this is a sandwich', "") <NEW_LINE> <DEDENT> with pArgv: <NEW_LINE> <INDENT> argv = shlex.split("this is an error") <NEW_LINE> exitCode = main(argv) <NEW_LINE> assert exitCode == 1 <NEW_LINE> out, err = [f.strip() for f in capsys.readouterr()] <NEW_LINE> rx = re.compile(r'\*\* o exit 1: This is an error!', re.DOTALL) <NEW_LINE> assert re.match(rx, out) <NEW_LINE> assert err == '' <NEW_LINE> <DEDENT> with pArgv: <NEW_LINE> <INDENT> argv = ['--flag'] <NEW_LINE> exitCode = main(argv) <NEW_LINE> assert exitCode == 1 <NEW_LINE> out, err = [f.strip() for f in capsys.readouterr()] <NEW_LINE> rx = re.compile(r'Usage: o.*Say hello.*flag requires argument', re.DOTALL) <NEW_LINE> assert re.match(rx, out) <NEW_LINE> assert err == '' <NEW_LINE> <DEDENT> with pArgv: <NEW_LINE> <INDENT> exitCode = main() <NEW_LINE> assert exitCode == 0 <NEW_LINE> out, err = [f.strip() for f in capsys.readouterr()] <NEW_LINE> assert (out, err) == ('', '') | The factory function main() does command-line shit | 625941b455399d3f05588495 |
def searchFolder(folder): <NEW_LINE> <INDENT> command = None <NEW_LINE> if os.path.exists(folder): <NEW_LINE> <INDENT> for root, dirs, files in os.walk(folder): <NEW_LINE> <INDENT> for cmd in cmdList: <NEW_LINE> <INDENT> if cmd in files: <NEW_LINE> <INDENT> command = os.path.join(root, cmd) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return command | Inline function to search for grass binaries into a folder
with os.walk | 625941b445492302aab5e0a2 |
def _fetchUrl(self,url): <NEW_LINE> <INDENT> url_data = self._urllib.urlopen(url).read() <NEW_LINE> return url_data | Fetch a URL
Args:
url: The URL to retrieve
Returns:
A string containing the body of the response. | 625941b4be8e80087fb20a31 |
def testNetOK(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> requests.get("http://www.baidu.com", headers=self.headers) <NEW_LINE> return True <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.printError(e) <NEW_LINE> return False | Test whether the current machine's network is reachable without using a proxy
:return: True if reachable, False otherwise | 625941b430dc7b766590174d
@manager.command <NEW_LINE> def migrate(): <NEW_LINE> <INDENT> from playhouse.migrate import migrate <NEW_LINE> from peewee import CharField, TextField, IntegerField <NEW_LINE> migrate( ) | Run database migration tasks. | 625941b43317a56b86939a4d
def emit_line(self, line): <NEW_LINE> <INDENT> buf = readline.get_line_buffer() <NEW_LINE> self.emit('\r', line, end='\n') <NEW_LINE> self.emit(self.prompt, buf) | Write string line to output without breaking input | 625941b499fddb7c1c9de176 |
def __init__(self, size, b): <NEW_LINE> <INDENT> self.array = [None] * size <NEW_LINE> self.table_size = size <NEW_LINE> self.count = 0 <NEW_LINE> self.b = b <NEW_LINE> self.collision_counter = 0 | When an object is created using the SeperateChainingHashTable, a table with linear probing of a default size is created.
:param size: Size of the table (a prime number)
:precondition: None
:postcondition: None
:complexity: Best Case = Worst Case = O(1), constant time complexity, since there are single statements being executed. | 625941b4f8510a7c17cf94e7 |
def toFeatureCoverage(*args): <NEW_LINE> <INDENT> return _ilwisobjects.FeatureCoverage_toFeatureCoverage(*args) | toFeatureCoverage(Object obj) -> FeatureCoverage | 625941b4d486a94d0b98df31 |
def __len__(self): <NEW_LINE> <INDENT> return len(self.descs) | Return the number of descriptors in the apd | 625941b45510c4643540f1dc |
def testStoragepoolNodetypes(self): <NEW_LINE> <INDENT> pass | Test StoragepoolNodetypes | 625941b4293b9510aa2c307c |
def delete_color(self, button=None): <NEW_LINE> <INDENT> color_stack = self.ids.color_stack <NEW_LINE> for color_widget in color_stack.selected_nodes: <NEW_LINE> <INDENT> color_stack.remove_widget(color_widget) <NEW_LINE> self.palette.colors.remove(color_widget.color) <NEW_LINE> <DEDENT> color_stack.clear_selection() <NEW_LINE> self.ids.action_view.remove_widget(self.delete_button) <NEW_LINE> self.mode = 'normal' | Deletes a color from its palette and removes it. | 625941b421a7993f00bc7acb |
def main(): <NEW_LINE> <INDENT> box_information_services = BoxInformationServices() <NEW_LINE> try: <NEW_LINE> <INDENT> box_information_services.run() <NEW_LINE> <DEDENT> except rospy.ROSInterruptException: <NEW_LINE> <INDENT> pass | Initialize BoxInformationServices. | 625941b4b57a9660fec33661 |
def save_module(mapped_module, filename="pickeled_pbandj.model", path=None): <NEW_LINE> <INDENT> if path == None: <NEW_LINE> <INDENT> path = './' + mapped_module.module_name <NEW_LINE> <DEDENT> pickeled_model_file = open(os.path.join(path, filename), 'w') <NEW_LINE> pickle.dump(mapped_module, pickeled_model_file) <NEW_LINE> pickeled_model_file.close() | Save a pickled representation of a mapped_module
| 625941b4d164cc6175782b30 |
def parse(self, data): <NEW_LINE> <INDENT> self.units = pickle.loads(data).units | parser to process the given source string | 625941b497e22403b379cd7b |
def test_query_customer_list_email(self): <NEW_LINE> <INDENT> customers = self._create_customers(10) <NEW_LINE> test_email = customers[0].email <NEW_LINE> email_customers = [customer for customer in customers if customer.email == test_email] <NEW_LINE> resp = self.app.get('/customers', query_string='email={}'.format(test_email)) <NEW_LINE> self.assertEqual(resp.status_code, HTTP_200_OK) <NEW_LINE> data = resp.get_json() <NEW_LINE> self.assertEqual(len(data), len(email_customers)) <NEW_LINE> for customer in data: <NEW_LINE> <INDENT> self.assertEqual(customer['email'], test_email) | Query Customer by Email | 625941b4851cf427c661a2fe |
def toPath(self, converterPath, isFirst, isLast): <NEW_LINE> <INDENT> assert isinstance(converterPath, ConverterPath) <NEW_LINE> return converterPath.normalize(self.value) | @see: Match.toPath | 625941b4a8370b7717052684 |
def run_threads(collection, target): <NEW_LINE> <INDENT> threads = [] <NEW_LINE> for i in range(NTHREADS): <NEW_LINE> <INDENT> bound_target = partial(target, collection, i) <NEW_LINE> threads.append(threading.Thread(target=bound_target)) <NEW_LINE> <DEDENT> for t in threads: <NEW_LINE> <INDENT> t.start() <NEW_LINE> <DEDENT> for t in threads: <NEW_LINE> <INDENT> t.join(60) <NEW_LINE> assert not t.isAlive() | Run a target function in many threads.
target is a function taking a Collection and an integer. | 625941b48c0ade5d55d3e7a2 |
def CreatePathMessageFromFolder(target_folder, shuffle=True, image_types=['png', 'jpg', 'jpeg'], balance=True): <NEW_LINE> <INDENT> class_path_lst = glob(os.path.join(target_folder, '*')) <NEW_LINE> class_dict = {} <NEW_LINE> image_path_message = [] <NEW_LINE> image_path_lst = [] <NEW_LINE> for index, class_path in enumerate(class_path_lst): <NEW_LINE> <INDENT> if not os.path.isdir(class_path): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> class_name = class_path.split('/')[-1] <NEW_LINE> class_dict[class_name] = index <NEW_LINE> imagelist = [] <NEW_LINE> for image_type in image_types: <NEW_LINE> <INDENT> imagelist += glob(os.path.join(class_path, '*'+image_type)) <NEW_LINE> <DEDENT> image_path_lst.append([[path, index] for path in imagelist]) <NEW_LINE> <DEDENT> if balance: <NEW_LINE> <INDENT> max_num = max([len(i) for i in image_path_lst]) <NEW_LINE> for image_paths in image_path_lst: <NEW_LINE> <INDENT> add_num = max_num - len(image_paths) <NEW_LINE> add_index = list(np.random.choice(len(image_paths), add_num)) <NEW_LINE> image_paths += [image_paths[j] for j in add_index] <NEW_LINE> image_path_message += image_paths <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for image_paths in image_path_lst: <NEW_LINE> <INDENT> image_path_message += image_paths <NEW_LINE> <DEDENT> <DEDENT> if shuffle: <NEW_LINE> <INDENT> random.shuffle(image_path_message) <NEW_LINE> <DEDENT> return image_path_message | Create item list of image paths
Args:
target_folder(str): A folder that contains all classes of images,
e.g.
target_folder
├── class0
├── image0.jpg
├── image1.jpg
├── ...
├── class1
├── image256.jpg
├── ...
├── ...
shuffle(bool): Whether to shuffle image path list.
image_types(list): The extension types of images.
balance(bool): Whether to balance each class.
Returns:
images_path_message(list): the message list format is [[`image_path`, `label`], ...].
e.g. [['/data/dogs_vs_cats/train/dogs/001.jpg', 1],
['/data/dogs_vs_cats/train/cats/001.jpg', 2],
...] | 625941b4d53ae8145f87a059 |
def __init__(self, master=None): <NEW_LINE> <INDENT> Frame.__init__(self, master) <NEW_LINE> self.master.rowconfigure(0, weight=1) <NEW_LINE> self.master.columnconfigure(0, weight=1) <NEW_LINE> self.grid(sticky=N+S+E+W) <NEW_LINE> l0 = Label(self, text="Email Database Search", font=("Helvetica", 16)) <NEW_LINE> l0.grid(row=0, column=1, columnspan=2) <NEW_LINE> l1 = Label(self, text="Not Before (yyy-mm-dd):") <NEW_LINE> l1.grid(row=1, column=1, sticky=N+S+E) <NEW_LINE> self.mindate = Entry(self) <NEW_LINE> self.mindate.grid(row=1, column=2, sticky=N+S+W) <NEW_LINE> l2 = Label(self, text="Not After (yyy-mm-dd):") <NEW_LINE> l2.grid(row=2, column=1, sticky=N+S+E) <NEW_LINE> self.maxdate = Entry(self) <NEW_LINE> self.maxdate.grid(row=2, column=2, sticky=N+S+W) <NEW_LINE> l3 = Label(self, text="Sender's E-mail Contains:") <NEW_LINE> l3.grid(row=3, column=1, sticky=N+S+E) <NEW_LINE> self.addsearch = Entry(self) <NEW_LINE> self.addsearch.grid(row=3, column=2, sticky=N+S+W) <NEW_LINE> l4 = Label(self, text="Sender's Name Contains:") <NEW_LINE> l4.grid(row=4, column=1, sticky=N+S+E) <NEW_LINE> self.namesearch = Entry(self) <NEW_LINE> self.namesearch.grid(row=4, column=2, sticky=N+S+W) <NEW_LINE> button = Button(self, text="Search", command=self.search_mail) <NEW_LINE> button.grid(row=5, column=2) <NEW_LINE> self.msgsubs = Listbox(self, height=10, width=100) <NEW_LINE> self.msgsubs.grid(row=8, column=1, columnspan=2) <NEW_LINE> self.msgsubs.bind("<Double-Button-1>", self.display_mail) <NEW_LINE> self.message = Text(self, width=100) <NEW_LINE> self.message.grid(row=9, column=1, columnspan=2) | Establish the window structure, leaving some widgets accessible
as app instance variables. Connect button clicks to search_mail
method and subject double-clicks to display_mail method. | 625941b4099cdd3c635f0a3f |
def convert_to_int(byte_arr): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for i in byte_arr: <NEW_LINE> <INDENT> result.append(int.from_bytes(i, byteorder='big')) <NEW_LINE> <DEDENT> result.insert(0, 1) <NEW_LINE> return result | Converts an array of bytes into an array of integers | 625941b4ad47b63b2c509d6e |
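A quick standalone check of the byte-to-int conversion (function copied from the row):

```python
def convert_to_int(byte_arr):
    result = []
    for i in byte_arr:
        result.append(int.from_bytes(i, byteorder='big'))
    result.insert(0, 1)  # a constant 1 is prepended to the output
    return result

print(convert_to_int([b'\x01', b'\xff']))  # [1, 1, 255]
```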
def json(self) -> Dict: <NEW_LINE> <INDENT> _d = self.pass_through_dict <NEW_LINE> _d['sentence_id'] = self.sentence_id <NEW_LINE> _d['translation'] = self.translation <NEW_LINE> _d['score'] = self.score <NEW_LINE> if self.nbest_translations is not None and len(self.nbest_translations) > 1: <NEW_LINE> <INDENT> _d['translations'] = self.nbest_translations <NEW_LINE> _d['scores'] = self.nbest_scores <NEW_LINE> <DEDENT> return _d | Returns a dictionary suitable for json.dumps() representing all
the information in the class. It is initialized with any keys
present in the corresponding `TranslatorInput` object's pass_through_dict.
Keys from here that are not overwritten by Sockeye will thus be passed
through to the output.
:return: A dictionary. | 625941b4ec188e330fd5a58b |
def angleChanged(self, *args, **kwargs): <NEW_LINE> <INDENT> pass | QGraphicsRotation.angleChanged [signal] | 625941b4627d3e7fe0d68c31 |
def get_entry(self, entry_id): <NEW_LINE> <INDENT> query_url = self.url_base + 'rest/entries/' + str(entry_id) <NEW_LINE> params = self.create_query_params() <NEW_LINE> return self.execute_query(query_url, params) | Retrieve a record for a single entry (title) by its identifier. | 625941b415fb5d323cde08eb |
def get_blocks_read(self): <NEW_LINE> <INDENT> return bool(self.get('blocks.read')) | @return:
@rtype: bool | 625941b496565a6dacc8f4b8 |
def flag(self, content_id, flag_request): <NEW_LINE> <INDENT> return self.start().uri('/content/item/flag').url_segment(content_id).request(flag_request).post().go() | Calls CleanSpeak to indicate that a user has flagged another user's content (also known as reporting and often used to allow users to
report content/chat from other users). This calls CleanSpeak's /content/item/flag end-point.
:parameter content_id: The id of the piece of content that is being flagged (see the docs for more information).
:parameter flag_request: The flag request that is converted to JSON and sent to CleanSpeak (see the docs for more information)
:type content_id: uuid
:type flag_request: object
:returns: A ClientResponse object that contains the response information from the API call. | 625941b4e1aae11d1e749a96 |
def GenerateConfig(context): <NEW_LINE> <INDENT> cluster_types_root = ''.join([ context.env['project'], '/', context.properties['clusterType'] ]) <NEW_LINE> cluster_types = { 'Service': ''.join([ cluster_types_root, ':', '/api/v1/namespaces/{namespace}/services' ]), 'Deployment': ''.join([ cluster_types_root, '-apps', ':', '/apis/apps/v1beta1/namespaces/{namespace}/deployments' ]) } <NEW_LINE> name_prefix = context.env['deployment'] + '-' + context.env['name'] <NEW_LINE> port = context.properties['port'] <NEW_LINE> resources = [{ 'name': name_prefix + '-service', 'type': cluster_types['Service'], 'properties': { 'apiVersion': 'v1', 'kind': 'Service', 'namespace': 'default', 'metadata': { 'name': name_prefix + '-service', 'labels': { 'id': 'deployment-manager' } }, 'spec': { 'type': 'NodePort', 'ports': [{ 'port': port, 'targetPort': port, 'protocol': 'TCP' }], 'selector': { 'app': name_prefix } } } }, { 'name': name_prefix + '-deployment', 'type': cluster_types['Deployment'], 'properties': { 'apiVersion': 'apps/v1beta1', 'kind': 'Deployment', 'namespace': 'default', 'metadata': { 'name': name_prefix + '-deployment' }, 'spec': { 'replicas': 1, 'template': { 'metadata': { 'labels': { 'name': name_prefix + '-deployment', 'app': name_prefix } }, 'spec': { 'containers': [{ 'name': 'container', 'image': context.properties['image'], 'ports': [{ 'containerPort': port }] }] } } } } }] <NEW_LINE> return {'resources': resources} | Generate YAML resource configuration. | 625941b42ae34c7f2600cf14 |
def key_has_item(self, key, item): <NEW_LINE> <INDENT> if key in self and item in self[key]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | look at only one association
will not fail if the key does not exist. | 625941b42c8b7c6e89b355a7
def isidentifier(candidate): <NEW_LINE> <INDENT> return isstring(candidate) and (re.match(identifier, candidate) is not None) | Asserts that a given candidate is a string with identifier limitations | 625941b4dc8b845886cb5317 |
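The helpers `isstring` and the `identifier` pattern are not shown in this row; a plausible self-contained sketch (the regex is an assumption, not the module's actual pattern):

```python
import re

identifier = re.compile(r"[A-Za-z_][A-Za-z0-9_]*\Z")  # assumed identifier pattern

def isstring(candidate):
    return isinstance(candidate, str)

def isidentifier(candidate):
    return isstring(candidate) and (re.match(identifier, candidate) is not None)

assert isidentifier("valid_name")
assert not isidentifier("1bad")
assert not isidentifier(42)  # non-strings are rejected before the regex runs
```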
def extract_image_classification(model): <NEW_LINE> <INDENT> model.extract_torchscript() <NEW_LINE> model.extract_onnx() | Runs extraction common for all image classification models | 625941b444b2445a33931e84 |
def option_wrong_value_msg(name, value, value_range): <NEW_LINE> <INDENT> return ("'%s' is not a valid 'options.%s' value.\nPossible values are %s" % (value, name, value_range)) | The provided value is not among the range of values that it should
be | 625941b48e7ae83300e4adaf |
def informativeStrains(self, samplelist=None, include_variances = None): <NEW_LINE> <INDENT> if not samplelist: <NEW_LINE> <INDENT> samplelist = self.samplelist <NEW_LINE> <DEDENT> samples = [] <NEW_LINE> values = [] <NEW_LINE> variances = [] <NEW_LINE> for sample in samplelist: <NEW_LINE> <INDENT> if sample in self.allTraitData: <NEW_LINE> <INDENT> _val, _var = self.allTraitData[sample].value, self.allTraitData[sample].variance <NEW_LINE> if _val != None: <NEW_LINE> <INDENT> if include_variances: <NEW_LINE> <INDENT> if _var != None: <NEW_LINE> <INDENT> samples.append(sample) <NEW_LINE> values.append(_val) <NEW_LINE> variances.append(_var) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> samples.append(sample) <NEW_LINE> values.append(_val) <NEW_LINE> variances.append(None) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return samples, values, variances, len(samples) | if readData was called, use this to output informative samples (sample with values) | 625941b466673b3332b91e7a |
def test_get_tickets_return(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> zenpy_client = Zenpy(**creds) <NEW_LINE> tickets = get_tickets(zenpy_client) <NEW_LINE> <DEDENT> except APIException: <NEW_LINE> <INDENT> print("Error connecting to API") <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> assert tickets is not None | Test get_tickets() using pytest framework
:return: void | 625941b476e4537e8c35145a |
def test_add_order_line_activity_file(self): <NEW_LINE> <INDENT> pass | Test case for add_order_line_activity_file
Attach a file to an orderLineActivity # noqa: E501 | 625941b463b5f9789fde6ec9 |
def string (name): <NEW_LINE> <INDENT> print ('Hello, ' + name) | Description of the function | 625941b4aad79263cf39081e
def first_nonzero_bit(bs): <NEW_LINE> <INDENT> bs = bs[2:] <NEW_LINE> return bs[::-1].index('1') | Return the zero-based index of the first non-zero bit (from the right, i.e. the least
significant set bit) for a given binary string. | 625941b40fa83653e4656da2
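A standalone check of the behaviour described above (function copied from the row):

```python
def first_nonzero_bit(bs):
    bs = bs[2:]                 # strip the '0b' prefix
    return bs[::-1].index('1')  # index of the lowest set bit

print(first_nonzero_bit(bin(12)))  # bin(12) == '0b1100' -> lowest set bit at index 2
```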
def __str__(self): <NEW_LINE> <INDENT> return json.dumps(self._to_dict(), indent=2) | Return a `str` version of this QueryTimesliceAggregation object. | 625941b44e4d5625662d41c1 |
def set_gateway(self, gateway): <NEW_LINE> <INDENT> for l3config in self._tree.iterfind("ipv4Configuration"): <NEW_LINE> <INDENT> l3config.attrib['addressConfiguration'] = "Fixed" <NEW_LINE> for gateway_obj in l3config.iterfind("DefaultGateway"): <NEW_LINE> <INDENT> self._set_address(gateway_obj, gateway) | Sets the IPv4 gateway for a ByteBlower Port
:param gateway: The gateway for the ByteBlower Port in the form of "10.4.8.1"
:type gateway: str | 625941b48e05c05ec3eea154 |
def check_call(cmd, cwd=None): <NEW_LINE> <INDENT> p = run(cmd, cwd=cwd, check=True) <NEW_LINE> return p.returncode | Drop in replacement for ``subprocess.check_call`` like functionality | 625941b4a17c0f6771cbde38 |
def num_seats_free(self, slot): <NEW_LINE> <INDENT> assert (slot.night_id == self.night_id) <NEW_LINE> Base.metadata.create_all(engine) <NEW_LINE> session = Session() <NEW_LINE> my_reservations = session.query(Reservation). filter(Reservation.table_id == self.id). filter(Reservation.slot_id == slot.id). all() <NEW_LINE> pax_sum = sum(len(group) for group in my_reservations) <NEW_LINE> session.close() <NEW_LINE> return self.num_seats - pax_sum | :type slot: object | 625941b47047854f462a11f1 |
def json_diff_keypath(self, source, target, mappings): <NEW_LINE> <INDENT> result = { "res_list": [], } <NEW_LINE> for mapping in mappings: <NEW_LINE> <INDENT> source_key = list(mapping["source"].keys())[0] <NEW_LINE> print(source_key) <NEW_LINE> soure_res = self.get_value_by_jsonpath(source, mapping["source"][source_key]) <NEW_LINE> print("soure res : {}".format(soure_res)) <NEW_LINE> target_key = list(mapping["target"].keys())[0] <NEW_LINE> target_res = self.get_value_by_jsonpath(target, mapping["target"][target_key]) <NEW_LINE> print("target res: {}".format(target_res)) <NEW_LINE> if type(soure_res) != type(target_res): <NEW_LINE> <INDENT> result["message"] = "校验格式错误" <NEW_LINE> return result <NEW_LINE> <DEDENT> if len(soure_res) != len(target_res): <NEW_LINE> <INDENT> result["message"] = "数据量不匹配" <NEW_LINE> return result <NEW_LINE> <DEDENT> check_set = set() <NEW_LINE> for soure_res_value in soure_res: <NEW_LINE> <INDENT> check_set.add(soure_res_value) <NEW_LINE> <DEDENT> for index, target_res_value in enumerate(target_res): <NEW_LINE> <INDENT> if target_res_value not in check_set: <NEW_LINE> <INDENT> result["res_list"].append("index= {}, value={}未在源数据中".format(index, target_res_value)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return result | JSON comparison
:param source: source JSON data
:param target: target JSON data
:param mappings: JSONPath mappings between source and target
:return: a result dict whose res_list records unmatched values | 625941b4fb3f5b602dac347a
def endpoint(path: str) -> Callable[[], Endpoint]: <NEW_LINE> <INDENT> def wrapper(method): <NEW_LINE> <INDENT> return Endpoint(path, build_converter(method)) <NEW_LINE> <DEDENT> return wrapper | Decorator for creating an :class:`Endpoint`.
Arguments:
path: The path to the API endpoint (relative to the API's
``base_url``).
Returns:
The wrapper for the endpoint method. | 625941b426238365f5f0ec4c |
def _get_always_compare_med(self): <NEW_LINE> <INDENT> return self.__always_compare_med | Getter method for always_compare_med, mapped from YANG variable /bgp/peer_groups/peer_group/afi_safis/afi_safi/route_selection_options/state/always_compare_med (boolean)
YANG Description: Compare multi-exit discriminator (MED) value from
different ASes when selecting the best route. The
default behavior is to only compare MEDs for paths
received from the same AS.
| 625941b4d18da76e235322b5 |
def bias_variable(shape) -> tf.Variable: <NEW_LINE> <INDENT> initial = tf.random_normal(shape) <NEW_LINE> return tf.Variable(b_alpha * initial) | Initialize a bias term
:param shape: shape of the bias tensor
:return: a tf.Variable of random normal values scaled by b_alpha | 625941b426068e7796caeabb
def getPIDGains(self): <NEW_LINE> <INDENT> return _robotsim.SimRobotController_getPIDGains(self) | getPIDGains(SimRobotController self)
Gets the PID gains for the PID controller. | 625941b415baa723493c3d55 |
def test_onlyR(self): <NEW_LINE> <INDENT> data = { 'toSort' :[{'d': '01:06:2015', 'originalOrder': 1, 'p': 250, 'r': 1}, {'d': '15:06:2015', 'originalOrder': 2, 'p': 200, 'r': 2}, {'d': '02:06:2015', 'originalOrder': 3, 'p': 100, 'r': 2}], 'dPriority': 0, 'pPriority': 0, 'rPriority': 1 } <NEW_LINE> sortedData = jsonRequest(data) <NEW_LINE> self.assertEquals(sortedData[0]['originalOrder'], 1) <NEW_LINE> self.assertEquals(sortedData[1]['originalOrder'], 2) <NEW_LINE> self.assertEquals(sortedData[2]['originalOrder'], 3) | Order by r only, items keep their position when equals | 625941b4baa26c4b54cb0f07 |
def _get_committee(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cand = self.congress.members.get(self.candidate['bio_id']) <NEW_LINE> <DEDENT> except CongressError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if 'committees' in cand['roles'][0] and len(cand['roles'][0]['committees']) > 0: <NEW_LINE> <INDENT> if len(cand['roles'][0]['committees']) == 1: <NEW_LINE> <INDENT> self.spprt_funcs.remove(self._get_committee_text) <NEW_LINE> <DEDENT> committees = cand['roles'][0]['committees'] <NEW_LINE> committee = random.choice(committees) <NEW_LINE> h = HTMLParser.HTMLParser() <NEW_LINE> return h.unescape(committee['name']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.spprt_funcs.remove(self._get_committee_text) <NEW_LINE> return None | Returns the name of a random committee on which the candidate serves. | 625941b4a05bb46b383ec612 |
def get_grid_id_maps(self): <NEW_LINE> <INDENT> prefix = 'beijing' if self.city == const.BJ else 'london' <NEW_LINE> id2index = {('%s_grid_%03d' % (prefix, i)): self.to_row(i) * self.sample_column + self.to_col(i) for i in range(0, self.row * self.column)} <NEW_LINE> return id2index | Create a map between grid-ids and their index in the sub-sampled vector representation
Ids are incremented to the north then reset from west to east
:return: | 625941b4c432627299f04a28 |
def __getitem__(self, pair): <NEW_LINE> <INDENT> if not (isinstance(pair, tuple) and len(pair) == 2): <NEW_LINE> <INDENT> raise TypeError('indexing into an operation table requires exactly two elements') <NEW_LINE> <DEDENT> g, h = pair <NEW_LINE> try: <NEW_LINE> <INDENT> row = self._elts.index(g) <NEW_LINE> col = self._elts.index(h) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise IndexError('invalid indices of operation table: (%s, %s)' % (g, h)) <NEW_LINE> <DEDENT> return self._elts[self._table[row][col]] | Returns the element of the table, given the elements indexing its position.
INPUT:
- pair -- two elements of the structure
OUTPUT:
The element of the structure computed by the operation for
the two input elements (in the order provided).
This uses the table as a look-up device. If you want to use
the operation, then use the operation.
EXAMPLES::
sage: from sage.matrix.operation_table import OperationTable
sage: G=DiCyclicGroup(3)
sage: T=OperationTable(G, operator.mul)
sage: T.column_keys()
((), (5,6,7), ..., (1,4,2,3)(5,7))
sage: T[G('(1,2)(3,4)(5,6,7)'), G('(1,3,2,4)(5,7)')]
(1,4,2,3)(5,6)
TESTS::
sage: from sage.matrix.operation_table import OperationTable
sage: G = DiCyclicGroup(3)
sage: T = OperationTable(G, operator.mul)
sage: T[G('(1,2)(3,4)(5,6,7)')]
Traceback (most recent call last):
...
TypeError: indexing into an operation table requires exactly two elements
sage: T[G('(1,2)(3,4)(5,6,7)'), G('(1,3,2,4)(5,7)'), G('(1,3,2,4)(5,7)')]
Traceback (most recent call last):
...
TypeError: indexing into an operation table requires exactly two elements
sage: T[2, 3]
Traceback (most recent call last):
...
IndexError: invalid indices of operation table: (2, 3)
sage: T['(1,512)', '(1,3,2,4)(5,7)']
Traceback (most recent call last):
...
IndexError: invalid indices of operation table: ((1,512), (1,3,2,4)(5,7)) | 625941b45fdd1c0f98dc0015 |
def __init__(self, dimension=None, shape=None, metric=None, learning_rate=None, neighborhood_size=None, noise_variance=None, seed=12345, growth_interval=2, max_connection_age=5, error_decay=0.99, neighbor_error_decay=0.99): <NEW_LINE> <INDENT> self._size = 2 <NEW_LINE> super(GrowingGas, self).__init__(dimension=dimension, shape=shape, metric=metric, learning_rate=learning_rate, neighborhood_size=neighborhood_size, noise_variance=noise_variance, seed=12345) <NEW_LINE> self._growth_interval = growth_interval <NEW_LINE> self._max_connection_age = max_connection_age <NEW_LINE> self._error_decay = error_decay <NEW_LINE> self._neighbor_error_decay = neighbor_error_decay <NEW_LINE> self._errors = np.zeros(self.shape, 'd') <NEW_LINE> self._connections = np.zeros((self._size, self._size), '=i2') - 1 <NEW_LINE> self._cue_count = 0 | Initialize a new Growing Gas with parameters. | 625941b4be383301e01b5276 |
def writeInt(self, n): <NEW_LINE> <INDENT> bs = struct.pack(">i", n) <NEW_LINE> return self._out.write(bs) | Writes a single 4-byte int, big-endian. | 625941b47c178a314d6ef23b
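A quick round-trip of the big-endian packing used here (standalone sketch, with io.BytesIO standing in for self._out):

```python
import io
import struct

out = io.BytesIO()
out.write(struct.pack(">i", 1234))                     # what writeInt does internally
assert struct.unpack(">i", out.getvalue())[0] == 1234  # big-endian round-trip
```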
def interpreting_bio(self): <NEW_LINE> <INDENT> for screen_name in self.screen_name_with_bio_description: <NEW_LINE> <INDENT> self.screen_names.append(screen_name) <NEW_LINE> <DEDENT> bio_anlysis_with_screen_name={} <NEW_LINE> pro_trump_based_on_bio=[] <NEW_LINE> anti_trump_based_on_bio=[] <NEW_LINE> rest_of_users=[] <NEW_LINE> for screen_name in self.screen_name_with_bio_description: <NEW_LINE> <INDENT> words_in_bio=str(self.screen_name_with_bio_description[screen_name]) <NEW_LINE> words_split_up=words_in_bio.split() <NEW_LINE> for word in words_split_up: <NEW_LINE> <INDENT> import string <NEW_LINE> word_with_no_puncuation_attached=word.translate(str.maketrans('','',string.punctuation)) <NEW_LINE> lower_case_no_puncuation=word_with_no_puncuation_attached.lower() <NEW_LINE> if lower_case_no_puncuation in self.hashtags_dictionary["Pro trump hashtags"]: <NEW_LINE> <INDENT> pro_trump_based_on_bio.append(screen_name) <NEW_LINE> <DEDENT> if lower_case_no_puncuation in self.hashtags_dictionary["anti trump hashtags"]: <NEW_LINE> <INDENT> anti_trump_based_on_bio.append(screen_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rest_of_users.append(screen_name) <NEW_LINE> <DEDENT> <DEDENT> pro_trump_no_duplicates_bio=list(set(pro_trump_based_on_bio)) <NEW_LINE> anti_trump_no_duplicates_bio=list(set(anti_trump_based_on_bio)) <NEW_LINE> common_in_anti_and_pro=list(set(pro_trump_no_duplicates_bio)&set(anti_trump_no_duplicates_bio)) <NEW_LINE> for screen_name in common_in_anti_and_pro: <NEW_LINE> <INDENT> pro_trump_no_duplicates_bio.remove(screen_name) <NEW_LINE> anti_trump_no_duplicates_bio.remove(screen_name) <NEW_LINE> <DEDENT> common_in_anti_and_screen_names=list(set(self.screen_names)&set(anti_trump_no_duplicates_bio)) <NEW_LINE> common_in_pro_and_screen_names=list(set(self.screen_names)&set(pro_trump_no_duplicates_bio)) <NEW_LINE> for screen_name in common_in_anti_and_screen_names: <NEW_LINE> <INDENT> self.screen_names.remove(screen_name) <NEW_LINE> <DEDENT> for screen_name in common_in_pro_and_screen_names: <NEW_LINE> <INDENT> self.screen_names.remove(screen_name) <NEW_LINE> <DEDENT> for screen_name in pro_trump_no_duplicates_bio: <NEW_LINE> <INDENT> bio_anlysis_with_screen_name[screen_name]=[.7] <NEW_LINE> <DEDENT> for screen_name in anti_trump_no_duplicates_bio: <NEW_LINE> <INDENT> bio_anlysis_with_screen_name[screen_name]=[-.7] <NEW_LINE> <DEDENT> for screen_name in self.screen_names: <NEW_LINE> <INDENT> bio_anlysis_with_screen_name[screen_name]=[0] <NEW_LINE> <DEDENT> <DEDENT> return(bio_anlysis_with_screen_name) | going through the bio's of every screen name and seeing if they state anything pro or anti about trump by parsing each word
from their bio and comparing it to the hashtags_dictionary, then assigning polarity to each screen name based on
whether their bio contains something positive, negative, or neutral about Trump.
This function returns a dictionary mapping each screen name to the polarity assigned based on the evaluation of
their bio | 625941b4bf627c535bc12fba
def switchFrame(self, selector): <NEW_LINE> <INDENT> el = self.getElement(selector) <NEW_LINE> self.driver.switch_to.frame(el) | Switch to the specified frame.
Usage:
driver.switch_to_frame("i,el") | 625941b4711fe17d82542160 |
def db_for_write(self, model, **hints): <NEW_LINE> <INDENT> if model._meta.app_label == 'integrate': <NEW_LINE> <INDENT> return 'Integration' <NEW_LINE> <DEDENT> elif model._meta.app_label == 'stormwater': <NEW_LINE> <INDENT> return 'stormwater' <NEW_LINE> <DEDENT> elif model._meta.app_label == 'yelp': <NEW_LINE> <INDENT> return 'Yelp' <NEW_LINE> <DEDENT> return 'default' | Point all operations on myapp models to 'other' | 625941b4566aa707497f435f |
@login_required(login_url='/login/') <NEW_LINE> def home(request): <NEW_LINE> <INDENT> if not is_logged_in(request): <NEW_LINE> <INDENT> return HttpResponseRedirect('/login/') <NEW_LINE> <DEDENT> template_context = {} <NEW_LINE> wp = Client('https://blakefinney.wordpress.com/xmlrpc.php', settings.WORDPRESS_USERNAME, settings.WORDPRESS_PASSWORD) <NEW_LINE> all_posts = wp.call(GetPosts()) <NEW_LINE> if len(all_posts) > 0: <NEW_LINE> <INDENT> stories = [] <NEW_LINE> for post in all_posts: <NEW_LINE> <INDENT> stories.append({"id": post.id, "title": post.title, "image": post.thumbnail['link'], "content":post.content}) <NEW_LINE> <DEDENT> template_context.update(stories=stories) <NEW_LINE> <DEDENT> template_context.update(div1_standings=[{"team_id": 1, "team_name": "Team 1"}, {"team_id": 2, "team_name": "Team 2"}, {"team_id": 3, "team_name": "Team 3"}, {"team_id": 4, "team_name": "Team 4"}, {"team_id": 5, "team_name": "Team 5"}, {"team_id": 6, "team_name": "Team 6"}], div2_standings=[{"team_id": 7, "team_name": "Team 7"}, {"team_id": 8, "team_name": "Team 8"}, {"team_id": 9, "team_name": "Team 9"}, {"team_id": 10, "team_name": "Team 10"}, {"team_id": 11, "team_name": "Team 11"}, {"team_id": 12, "team_name": "Team 12"}]) <NEW_LINE> template_context.update(div1_name='Northern Division', div2_name='Southern Division') <NEW_LINE> return render(request, 'base/home.html', context=template_context) | Home Page View | 625941b58da39b475bd64d5a |
def test_update(self): <NEW_LINE> <INDENT> r1 = Rectangle(10, 10, 10, 10) <NEW_LINE> r1.update(89) <NEW_LINE> self.assertEqual(r1.id, 89) <NEW_LINE> r1.update(89, 2) <NEW_LINE> self.assertEqual(r1.id, 89) <NEW_LINE> self.assertEqual(r1.width, 2) <NEW_LINE> r1.update(89, 2, 3) <NEW_LINE> self.assertEqual(r1.id, 89) <NEW_LINE> self.assertEqual(r1.width, 2) <NEW_LINE> self.assertEqual(r1.height, 3) <NEW_LINE> r1.update(89, 2, 3, 4) <NEW_LINE> self.assertEqual(r1.id, 89) <NEW_LINE> self.assertEqual(r1.width, 2) <NEW_LINE> self.assertEqual(r1.height, 3) <NEW_LINE> self.assertEqual(r1.x, 4) <NEW_LINE> r1.update(89, 2, 3, 4, 5) <NEW_LINE> self.assertEqual(r1.id, 89) <NEW_LINE> self.assertEqual(r1.width, 2) <NEW_LINE> self.assertEqual(r1.height, 3) <NEW_LINE> self.assertEqual(r1.x, 4) <NEW_LINE> self.assertEqual(r1.y, 5) <NEW_LINE> r1.update(id=1) <NEW_LINE> self.assertEqual(r1.id, 1) <NEW_LINE> r1.update(width=2) <NEW_LINE> self.assertEqual(r1.id, 1) <NEW_LINE> self.assertEqual(r1.width, 2) <NEW_LINE> r1.update(height=3) <NEW_LINE> self.assertEqual(r1.id, 1) <NEW_LINE> self.assertEqual(r1.width, 2) <NEW_LINE> self.assertEqual(r1.height, 3) <NEW_LINE> r1.update(x=5) <NEW_LINE> self.assertEqual(r1.id, 1) <NEW_LINE> self.assertEqual(r1.width, 2) <NEW_LINE> self.assertEqual(r1.height, 3) <NEW_LINE> self.assertEqual(r1.x, 5) <NEW_LINE> r1.update(y=7) <NEW_LINE> self.assertEqual(r1.id, 1) <NEW_LINE> self.assertEqual(r1.width, 2) <NEW_LINE> self.assertEqual(r1.height, 3) <NEW_LINE> self.assertEqual(r1.x, 5) <NEW_LINE> self.assertEqual(r1.y, 7) <NEW_LINE> r1.update(2, 4, 6, 8, 10, id=3, width=5, height=7, x=9, y=11) <NEW_LINE> self.assertEqual(r1.id, 2) <NEW_LINE> self.assertEqual(r1.width, 4) <NEW_LINE> self.assertEqual(r1.height, 6) <NEW_LINE> self.assertEqual(r1.x, 8) <NEW_LINE> self.assertEqual(r1.y, 10) | Test update method | 625941b4aad79263cf39081f |
def test_wrong_parameter(self): <NEW_LINE> <INDENT> msg = 'An error code 400 Bad Request is expected for an unknown ' + 'parameter' <NEW_LINE> req = ul.Request('%s?net=GE&wrongparam=1' % self.host) <NEW_LINE> try: <NEW_LINE> <INDENT> u = ul.urlopen(req) <NEW_LINE> u.read().decode('utf-8') <NEW_LINE> <DEDENT> except HTTPError as e: <NEW_LINE> <INDENT> self.assertEqual(e.code, 400, msg) <NEW_LINE> return <NEW_LINE> <DEDENT> self.assertTrue(False, msg) <NEW_LINE> return | Unknown parameter. | 625941b4287bf620b61d3854 |
def _get_offset(self, oid): <NEW_LINE> <INDENT> (offset,) = self.db.select_one("SELECT COUNT(oid) FROM %s WHERE oid < %d" % (self.table, oid)) <NEW_LINE> return offset | Returns the offset of oid in the sqlite table.
Parameters:
oid -- the oid of the row to check | 625941b463f4b57ef0000f07 |
def find_results(self, output=None, output_prefix=None, **kwds): <NEW_LINE> <INDENT> output = output or self.results['output'] <NEW_LINE> output_prefix = output_prefix or self.results['output_prefix'] <NEW_LINE> pattern = _pattern_to_regex(output_prefix or self.results.get('output_prefix')) <NEW_LINE> results = _find(output, pattern, regex=True) <NEW_LINE> for result in results: <NEW_LINE> <INDENT> yield HDF5ResultsStore(result, **kwds) | A list of :ref:`HDF5ResultsStore` results
| 625941b426238365f5f0ec4d |
def write(x, y, z, neg, outer_size, outer_size_squared): <NEW_LINE> <INDENT> literal = (z - 1) * (outer_size_squared) + (y - 1) * (outer_size) + (x - 1) + 1 <NEW_LINE> return -literal if neg else literal | Return the (possibly negated) integer literal encoded from the x, y, z coordinates | 625941b4167d2b6e31218982
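A standalone check of this DIMACS-style literal encoding (function copied from the row; the 3x3x3 grid, i.e. outer_size=3, is illustrative):

```python
def write(x, y, z, neg, outer_size, outer_size_squared):
    literal = (z - 1) * (outer_size_squared) + (y - 1) * (outer_size) + (x - 1) + 1
    return -literal if neg else literal

print(write(2, 1, 1, False, 3, 9))  # 2  (positive literal)
print(write(2, 1, 1, True, 3, 9))   # -2 (negated literal)
```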
def slice_to_cube(self, axis, chunk, **kwargs): <NEW_LINE> <INDENT> if self.data.ndim == 3: <NEW_LINE> <INDENT> raise cu.CubeError(4, 'Can only slice a hypercube into a cube') <NEW_LINE> <DEDENT> item = [slice(None, None, None) for _ in range(4)] <NEW_LINE> if isinstance(chunk, tuple): <NEW_LINE> <INDENT> if cu.iter_isinstance(chunk, (u.Quantity, u.Quantity)): <NEW_LINE> <INDENT> pixel0 = cu.convert_point(chunk[0].value, chunk[0].unit, self.axes_wcs, axis) <NEW_LINE> pixel1 = cu.convert_point(chunk[1].value, chunk[1].unit, self.axes_wcs, axis) <NEW_LINE> item[axis] = slice(pixel0, pixel1, None) <NEW_LINE> <DEDENT> elif cu.iter_isinstance((chunk, int, int)): <NEW_LINE> <INDENT> item[axis] = slice(chunk[0], chunk[1], None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise cu.CubeError(5, "Parameters must be of the same type") <NEW_LINE> <DEDENT> newdata = self.data[item].sum(axis) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> unit = chunk.unit if isinstance(chunk, u.Quantity) else None <NEW_LINE> pixel = cu.convert_point(chunk, unit, self.axes_wcs, axis) <NEW_LINE> item[axis] = pixel <NEW_LINE> newdata = self.data[item] <NEW_LINE> <DEDENT> wcs_indices = [0, 1, 2, 3] <NEW_LINE> wcs_indices.remove(3 - axis) <NEW_LINE> newwcs = wu.reindex_wcs(self.axes_wcs, np.array(wcs_indices)) <NEW_LINE> if axis == 2 or axis == 3: <NEW_LINE> <INDENT> newwcs = wu.add_celestial_axis(newwcs) <NEW_LINE> newwcs.was_augmented = True <NEW_LINE> <DEDENT> cube = Cube(newdata, newwcs, meta=self.meta, **kwargs) <NEW_LINE> return cube | For a hypercube, return a 3-D cube that has been cut along the given
axis and with data corresponding to the given chunk.
Parameters
----------
axis: int
The axis to cut from the hypercube
chunk: int, astropy Quantity or tuple:
The data to take from the axis | 625941b59b70327d1c4e0bb8 |
def solid_solution_random(structure, elem_frac_site, elem_list): <NEW_LINE> <INDENT> atom_list = [] <NEW_LINE> for i in range(len(elem_list)): <NEW_LINE> <INDENT> atom_list = atom_list + ( [i] * int(len(structure) / 2 / len(elem_list) + 0.5) ) <NEW_LINE> <DEDENT> random.shuffle(atom_list) <NEW_LINE> j = 0 <NEW_LINE> for i in range(len(structure)): <NEW_LINE> <INDENT> if str(structure[i]).split()[-1] == elem_frac_site: <NEW_LINE> <INDENT> structure[i] = elem_list[atom_list[j]] <NEW_LINE> j += 1 <NEW_LINE> <DEDENT> <DEDENT> return structure | An alternative way to generate random structures, where SQS has limitations.
The method is typically acceptable when the supercell is large, i.e. hundreds of atoms
For example, when used for atomman stacking fault generation:
If mcsqs is used after the fault is generated, then the atoms at the fault have a different
geometric environment from the bulk atoms. If mcsqs is applied to the surface system without
fault, after the mcsqs in pymatgen the structure will pass to atomman, and the surface method
must be used again; the atomman fault method will build two identical SQS above and under the
fault (slide plane), even with sizemults=[1,1,1], i.e. this doubles the surface system
This function uses random.shuffle(); alternatively random.choice() can be used. However,
random.choice (code immediately below) does not give equal numbers of the two types of atoms,
sometimes the discrepancy is large
if str(fault_sys_pymatgen[i]).split()[-1] == elem_frac_site:
fault_sys_pymatgen[i] = random.choice(elem_list)
Args:
structure: pymatgen structure
elem_frac_site: the site for solid solution
elem_list: species and composition of the solid solution
Return:
pymatgen supercell structure with random occupation | 625941b50fa83653e4656da3
def _handle_events(self, events): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> for event in events: <NEW_LINE> <INDENT> self.callbacks.on_event(self.tracker, event) <NEW_LINE> if event.path_selector is not None: <NEW_LINE> <INDENT> paths = set(event.path_selector(self.tracker)) <NEW_LINE> if event.path_regenerator is not None: <NEW_LINE> <INDENT> new_paths = set(event.path_regenerator(self.tracker, paths)) <NEW_LINE> removed_paths = paths.difference(new_paths) <NEW_LINE> new_new_paths = new_paths.difference(paths) <NEW_LINE> for path in removed_paths: <NEW_LINE> <INDENT> self._remove_path(path) <NEW_LINE> <DEDENT> for path in new_new_paths: <NEW_LINE> <INDENT> self._add_path(path, event.flags.paths_state) <NEW_LINE> <DEDENT> paths = new_paths <NEW_LINE> <DEDENT> if event.flags.paths_state == _PathState.updated: <NEW_LINE> <INDENT> for path in paths: <NEW_LINE> <INDENT> if self.path_states[path] == _PathState.updating: <NEW_LINE> <INDENT> self._set_path_state(path, _PathState.up_to_date) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._set_path_state(path, _PathState.outdated) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for path in paths: <NEW_LINE> <INDENT> self._set_path_state(path, event.flags.paths_state) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if event.task_selector is not None: <NEW_LINE> <INDENT> tasks = set(event.task_selector(self.tracker)) <NEW_LINE> if event.task_regenerator is not None: <NEW_LINE> <INDENT> new_tasks = set(event.task_regenerator(self.tracker, tasks)) <NEW_LINE> removed_tasks = tasks.difference(new_tasks) <NEW_LINE> new_new_tasks = new_tasks.difference(tasks) <NEW_LINE> for task in removed_tasks: <NEW_LINE> <INDENT> self._remove_task(task) <NEW_LINE> if event.flags.removed_tasks_outdate_paths: <NEW_LINE> <INDENT> for path in task.output_paths(): <NEW_LINE> <INDENT> self._set_path_state(path, _PathState.outdated) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for task in new_new_tasks: <NEW_LINE> <INDENT> self._add_task(task) <NEW_LINE> <DEDENT> tasks = new_tasks <NEW_LINE> <DEDENT> if event.flags.tasks_tags is not None: <NEW_LINE> <INDENT> for task in tasks: <NEW_LINE> <INDENT> self._replace_task_tags(task, event.flags.tasks_tags) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return [] | Applies the events.
Note: does not handle the events beyond applying them, e.g. does not run
tasks for newly outdated paths. Not thread safe.
Returns:
A list of valid yet unhandled events. These should be passed back in to
the function at a later point. | 625941b54d74a7450ccd3fa6 |
def mesh_basis_fn(bx, by): <NEW_LINE> <INDENT> vx, ex = bx[0], bx[1] <NEW_LINE> vy, ey = by[0], by[1] <NEW_LINE> v = ( lambda i, j: lambda x, y: np.array([vx(i)(x) * vy(j)(y)]), ) <NEW_LINE> e = ( lambda i, j: lambda x, y: np.array([ex(i)(x) * vy(j)(y), np.array([0])]), lambda i, j: lambda x, y: np.array([np.array([0]), vx(i)(x) * ey(j)(y)]), ) <NEW_LINE> f = ( lambda i, j: lambda x, y: np.array([ex(i)(x) * ey(j)(y)]), ) <NEW_LINE> return v, e, f | >>> gx = Grid_1D.chebyshev(2)
>>> B = mesh_basis_fn(gx.B, gx.B)
>>> x, y = np.linspace(-1, 1, 3), np.linspace(-1, 1, 3)
>>> B[1][1](1, 0)(x, y)
array([array([0]), array([ 0. , 0.5, 0. ])], dtype=object) | 625941b563f4b57ef0000f08 |
def arg1(self): <NEW_LINE> <INDENT> if self.commandType() == 'C_ARITHMETIC': <NEW_LINE> <INDENT> return self.current_command <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.current_command.split()[1] | Returns the first argument of the command | 625941b51b99ca400220a894
def p_ifstatement_else_if(p): <NEW_LINE> <INDENT> p[0] = ast.If(p[3], p[6], p[9]) | if_statement : IF LPAREN expression RPAREN LBRACK statement_list RBRACK ELSE if_statement | 625941b54527f215b584c240 |
def test_setByCall(self): <NEW_LINE> <INDENT> self.assertEqual(context.call({"x": "y"}, context.get, "x"), "y") | Values may be associated with keys by passing them in a dictionary as
the first argument to L{twisted.python.context.call}. | 625941b53cc13d1c6d3c716a |