code (stringlengths 4-4.48k) | docstring (stringlengths 1-6.45k) | _id (stringlengths 24) |
---|---|---|
def copy_file(self, from_path, to_subfolder, to_filename): <NEW_LINE> <INDENT> target_folder = join(self.output, to_subfolder) <NEW_LINE> target_file = join(target_folder, to_filename) <NEW_LINE> if isdir(target_folder): <NEW_LINE> <INDENT> if isfile(target_file): <NEW_LINE> <INDENT> self.log.info("File was existed. Move to Review") <NEW_LINE> if not isdir(join(self.review, to_subfolder)): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.log.debug("Create sub-folder") <NEW_LINE> mkdir(join(self.review, to_subfolder)) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise ExPhotoRename("Fail in create folder %s: %s" % (target_folder, e.message)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> number = 0 <NEW_LINE> to_fname_base = to_filename.split('.')[0] <NEW_LINE> to_fname_ext = to_filename.split('.')[1] <NEW_LINE> while True: <NEW_LINE> <INDENT> if not isfile(join(self.review, "%s/%s" % (to_subfolder, to_filename))): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> self.log.debug("%s was existed" % to_filename) <NEW_LINE> number += 1 <NEW_LINE> to_filename = "%s-%s.%s" % (to_fname_base, number, to_fname_ext) <NEW_LINE> <DEDENT> <DEDENT> target_file = join(self.review, "%s/%s" % (to_subfolder, to_filename)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> mkdir(target_folder) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise ExPhotoRename("Fail in create folder %s: %s" % (target_folder, e.message)) <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> move(from_path, target_file) <NEW_LINE> self.log.info("Success in move file") <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise ExPhotoRename("Fail in move file %s: %s" % (target_file, e.message)) | Copy a file. If the target already exists, copy it to the review folder
instead. In the review folder, the trailing number of the filename is incremented until the file
no longer exists. | 625941b476d4e153a657e8f5 |
def make_training_graph(graph, test_node, n): <NEW_LINE> <INDENT> edge=[] <NEW_LINE> graphcopy=graph.copy() <NEW_LINE> edge=sorted(graphcopy.edges(test_node)) <NEW_LINE> i=1 <NEW_LINE> for e in edge: <NEW_LINE> <INDENT> if(i<=n): <NEW_LINE> <INDENT> graphcopy.remove_edge(e[0],e[1]) <NEW_LINE> i+=1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return graphcopy <NEW_LINE> pass | To make a training graph, we need to remove n edges from the graph.
As in lecture, we'll assume there is a test_node for which we will
remove some edges. Remove the edges to the first n neighbors of
test_node, where the neighbors are sorted alphabetically.
E.g., if 'A' has neighbors 'B' and 'C', and n=1, then the edge
('A', 'B') will be removed.
Be sure to *copy* the input graph prior to removing edges.
Params:
graph.......a networkx Graph
test_node...a string representing one node in the graph whose
edges will be removed.
n...........the number of edges to remove.
Returns:
A *new* networkx Graph with n edges removed.
In this doctest, we remove edges for two friends of D:
>>> g = example_graph()
>>> sorted(g.neighbors('D'))
['B', 'E', 'F', 'G']
>>> train_graph = make_training_graph(g, 'D', 2)
>>> sorted(train_graph.neighbors('D'))
['F', 'G'] | 625941b4293b9510aa2c305f |
def _getContentsForBranch(self, branch, item, keyItem): <NEW_LINE> <INDENT> return item | Given a branch, item and keyItem,
return the contents for the branch. | 625941b44527f215b584c222 |
def render_template(self, *args, **kwargs): <NEW_LINE> <INDENT> assert len(args) == 0 <NEW_LINE> return self._template.render(obj=self, **kwargs) | Render Jinja2 MATLAB® script template.
All keyword arguments are passed to the Jinja2 template. | 625941b42eb69b55b151c66f |
def build_from_module(cfg, module, default_args=None): <NEW_LINE> <INDENT> assert isinstance(cfg, dict) and 'type' in cfg <NEW_LINE> assert isinstance(default_args, dict) or default_args is None <NEW_LINE> args = cfg.copy() <NEW_LINE> obj_type = args.pop('type') <NEW_LINE> if isinstance(obj_type, str): <NEW_LINE> <INDENT> obj_cls = getattr(module, obj_type) <NEW_LINE> if obj_cls is None: <NEW_LINE> <INDENT> raise KeyError('{} is not in the {} module'.format( obj_type, module)) <NEW_LINE> <DEDENT> <DEDENT> elif inspect.isclass(obj_type): <NEW_LINE> <INDENT> obj_cls = obj_type <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError('type must be a str or valid type, but got {}'.format( type(obj_type))) <NEW_LINE> <DEDENT> if default_args is not None: <NEW_LINE> <INDENT> for name, value in default_args.items(): <NEW_LINE> <INDENT> args.setdefault(name, value) <NEW_LINE> <DEDENT> <DEDENT> return obj_cls(**args) | Build a module from config dict.
Args:
cfg (dict): Config dict. It should at least contain the key "type".
module (:obj:`module`): The module to search the type from.
default_args (dict, optional): Default initialization arguments.
Returns:
obj: The constructed object. | 625941b4f9cc0f698b1403cb |
def with_data(self, data) -> "SpatialMap": <NEW_LINE> <INDENT> result = copy(self) <NEW_LINE> result._data = data <NEW_LINE> return result | Return a copy of this object with different data mapped to the sites | 625941b466656f66f7cbbf70 |
@pytest.mark.skipif('not HAS_SCIPY') <NEW_LINE> def test_compute_ts_map(tmpdir): <NEW_LINE> <INDENT> data = load_poisson_stats_image(extra_info=True) <NEW_LINE> kernel = Gaussian2DKernel(2.5) <NEW_LINE> data['exposure'] = np.ones(data['counts'].shape) * 1E12 <NEW_LINE> for _, func in zip(['counts', 'background', 'exposure'], [np.nansum, np.nansum, np.mean]): <NEW_LINE> <INDENT> data[_] = downsample_2N(data[_], 2, func) <NEW_LINE> <DEDENT> result = compute_ts_map(data['counts'], data['background'], data['exposure'], kernel) <NEW_LINE> for name, order in zip(['ts', 'amplitude', 'niter'], [2, 5, 0]): <NEW_LINE> <INDENT> result[name] = np.nan_to_num(result[name]) <NEW_LINE> result[name] = upsample_2N(result[name], 2, order=order) <NEW_LINE> <DEDENT> assert_allclose(1705.840212274973, result.ts[99, 99], rtol=1e-3) <NEW_LINE> assert_allclose([[99], [99]], np.where(result.ts == result.ts.max())) <NEW_LINE> assert_allclose(6, result.niter[99, 99]) <NEW_LINE> assert_allclose(1.0227934338735763e-09, result.amplitude[99, 99], rtol=1e-3) <NEW_LINE> filename = str(tmpdir.join('ts_test.fits')) <NEW_LINE> result.write(filename, header=data['header']) <NEW_LINE> read_result = TSMapResult.read(filename) <NEW_LINE> for _ in ['ts', 'sqrt_ts', 'amplitude', 'niter']: <NEW_LINE> <INDENT> assert result[_].dtype == read_result[_].dtype <NEW_LINE> assert_equal(result[_], read_result[_]) | Minimal test of compute_ts_map | 625941b48e71fb1e9831d57b |
def test_list_returner_functions(self): <NEW_LINE> <INDENT> self.assertListEqual(sysmod.list_returner_functions(), []) <NEW_LINE> self.assertListEqual(sysmod.list_returner_functions('sqlite3.get_*'), []) | Test that it lists the functions for all returner modules. | 625941b46aa9bd52df036b68 |
def isprop(object_): <NEW_LINE> <INDENT> return isinstance(object_, property) | Return true if the object is a property of the class.
Used to extend the inspect built-in Python module.
References:
- https://docs.python.org/3/library/inspect.html. | 625941b494891a1f4081b86e |
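Since `isprop` has the same predicate signature as the built-in `inspect.is*` helpers, it plugs directly into `inspect.getmembers`. A minimal sketch (the `Point` class is made up for illustration, and the predicate is redefined so the snippet stands alone):

```python
import inspect

def isprop(object_):
    return isinstance(object_, property)

class Point:
    def __init__(self, x):
        self._x = x

    @property
    def x(self):
        return self._x

# getmembers accepts any predicate, so isprop works like inspect.isfunction:
print(inspect.getmembers(Point, isprop))  # -> [('x', <property object at ...>)]
```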
def GPIB_trigger(self): <NEW_LINE> <INDENT> self.adapter.connection.assert_trigger() | Initiate trigger via low-level GPIB command (aka GET - group execute trigger). | 625941b4187af65679ca4eea |
def _recalculate_boxes(self): <NEW_LINE> <INDENT> self._radio_button_pos = (self.x + self.BUTTON_RADIUS, self.y + self._label_size[1] / 2) <NEW_LINE> self._label_focus_area = pygame.Rect((self.x + self.BUTTON_RADIUS * 2, self.y), self._label_size) | recalculate the bounding boxes once the position is changed | 625941b48c3a873295158184 |
def __init__(self, atoms, selection=None, description=''): <NEW_LINE> <INDENT> if selection is None: <NEW_LINE> <INDENT> selection = np.ones(len(atoms), dtype='bool') <NEW_LINE> <DEDENT> self.selection = selection <NEW_LINE> self.atoms = atoms <NEW_LINE> self.description = description | Args:
atoms: Base atom object
selection (opt.): Selected atoms
description (opt.): Descriptive string of the subset. | 625941b47c178a314d6ef21e |
def GSetupDownloadFile(self, file_path, options): <NEW_LINE> <INDENT> self._cc() <NEW_LINE> c_path = _GCStringIn(file_path.encode(_enc)) <NEW_LINE> rc = _gclibo.GSetupDownloadFile(self._gcon, c_path, options, self._buf, _buf_size) <NEW_LINE> if (options != 0): <NEW_LINE> <INDENT> _rc(rc) <NEW_LINE> <DEDENT> info_dict = {} <NEW_LINE> for line in str(self._buf.value.decode(_enc)).split("\"\n"): <NEW_LINE> <INDENT> fields = line.split(',',1) <NEW_LINE> if (fields[0] == ""): continue <NEW_LINE> elif len(fields) >= 2: <NEW_LINE> <INDENT> info_dict[fields[0].strip("\"\'")] = fields[1].strip("\"\'") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> info_dict[fields[0].strip("\"\'")] = '' <NEW_LINE> <DEDENT> <DEDENT> if (options == 0): <NEW_LINE> <INDENT> info_dict["options"] = rc <NEW_LINE> <DEDENT> return info_dict | Downloads specified sectors from a Galil compressed backup (gcb) file to a controller.
Returns a dictionary with the controller information stored in the gcb file.
If options is specified as 0, an additional "options" key will be in the dictionary, indicating the info sectors available in the gcb file.
def set_log_color(logger_name, level, color): <NEW_LINE> <INDENT> _custom_colors[logger_name, level] = color | Sets the color displayed in the console, according to the logger name and level
| 625941b49b70327d1c4e0b9a |
def _get_earliest_date(self, logfile): <NEW_LINE> <INDENT> line = logfile.readline() <NEW_LINE> date = datetime.strptime(line[0:19], "%Y-%m-%d %H:%M:%S") <NEW_LINE> logfile.seek(0) <NEW_LINE> return date | read in first line of file and parse the date | 625941b4d99f1b3c44c67366 |
def _find_packages(self, repos: Set[BintrayRepoDescriptor]) -> [BintrayPackageDescriptor]: <NEW_LINE> <INDENT> pkgs = [BintrayPackageDescriptor(repo.repoowner, repo.reponame, p['name'], p['linked']) for repo in repos for p in self.client.get_all('/repos/' + repo.repoowner + '/' + repo.reponame + '/packages')] <NEW_LINE> for index, pkg in enumerate(pkgs): <NEW_LINE> <INDENT> if pkg.linked: <NEW_LINE> <INDENT> bt_path = '/packages/' + pkg.repoowner + '/' + pkg.reponame + '/' + pkg.name <NEW_LINE> print("_find_packages: get https://api.bintray.com{}".format(bt_path)) <NEW_LINE> bt_package = self.client.get(bt_path) <NEW_LINE> pkgs[index] = BintrayPackageDescriptor(bt_package['owner'], bt_package['repo'], pkg.name, False) <NEW_LINE> <DEDENT> <DEDENT> return list(set(pkgs)) | Returns all packages in the known repositories | 625941b4091ae35668666d2c |
def get_property(self, property): <NEW_LINE> <INDENT> return self.__properties.get(property, None) | Returns the value of a given property.
:param property:
:return: value | 625941b438b623060ff0abbc |
def stop_button(machine): <NEW_LINE> <INDENT> key = "machine='" + str(machine) + "'" <NEW_LINE> dt.machines.update_data('stop_button', '1', key) <NEW_LINE> dt.machines.update_data('start_button_begin', '2018-01-01 00:00', key) <NEW_LINE> dt.machines.update_data('start_button_end', '2018-01-01 00:00', key) | Turns the given machine off.
The machine will stop working. | 625941b466673b3332b91e5d |
def cmSvdKriging(s, CFG): <NEW_LINE> <INDENT> X = CFG.X <NEW_LINE> F = CFG.Fmp <NEW_LINE> U = CFG.U <NEW_LINE> S = CFG.S <NEW_LINE> V = CFG.V <NEW_LINE> r = CFG.r <NEW_LINE> reg = CFG.reg <NEW_LINE> n = len(F) <NEW_LINE> c = np.zeros( n ) <NEW_LINE> for i in range(0,n): <NEW_LINE> <INDENT> c[i] = aux.cov(s,X[i],r) <NEW_LINE> <DEDENT> b = np.dot(np.transpose(U), c) <NEW_LINE> x = b*S/(S*S + reg ) <NEW_LINE> lam = np.dot( np.transpose(V) , np.transpose(x) ) <NEW_LINE> f = np.zeros( len(F[0]) ) <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> f = f + lam[i] * F[i] <NEW_LINE> <DEDENT> sigmaSquare = aux.cov(0,0,r) - np.sum(lam*c[0:n]) <NEW_LINE> if sigmaSquare < 0 and -sigmaSquare > reg: <NEW_LINE> <INDENT> print("Negative kriged variance. Probably because data points are too close. ") <NEW_LINE> <DEDENT> return f, sigmaSquare | Do kriging using SVD and Tikhonov regularization.
we are looking to solve the following:
[ ] [ ] [ ]
[ C ] * [lam] = [c]
[ ] [ ] [ ]
where:
C is a covariance matrix between observations (have n of those)
lambda are weights
c is the covariance between the given s and the n observations
function parameters:
s - where we want to estimate our function \ process
CFG - an object that contains all the data we need for the computation
returns - mean and kriged variance for point s | 625941b47d43ff24873a2a69 |
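The damped solve the docstring sketches (solve C·lam = c through the SVD, shrinking each singular value by reg) can be reproduced in isolation. A minimal numpy sketch with a made-up 2x2 covariance matrix and right-hand side:

```python
import numpy as np

C = np.array([[1.0, 0.5],
              [0.5, 1.0]])  # toy covariance matrix between observations
c = np.array([0.8, 0.3])    # toy covariances between s and the observations
reg = 1e-6                  # Tikhonov regularization parameter

U, S, Vt = np.linalg.svd(C)           # C = U @ diag(S) @ Vt
x = S * (U.T @ c) / (S**2 + reg)      # damp each singular value instead of using 1/S
lam = Vt.T @ x                        # kriging weights lambda
print(lam)  # close to np.linalg.solve(C, c) when reg is tiny
```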
def p_binary_operator_sets(symbol): <NEW_LINE> <INDENT> symbol[0] = BinaryOperator(symbol[1],symbol[2],symbol[3], localize(symbol,2)) | expresion : expresion CONTAINMENT expresion
| expresion UNION expresion
| expresion INTERSECTION expresion
| expresion DIFERENCE expresion
| expresion PLUSMAP expresion
| expresion MINUSMAP expresion
| expresion TIMESMAP expresion
| expresion DIVIDEMAP expresion
| expresion MODULEMAP expresion | 625941b4d18da76e23532296 |
def activity_type_exists(activity_type): <NEW_LINE> <INDENT> if activity_type in _object_id_validators: <NEW_LINE> <INDENT> return activity_type <NEW_LINE> <DEDENT> return validators.activity_type_exists(activity_type) | Wrap ckan.logic.validators.activity_type_exists to support additional
activity stream types | 625941b450485f2cf553cb5f |
def update_weights(self, alpha=None): <NEW_LINE> <INDENT> alpha = check_value(alpha, self.alpha, 'fractional order - alpha') <NEW_LINE> self.alpha = alpha <NEW_LINE> self.gleg.update_weights(alpha=alpha) <NEW_LINE> self.rs.update_weights(alpha=alpha) | Update quadrature weights.
The quadrature weights are a function of :math:`\alpha`. To
facilitate usage of the quadrature object, you can update the
weights with a new :math:`\alpha` without creating a whole
new object.
Args:
* **alpha** (:py:class:`float`): Exponent of singular kernel. | 625941b467a9b606de4a7c83 |
def get_ingredients(booktitle): <NEW_LINE> <INDENT> happy_regex = re.compile(r"(?i)[^hapybirthd]") <NEW_LINE> text = gb.raw(fileids=booktitle) <NEW_LINE> dough = re.sub(happy_regex, "", text) <NEW_LINE> return dough | selects only the best ingredients for the cake | 625941b4d6c5a10208143e0c |
def testInlineResponse20024FilterList(self): <NEW_LINE> <INDENT> pass | Test InlineResponse20024FilterList | 625941b4ad47b63b2c509d52 |
def tamagawa_product(self): <NEW_LINE> <INDENT> return prod([ld.tamagawa_number() for ld in self.local_data()], Integer(1)) | Return the product of the Tamagawa numbers `c_v` where `v` runs
over all prime ideals of `K`.
.. note::
See also tamagawa_product_bsd(), which includes an
additional factor when the model is not globally minimal,
as required by the BSD formula.
OUTPUT:
A positive integer.
EXAMPLES::
sage: K.<i> = NumberField(x^2+1)
sage: E = EllipticCurve([0,2+i])
sage: E.tamagawa_product()
1
sage: E = EllipticCurve([(2*i+1)^2,i*(2*i+1)^7])
sage: E.tamagawa_product()
4
An example over `\QQ`::
sage: E = EllipticCurve('30a')
sage: E.tamagawa_product()
6
An example with everywhere good reduction, where the product
is empty::
sage: x = polygen(QQ)
sage: K.<a> = NumberField(x^2 - 38)
sage: E = EllipticCurve( [a, -a + 1, a + 1, -5*a + 15, -5*a + 21])
sage: E.tamagawa_numbers()
[]
sage: E.tamagawa_product()
1 | 625941b4498bea3a759b9878 |
def numberOfArithmeticSlices(self, A): <NEW_LINE> <INDENT> diff = sys.maxsize <NEW_LINE> count = 0 <NEW_LINE> n = 0 <NEW_LINE> for i in xrange(1, len(A)): <NEW_LINE> <INDENT> if diff != A[i] - A[i-1]: <NEW_LINE> <INDENT> diff = A[i] - A[i-1] <NEW_LINE> count += sum(xrange(n + 1)) <NEW_LINE> n = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> n += 1 <NEW_LINE> <DEDENT> <DEDENT> if n > 0: <NEW_LINE> <INDENT> count += sum(xrange(n + 1)) <NEW_LINE> <DEDENT> return count | :type A: List[int]
:rtype: int | 625941b430c21e258bdfa264 |
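The counting trick above: a maximal run with n equal consecutive differences beyond the first contributes sum(range(n + 1)) = n(n + 1)/2 slices. A quick check, assuming the usual LeetCode-style `Solution` class wrapping the method (the row's code uses Python 2's `xrange`; the print call works in either Python version):

```python
# [1, 2, 3, 4] has three equal differences, so n ends at 2 and the run
# contributes 0 + 1 + 2 = 3 slices: [1,2,3], [2,3,4], [1,2,3,4].
print(Solution().numberOfArithmeticSlices([1, 2, 3, 4]))  # -> 3
```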
def _optimize(self, initial_tour, Debuglevel=0): <NEW_LINE> <INDENT> minchange = -1 <NEW_LINE> tour = initial_tour <NEW_LINE> while minchange < 0: <NEW_LINE> <INDENT> minchange = 0 <NEW_LINE> for i in range(self.size - 3): <NEW_LINE> <INDENT> for j in range(i + 2, self.size - 1): <NEW_LINE> <INDENT> t1 = tour[i] <NEW_LINE> t2 = tour[i + 1] <NEW_LINE> t3 = tour[j] <NEW_LINE> t4 = tour[j + 1] <NEW_LINE> change = (self.dis_mat[t1 - 1][t3 - 1] + self.dis_mat[t2 - 1][t4 - 1] - self.dis_mat[t1 - 1][t2 - 1] - self.dis_mat[t3 - 1][t4 - 1]) <NEW_LINE> if change < minchange: <NEW_LINE> <INDENT> minchange = change <NEW_LINE> tour = self.Swap(tour, i + 1, j) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if Debuglevel: <NEW_LINE> <INDENT> print("Tour After Change : ", minchange, "Distances: ", self.get_distance(tour)) <NEW_LINE> <DEDENT> <DEDENT> self.best_tour = tour <NEW_LINE> return tour | Improve existing tour
using 2-opt method | 625941b4f7d966606f6a9dcf |
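The `Swap` helper is not shown in this row; in a standard 2-opt move it reverses the tour segment between the two cut points, which is what makes the gain formula `d(t1,t3) + d(t2,t4) - d(t1,t2) - d(t3,t4)` valid. A hypothetical sketch of that convention, matching the call `self.Swap(tour, i + 1, j)`:

```python
def two_opt_swap(tour, i, j):
    # Reverse tour[i..j] inclusive; only the edges entering position i
    # and leaving position j change, so the gain formula above holds.
    return tour[:i] + tour[i:j + 1][::-1] + tour[j + 1:]

print(two_opt_swap([1, 2, 3, 4, 5], 1, 3))  # -> [1, 4, 3, 2, 5]
```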
def include_repository(self, handle): <NEW_LINE> <INDENT> obj = self.get_unfiltered_repository(handle) <NEW_LINE> return obj and not obj.get_privacy() | Predicate returning True if object is to be included, else False | 625941b4b830903b967e96df |
def test_patient_list_page_with_own_patients_data_n_invitation_data(self): <NEW_LINE> <INDENT> fixture_name = 'patient_list_data_no_follow_data_invitation.html' <NEW_LINE> fixture_filepath = self._get_fixture_file_path(fixture_name) <NEW_LINE> if fixture_filepath: <NEW_LINE> <INDENT> tree = etree.parse(fixture_filepath) <NEW_LINE> expected_output = etree.tostring(tree, method='html') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _logger.warning( 'IOError: Error reading fixture "{}". Hence, ' 'this test has not been actually executed'.format( fixture_name ) ) <NEW_LINE> <DEDENT> patients = self.patients_fixtures[:] <NEW_LINE> for p in patients: <NEW_LINE> <INDENT> if not p.get('summary'): <NEW_LINE> <INDENT> p['summary'] = False <NEW_LINE> <DEDENT> <DEDENT> notifications = [ { 'id': 1, 'message': 'You have been invited to follow 1 patient(s) ' 'from Norah Miller' } ] <NEW_LINE> api_data = { 'notifications': notifications, 'items': patients, 'notification_count': 0, 'followed_items': [], 'section': 'patient', 'username': 'nadine', 'urls': self.controller_urls } <NEW_LINE> rendered_template = self._render_template( self.cr, self.uid, self.template_id, options=api_data) <NEW_LINE> temp_tree = etree.fromstring(rendered_template) <NEW_LINE> rendered_and_parsed = etree.tostring(temp_tree, method='html') <NEW_LINE> compressed_expected_output = self._compress_string(expected_output) <NEW_LINE> compressed_rendered_parsed = self._compress_string( '<!DOCTYPE html>'+rendered_and_parsed) <NEW_LINE> self.assertEqual( compressed_expected_output, compressed_rendered_parsed ) | Test the 'patient list' page with information about own patients
and invitations from other users. | 625941b4a219f33f3462873b |
def test_email_multipart_with_links(self): <NEW_LINE> <INDENT> self._test_email("multipart", dformat="html", links=True) | display the text/html part of a multipart message | 625941b415baa723493c3d38 |
def test_register_user(self): <NEW_LINE> <INDENT> res = self.client.post('/api/v2/auth/signup', json=self.new_user) <NEW_LINE> data = res.get_json() <NEW_LINE> self.assertEqual(data['status'], 201) <NEW_LINE> self.assertEqual(data['message'], 'Success') <NEW_LINE> self.assertEqual(data['data'][0]['user']['firstname'], 'Andrew') <NEW_LINE> self.assertIn('token', data['data'][0]) <NEW_LINE> self.assertEqual(res.status_code, 201) | Tests that a user was registered successfully | 625941b4d53ae8145f87a03e |
def unload(self): <NEW_LINE> <INDENT> for action in self.actions: <NEW_LINE> <INDENT> self.iface.removePluginMenu( self.tr(u'&Cartographic Line Generalization'), action) <NEW_LINE> self.iface.removeToolBarIcon(action) <NEW_LINE> <DEDENT> del self.toolbar | Removes the plugin menu item and icon from QGIS GUI. | 625941b44d74a7450ccd3f8a |
def writeYamlStorage(butlerLocation, obj): <NEW_LINE> <INDENT> additionalData = butlerLocation.getAdditionalData() <NEW_LINE> locations = butlerLocation.getLocations() <NEW_LINE> with SafeFilename(os.path.join(butlerLocation.getStorage().root, locations[0])) as locationString: <NEW_LINE> <INDENT> logLoc = LogicalLocation(locationString, additionalData) <NEW_LINE> with open(logLoc.locString(), "w") as outfile: <NEW_LINE> <INDENT> yaml.dump(obj, outfile) | Writes an object to a YAML file specified by ButlerLocation.
Parameters
----------
butlerLocation : ButlerLocation
The location for the object to be written.
obj : object instance
The object to be written. | 625941b43617ad0b5ed67cc5 |
def discover(path, filter_specs=filter_specs): <NEW_LINE> <INDENT> for dirpath, _, filenames in os.walk(path): <NEW_LINE> <INDENT> for spec in filter_specs(filenames): <NEW_LINE> <INDENT> yield os.path.join(dirpath, spec) | Discover all of the specs recursively inside ``path``.
Successively yields the (full) relative paths to each spec. | 625941b473bcbd0ca4b2be43 |
def send(self, request): <NEW_LINE> <INDENT> _await_result(self._policy.on_request, request) <NEW_LINE> try: <NEW_LINE> <INDENT> response = self.next.send(request) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> if not _await_result(self._policy.on_exception, request): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> _await_result(self._policy.on_response, request, response) <NEW_LINE> <DEDENT> return response | Modifies the request and sends to the next policy in the chain.
:param request: The PipelineRequest object.
:type request: ~azure.core.pipeline.PipelineRequest
:return: The PipelineResponse object.
:rtype: ~azure.core.pipeline.PipelineResponse | 625941b499cbb53fe67929ae |
def disvecinf(self, x, y, aq=None): <NEW_LINE> <INDENT> raise NotImplementedError('Must overload Element.disvecinf()') | Returns 2 complex arrays of size (nparam, naq, npval) | 625941b432920d7e50b27f92 |
def emit(node, output=None, Dumper=Dumper, **parameters): <NEW_LINE> <INDENT> if output is None: <NEW_LINE> <INDENT> dumper = Dumper(StringIO.StringIO(), **parameters) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dumper = Dumper(output, **parameters) <NEW_LINE> <DEDENT> dumper.emit(node) <NEW_LINE> if output is None: <NEW_LINE> <INDENT> return dumper.output.getvalue() | Emits the given node to the output.
If output is None, returns the produced YAML document. | 625941b421bff66bcd68471c |
def test_geocode_with_address(self): <NEW_LINE> <INDENT> self.geocode_run( {"query": "Camp des Landes, 41200 VILLEFRANCHE-SUR-CHER", "query_type": "StreetAddress", "exactly_one": True}, {"latitude": 47.293048, "longitude": 1.718985, "address": "le camp des landes, 41200 Villefranche-sur-Cher" }, ) | IGNFrance.geocode Address | 625941b499fddb7c1c9de15a |
def do_check(func, files, status): <NEW_LINE> <INDENT> for file_name in files: <NEW_LINE> <INDENT> with open(file_name, 'r') as f: <NEW_LINE> <INDENT> output = func.parse(f.read(), file_name) <NEW_LINE> <DEDENT> if output: <NEW_LINE> <INDENT> status.append("{0}: {1}".format(file_name, output)) <NEW_LINE> <DEDENT> <DEDENT> return status | Generic do_check helper method
Args:
func (function): Specific function to call
files (list): list of files to run against
status (list): list of pre-receive check failures to eventually print
to the user
Returns:
status list of current pre-receive check failures. Might be an empty
list. | 625941b4498bea3a759b9879 |
def edit_distance(a, b): <NEW_LINE> <INDENT> prev_cost = range(0, len(b) + 1) <NEW_LINE> for end_a in range(1, len(a) + 1): <NEW_LINE> <INDENT> cur_cost = [end_a] <NEW_LINE> for end_b in range(1, len(b) + 1): <NEW_LINE> <INDENT> c = min( cur_cost[-1] + 1, prev_cost[end_b] + 1, prev_cost[end_b - 1] + (0 if a[end_a - 1] == b[end_b - 1] else 1) ) <NEW_LINE> cur_cost.append(c) <NEW_LINE> <DEDENT> prev_cost = cur_cost <NEW_LINE> <DEDENT> return prev_cost[-1] | Computes the line-based edit distance between two lists, i.e.
the smallest number of list items to delete, insert or replace
that would transform the content of one list into the other. | 625941b416aa5153ce36223f |
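For instance, the classic kitten -> sitting pair needs three edits (two substitutions and one insertion), and because the function compares list items it works on lines just as well as characters:

```python
print(edit_distance(list("kitten"), list("sitting")))  # -> 3
print(edit_distance(["a", "b"], ["a", "c", "b"]))      # -> 1 (one inserted line)
```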
def check_for_setup_error(self): <NEW_LINE> <INDENT> if not self._check_gpfs_state(): <NEW_LINE> <INDENT> msg = (_('GPFS is not active.')) <NEW_LINE> LOG.error(msg) <NEW_LINE> raise exception.GPFSException(msg) <NEW_LINE> <DEDENT> if not self.configuration.gpfs_share_export_ip: <NEW_LINE> <INDENT> msg = (_('gpfs_share_export_ip must be specified.')) <NEW_LINE> LOG.error(msg) <NEW_LINE> raise exception.InvalidParameterValue(err=msg) <NEW_LINE> <DEDENT> gpfs_base_dir = self.configuration.gpfs_mount_point_base <NEW_LINE> if not gpfs_base_dir.startswith('/'): <NEW_LINE> <INDENT> msg = (_('%s must be an absolute path.'), gpfs_base_dir) <NEW_LINE> LOG.error(msg) <NEW_LINE> raise exception.GPFSException(msg) <NEW_LINE> <DEDENT> if not self._is_dir(gpfs_base_dir): <NEW_LINE> <INDENT> msg = (_('%s is not a directory.'), gpfs_base_dir) <NEW_LINE> LOG.error(msg) <NEW_LINE> raise exception.GPFSException(msg) <NEW_LINE> <DEDENT> if not self._is_gpfs_path(gpfs_base_dir): <NEW_LINE> <INDENT> msg = (_('%s is not on GPFS. Perhaps GPFS not mounted.'), gpfs_base_dir) <NEW_LINE> LOG.error(msg) <NEW_LINE> raise exception.GPFSException(msg) <NEW_LINE> <DEDENT> if self.configuration.gpfs_nfs_server_type not in ['KNFS', 'GNFS']: <NEW_LINE> <INDENT> msg = (_('Invalid gpfs_nfs_server_type value: %s. ' 'Valid values are: "KNFS", "GNFS".'), self.configuration.gpfs_nfs_server_type) <NEW_LINE> LOG.error(msg) <NEW_LINE> raise exception.InvalidParameterValue(err=msg) <NEW_LINE> <DEDENT> if self.configuration.gpfs_nfs_server_list is None: <NEW_LINE> <INDENT> msg = (_('Missing value for gpfs_nfs_server_list.')) <NEW_LINE> LOG.error(msg) <NEW_LINE> raise exception.InvalidParameterValue(err=msg) | Returns an error if prerequisites aren't met. | 625941b430dc7b7665901732 |
def test_2(self): <NEW_LINE> <INDENT> pool = ResourcePool(cache_size=None) <NEW_LINE> pool.register_resource(ResourceA) <NEW_LINE> pool.register_resource(ResourceB) <NEW_LINE> variables = dict(foo="hey", bah="ho") <NEW_LINE> resource = pool.get_resource("resource.a", variables) <NEW_LINE> self.assertTrue(isinstance(resource, ResourceA)) <NEW_LINE> self.assertEqual(resource.variables, variables) <NEW_LINE> resource_ = pool.get_resource_from_handle(resource.handle) <NEW_LINE> self.assertTrue(resource_ is resource) <NEW_LINE> resource_ = pool.get_resource("resource.a", variables) <NEW_LINE> self.assertTrue(resource_ is resource) <NEW_LINE> pool.clear_caches() <NEW_LINE> resource_ = pool.get_resource("resource.a", variables) <NEW_LINE> self.assertEqual(resource_.variables, variables) <NEW_LINE> self.assertTrue(resource_ is not resource) | basic resource loading test. | 625941b4d58c6744b4257a28 |
@robot_start_sign("WooYun爬虫机器人") <NEW_LINE> def run_wooyun(): <NEW_LINE> <INDENT> data = robot_WooYun.api_request() <NEW_LINE> if data: <NEW_LINE> <INDENT> robot_WooYun.key_words_check(robot_WooYun.data_achieve(data)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass | Call the utility functions in WooYun.py | 625941b4d10714528d5ffaa5 |
def __init__(self, case, name): <NEW_LINE> <INDENT> self._state = None <NEW_LINE> self._case_id = case <NEW_LINE> self._attributes = None <NEW_LINE> self.valid_case_id = None <NEW_LINE> self._name = name | Initialize the sensor. | 625941b4eab8aa0e5d26d925 |
@staff_member_required <NEW_LINE> def image_upload(request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> upload_full_path = '%scontent/images/' % settings.MEDIA_ROOT <NEW_LINE> upload = request.FILES['image'] <NEW_LINE> filename = '%s.%s' % (uuid4(), upload.name.split('.')[-1]) <NEW_LINE> dest = open(os.path.join(upload_full_path, filename), 'wb+') <NEW_LINE> for chunk in upload.chunks(): <NEW_LINE> <INDENT> dest.write(chunk) <NEW_LINE> <DEDENT> dest.close() <NEW_LINE> result = '{status:"UPLOADED", image_url:"%s"}' % ('%scontent/images/%s' % (settings.MEDIA_URL, filename)) <NEW_LINE> return HttpResponse(result, mimetype='text/html') <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return HttpResponse("error") | Upload an image | 625941b4462c4b4f79d1d497 |
def isapi_error(environ, start_response): <NEW_LINE> <INDENT> status = '404 OK' <NEW_LINE> start_response(status, [('Content-type', 'text/plain')]) <NEW_LINE> return ['Page not found'] | Send a nice error page to the client | 625941b47cff6e4e8111774d |
@router.get("/", response_model=List[schema.UserResponse]) <NEW_LINE> def read_users( db: Session = Depends(deps.get_db), skip: int = 0, limit: int = 100, current_user: model.User = Depends(deps.get_current_active_superuser), ): <NEW_LINE> <INDENT> users = crud.user.get_multi( db, skip=skip, limit=limit, ) <NEW_LINE> data = list() <NEW_LINE> for user in users: <NEW_LINE> <INDENT> user = crud.user.get_user(db, user.id) <NEW_LINE> data.append( dict( **user, user=dict( phone=user.phone, id_no=user.id_no, title=user.title, biography=user.biography, gender=user.gender, organization=user.organization, ), ) ) <NEW_LINE> <DEDENT> return data | Retrieve users. | 625941b4cb5e8a47e48b7878 |
def forward(self, x): <NEW_LINE> <INDENT> x = torch.sigmoid(self.input(x)) <NEW_LINE> x = torch.sigmoid(self.hidden(x)) <NEW_LINE> x = torch.sigmoid(self.output(x)) <NEW_LINE> return x | Apply a forward pass of the network. | 625941b4fff4ab517eb2f200 |
@password_app.route('/changed_successful') <NEW_LINE> def password_successful_changed(): <NEW_LINE> <INDENT> return render_template('password/password_changed.html') | Returns a static template informing the user about the password change | 625941b48a43f66fc4b53e31 |
def detect_prefix_arch(directory=None): <NEW_LINE> <INDENT> if not directory: <NEW_LINE> <INDENT> directory = os.path.expanduser("~/.wine") <NEW_LINE> <DEDENT> registry_path = os.path.join(directory, 'system.reg') <NEW_LINE> if not os.path.isdir(directory) or not os.path.isfile(registry_path): <NEW_LINE> <INDENT> logger.debug("No prefix found in %s, defaulting to 32bit", directory) <NEW_LINE> return 'win32' <NEW_LINE> <DEDENT> with open(registry_path, 'r') as registry: <NEW_LINE> <INDENT> for i in range(5): <NEW_LINE> <INDENT> line = registry.readline() <NEW_LINE> if 'win64' in line: <NEW_LINE> <INDENT> logger.debug("Detected 64bit prefix in %s", directory) <NEW_LINE> return 'win64' <NEW_LINE> <DEDENT> elif 'win32' in line: <NEW_LINE> <INDENT> logger.debug("Detected 32bit prefix in %s", directory) <NEW_LINE> return 'win32' <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> logger.debug("Can't detect prefix arch for %s, defaulting to 32bit", directory) <NEW_LINE> return 'win32' | Given a wineprefix directory, return its architecture | 625941b47b25080760e39223 |
def validate(self): <NEW_LINE> <INDENT> if not isinstance(self._data, dict): <NEW_LINE> <INDENT> raise TypeError('freight forwarder configuration file must be a dict.') <NEW_LINE> <DEDENT> current_log_level = logger.get_level() <NEW_LINE> if self._verbose: <NEW_LINE> <INDENT> logger.set_level('DEBUG') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.set_level('ERROR') <NEW_LINE> <DEDENT> logger.info('Starting configuration validation', extra={"formatter": 'config-start'}) <NEW_LINE> config_data = self._data.copy() <NEW_LINE> try: <NEW_LINE> <INDENT> self._walk_tree(config_data, ROOT_SCHEME) <NEW_LINE> <DEDENT> except ConfigValidationException as e: <NEW_LINE> <INDENT> e.log_error() <NEW_LINE> raise <NEW_LINE> <DEDENT> logger.info("Config validation passed.", extra={'formatter': 'config-success'}) <NEW_LINE> logger.set_level(current_log_level) | Validate the contents of the configuration file. Will return None if validation is successful or
raise an error if not. | 625941b47b180e01f3dc45cf |
def get_adaptive_likelihood(point, error, errorType, dt, system, integration, *params, **kwargs): <NEW_LINE> <INDENT> if "Time" not in errorType: <NEW_LINE> <INDENT> return point, error <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> timeError = [0 if errorType[i] != "Time" else error[i] for i in range(len(error))] <NEW_LINE> <DEDENT> if "amount" in kwargs: <NEW_LINE> <INDENT> amount = kwargs["amount"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> amount = 500 <NEW_LINE> <DEDENT> if len(params) == 0: <NEW_LINE> <INDENT> params = None <NEW_LINE> <DEDENT> if "ensemble" not in kwargs: <NEW_LINE> <INDENT> pseudoObservations = [MiscFunctions.generate_typed_error(point, error, errorType, dt, system, integration, params) for i in range(amount)] <NEW_LINE> varLists = AnalysisOperations.get_var_lists_from_points(pseudoObservations) <NEW_LINE> return [np.mean(l) for l in varLists], [np.std(l, ddof=1) for l in varLists] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pseudoObservations = [MiscFunctions.generate_typed_error(point, timeError, errorType, dt, system, integration, params) for i in range(amount)] <NEW_LINE> varLists = AnalysisOperations.get_var_lists_from_points(pseudoObservations) <NEW_LINE> probabilities = [np.array([0.0 for j in range(len(kwargs["ensemble"]))]) for i in range(len(varLists))] <NEW_LINE> ensembleValues = np.array(AnalysisOperations.get_var_lists_from_points(kwargs["ensemble"])) <NEW_LINE> for var in range(len(ensembleValues)): <NEW_LINE> <INDENT> if errorType[var] != "Time": <NEW_LINE> <INDENT> for point in range(len(pseudoObservations)): <NEW_LINE> <INDENT> normal = np.array(mlab.normpdf(ensembleValues[var], varLists[var][point], error[var])) <NEW_LINE> probabilities[var] += normal <NEW_LINE> <DEDENT> probabilitySum = sum(probabilities[var]) <NEW_LINE> probabilities[var] /= probabilitySum <NEW_LINE> if probabilitySum == 0: <NEW_LINE> <INDENT> probabilities[var] = np.array([1/len(probabilities[var]) for i in probabilities[var]]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for i in probabilities: <NEW_LINE> <INDENT> for j in i: <NEW_LINE> <INDENT> if j < 0: <NEW_LINE> <INDENT> print("In get_adaptive_likelihood, i=", j) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return np.array(probabilities) | Prototype method to get observation likelihood based on time error as well as state error.
Kwargs:
ensemble: If present, returns probability distribution as a discrete list of probabilities instead of a normal.
amount: If present, dictates the number of random samples used to get the discrete distribution. Default is 500. | 625941b471ff763f4b549455 |
def send_a_thankyou(): <NEW_LINE> <INDENT> donor_names = donor_data.keys() <NEW_LINE> while True: <NEW_LINE> <INDENT> choice = str(input("Please enter donor name (enter \"list\" to show list of donor names, enter \"q\" to quit)")) <NEW_LINE> if choice == "q": <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> elif choice == "list": <NEW_LINE> <INDENT> print("List of donor names") <NEW_LINE> print(("{}\n" * len(donor_data)).format(*donor_data.keys())) <NEW_LINE> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if len(choice) == 0: <NEW_LINE> <INDENT> print("name can not be empty") <NEW_LINE> continue <NEW_LINE> <DEDENT> elif choice in donor_names: <NEW_LINE> <INDENT> print("Donor already in the donor names list.. using existing donor name") <NEW_LINE> <DEDENT> elif len(choice) > 0: <NEW_LINE> <INDENT> donor_data[choice] = [] <NEW_LINE> <DEDENT> <DEDENT> break <NEW_LINE> <DEDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> amount = input("Please enter donation amount") <NEW_LINE> if float(amount) <= 0: <NEW_LINE> <INDENT> print("amount donated must be a +ve number") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> print("Enter positive number") <NEW_LINE> <DEDENT> <DEDENT> donor_data[choice].append(float(amount)) <NEW_LINE> print(generate_letter(choice)) | Sends a thank-you message to the donors
| 625941b47cff6e4e8111774e |
def _procure_orderpoint_confirm(self, cr, uid, use_new_cursor=False, company_id = False, context=None): <NEW_LINE> <INDENT> if context is None: <NEW_LINE> <INDENT> context = {} <NEW_LINE> <DEDENT> if use_new_cursor: <NEW_LINE> <INDENT> cr = openerp.registry(cr.dbname).cursor() <NEW_LINE> <DEDENT> orderpoint_obj = self.pool.get('stock.warehouse.orderpoint') <NEW_LINE> procurement_obj = self.pool.get('procurement.order') <NEW_LINE> offset = 0 <NEW_LINE> ids = [1] <NEW_LINE> dom = company_id and [('company_id', '=', company_id)] or [] <NEW_LINE> while ids: <NEW_LINE> <INDENT> ids = orderpoint_obj.search(cr, uid, dom, offset=offset, limit=100) <NEW_LINE> for op in orderpoint_obj.browse(cr, uid, ids, context=context): <NEW_LINE> <INDENT> prods = self._product_virtual_get(cr, uid, op) <NEW_LINE> if prods is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if prods < op.product_min_qty: <NEW_LINE> <INDENT> qty = max(op.product_min_qty, op.product_max_qty) - prods <NEW_LINE> reste = qty % op.qty_multiple <NEW_LINE> if reste > 0: <NEW_LINE> <INDENT> qty += op.qty_multiple - reste <NEW_LINE> <DEDENT> if qty <= 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> qty -= orderpoint_obj.subtract_procurements(cr, uid, op, context=context) <NEW_LINE> if qty > 0: <NEW_LINE> <INDENT> proc_id = procurement_obj.create(cr, uid, self._prepare_orderpoint_procurement(cr, uid, op, qty, context=context), context=context) <NEW_LINE> self.check(cr, uid, [proc_id]) <NEW_LINE> self.run(cr, uid, [proc_id]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> offset += len(ids) <NEW_LINE> if use_new_cursor: <NEW_LINE> <INDENT> cr.commit() <NEW_LINE> <DEDENT> <DEDENT> if use_new_cursor: <NEW_LINE> <INDENT> cr.commit() <NEW_LINE> cr.close() <NEW_LINE> <DEDENT> return {} | Create procurement based on Orderpoint
:param bool use_new_cursor: if set, use a dedicated cursor and auto-commit after processing each procurement.
This is appropriate for batch jobs only. | 625941b4e8904600ed9f1cf1 |
def jwk_load(self, aname): <NEW_LINE> <INDENT> self.logger.debug('DBStore.jwk_load({0})'.format(aname)) <NEW_LINE> account_list = self._account_search('name', aname) <NEW_LINE> jwk_dict = {} <NEW_LINE> if account_list: <NEW_LINE> <INDENT> jwk_dict = json.loads(account_list[3]) <NEW_LINE> jwk_dict['alg'] = account_list[2] <NEW_LINE> <DEDENT> self.logger.debug('DBStore.jwk_load() ended with: {0}'.format(jwk_dict)) <NEW_LINE> return jwk_dict | Load account information and build jwk key dictionary | 625941b4a05bb46b383ec5f5 |
def test_known_user(self): <NEW_LINE> <INDENT> num_users = User.objects.count() <NEW_LINE> self.headers[self.header] = self.known_user.username <NEW_LINE> response = self.client.get('/remote_user/', **self.headers) <NEW_LINE> self.assertEqual(response.context['user'].username, 'knownuser') <NEW_LINE> self.assertEqual(User.objects.count(), num_users) <NEW_LINE> self.headers[self.header] = self.known_user2.username <NEW_LINE> response = self.client.get('/remote_user/', **self.headers) <NEW_LINE> self.assertEqual(response.context['user'].username, 'knownuser2') <NEW_LINE> self.assertEqual(User.objects.count(), num_users) | Tests the case where the username passed in the header is a valid User. | 625941b4097d151d1a222c2b |
def second_phase(players): <NEW_LINE> <INDENT> player = next(p for p in players if len(p.prefs) > 1) <NEW_LINE> while True: <NEW_LINE> <INDENT> cycle = locate_all_or_nothing_cycle(player) <NEW_LINE> pairs = get_pairs_to_delete(cycle) <NEW_LINE> for player, other in pairs: <NEW_LINE> <INDENT> _delete_pair(player, other) <NEW_LINE> <DEDENT> if any(p.prefs == [] for p in players): <NEW_LINE> <INDENT> warnings.warn( NoStableMatchingWarning( "The following players have emptied their preference list: " f"{[p for p in players if not p.prefs]}" ) ) <NEW_LINE> break <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> player = next(p for p in players if len(p.prefs) > 1) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> for player in players: <NEW_LINE> <INDENT> player._unmatch() <NEW_LINE> if player.prefs: <NEW_LINE> <INDENT> player._match(player.get_favourite()) <NEW_LINE> <DEDENT> <DEDENT> return players | Conduct the second phase of the algorithm where all-or-nothing cycles
(rotations) are located and removed from the game. | 625941b40383005118ecf3ac |
def test_logged_in_user_gets_200_status(self): <NEW_LINE> <INDENT> self.client.force_login(self.user_1) <NEW_LINE> response = self.client.get(reverse_lazy('library')) <NEW_LINE> self.assertTrue(response.status_code == 200) | Logged in user gets 200 status on library get. | 625941b4b5575c28eb68ddc4 |
def read_xyzfile2(outfile): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> A = open(outfile, "r").readlines()[2:] <NEW_LINE> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> sys.exit("File %s not found." % outfile) <NEW_LINE> <DEDENT> A = open(outfile, "r").readlines()[2:] <NEW_LINE> A = np.array([line.split() for line in A]).astype(float) <NEW_LINE> names, xyz = A[:, 0].astype(int), A[:, 1:4] <NEW_LINE> return names, xyz | Read one xyz outfile into a numpy matrix.
Return vector of names and (n, 3) xyz matrix. | 625941b46e29344779a623de |
def mergeTrees(self, t1, t2): <NEW_LINE> <INDENT> if not t1 and not t2: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> val = 0 <NEW_LINE> val += 0 if not t1 else t1.val <NEW_LINE> val += 0 if not t2 else t2.val <NEW_LINE> node = TreeNode(val) <NEW_LINE> node.left = self.mergeTrees(None if not t1 else t1.left, None if not t2 else t2.left) <NEW_LINE> node.right = self.mergeTrees(None if not t1 else t1.right, None if not t2 else t2.right) <NEW_LINE> return node | :type t1: TreeNode
:type t2: TreeNode
:rtype: TreeNode | 625941b426238365f5f0ec30 |
def skel_load(skeleton, inventory): <NEW_LINE> <INDENT> for key, value in skeleton.items(): <NEW_LINE> <INDENT> _parse_belongs_to( key, belongs_to=value['belongs_to'], inventory=inventory ) | Build out data as provided from the defined `skel` dictionary.
:param skeleton: ``dict`` Dictionary defining group and component
memberships for the inventory.
:param inventory: ``dict`` Living dictionary of inventory | 625941b48e05c05ec3eea138 |
def get_beat(y, PARAMETERS): <NEW_LINE> <INDENT> S_p = librosa.feature.melspectrogram(y=y, sr=PARAMETERS['load']['sr'], n_fft=PARAMETERS['stft']['n_fft'], hop_length=PARAMETERS['beat']['hop_length'], n_mels=PARAMETERS['mel']['n_mels'], fmax=PARAMETERS['mel']['fmax']) <NEW_LINE> S_p = librosa.logamplitude(S_p, ref_power=S_p.max()) <NEW_LINE> odf = librosa.onset.onset_strength(S=S_p, aggregate=np.median) <NEW_LINE> tempo, beats = librosa.beat.beat_track(onset_envelope=odf, sr=PARAMETERS['load']['sr'], hop_length=PARAMETERS['beat']['hop_length']) <NEW_LINE> beat_times = librosa.frames_to_time(beats, sr=PARAMETERS['load']['sr'], hop_length=PARAMETERS['beat']['hop_length']) <NEW_LINE> return tempo, beat_times, odf | Estimate beat times and tempo | 625941b4dd821e528d63af73 |
def __init__(self, config:Config): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> self.time_hrz_len = config.time_hrz_len <NEW_LINE> self.unit_table = {} <NEW_LINE> self.n_supps = 0 <NEW_LINE> self.n_stores = 0 <NEW_LINE> self.constrs = [] <NEW_LINE> self.config_parse() <NEW_LINE> self.P = cp.Variable(integer=True) | Attrs:
| config: the config object
| time_hrz_len: the length of time horizon
| unit_table: the dict used for unit lookup
| n_supps: the # of supplier units
| n_stores: the # of store units
| constrs: all the constraints used to solve the model
| P: the var representing the overall profit
the target var that we would like to maximize | 625941b460cbc95b062c6311 |
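Once the constraints in `constrs` are tied to `P`, solving the model is the usual cvxpy pattern. A hypothetical sketch (the `model` instance name is an assumption, not from the source):

```python
import cvxpy as cp

# model = Model(config)  # hypothetical instance of the class above
# Note: P is declared integer, so a mixed-integer-capable solver is required.
problem = cp.Problem(cp.Maximize(model.P), model.constrs)
problem.solve()
print(problem.status, model.P.value)  # e.g. "optimal" and the best overall profit
```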
def load_rokdoc_well_markers(infile): <NEW_LINE> <INDENT> with open(infile, 'r') as fd: <NEW_LINE> <INDENT> buf = fd.readlines() <NEW_LINE> <DEDENT> marker = [] <NEW_LINE> well = [] <NEW_LINE> md = [] <NEW_LINE> tvdkb = [] <NEW_LINE> twt = [] <NEW_LINE> tvdss = [] <NEW_LINE> x = [] <NEW_LINE> y = [] <NEW_LINE> for line in buf[5:]: <NEW_LINE> <INDENT> c1, c2, c3, c4, c5 = line.split("'") <NEW_LINE> c6, c7, c8, c9, c10, c11 = c5.strip().split() <NEW_LINE> marker.append(c2) <NEW_LINE> well.append(c4) <NEW_LINE> md.append(float(c6)) <NEW_LINE> tvdkb.append(float(c7)) <NEW_LINE> twt.append(float(c8)) <NEW_LINE> tvdss.append(float(c9)) <NEW_LINE> x.append(float(c10)) <NEW_LINE> y.append(float(c11)) <NEW_LINE> <DEDENT> markers = {} <NEW_LINE> for each in list(set(well)): <NEW_LINE> <INDENT> markers[each] = {} <NEW_LINE> <DEDENT> for i in range(len(marker)): <NEW_LINE> <INDENT> cur_well = well[i] <NEW_LINE> cur_marker = marker[i] <NEW_LINE> cur_md = md[i] <NEW_LINE> cur_tvdkb = tvdkb[i] <NEW_LINE> cur_tvdss = tvdss[i] <NEW_LINE> cur_twt = twt[i] <NEW_LINE> cur_x = x[i] <NEW_LINE> cur_y = y[i] <NEW_LINE> markers[cur_well][cur_marker] = {'md': cur_md, 'tvdkb': cur_tvdkb, 'tvdss': cur_tvdss, 'twt': cur_twt, 'x': cur_x, 'y': cur_y} <NEW_LINE> <DEDENT> return markers | Function to load well markers exported from RokDoc in ASCII format. | 625941b430bbd722463cbb8a |
def evaluate_OR_with_OR(or_operation_1, or_operation_2): <NEW_LINE> <INDENT> result = deque() <NEW_LINE> for or_operation in [or_operation_1, or_operation_2]: <NEW_LINE> <INDENT> for or_term in or_operation: <NEW_LINE> <INDENT> if or_term.allow: <NEW_LINE> <INDENT> result.appendleft(or_term) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result.append(or_term) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return result | The result is populated with all the terms. Allowed terms are put at
the start. | 625941b44527f215b584c224 |
def get_next_assessment_part(self): <NEW_LINE> <INDENT> return | Gets the next ``AssessmentPart`` in this list.
:return: the next ``AssessmentPart`` in this list. The ``has_next()`` method should be used to test that a next ``AssessmentPart`` is available before calling this method.
:rtype: ``osid.assessment.authoring.AssessmentPart``
:raise: ``IllegalState`` -- no more elements available in this list
:raise: ``OperationFailed`` -- unable to complete request
*compliance: mandatory -- This method must be implemented.* | 625941b42eb69b55b151c671 |
def __init__(self, epoch_iters, cfg): <NEW_LINE> <INDENT> self._cfg = cfg <NEW_LINE> self.epoch_iters = epoch_iters <NEW_LINE> self.MAX_EPOCH = cfg.SOLVER.MAX_EPOCH * epoch_iters <NEW_LINE> self.iter_timer = Timer() <NEW_LINE> self.data_timer = Timer() <NEW_LINE> self.net_timer = Timer() <NEW_LINE> self.loss = ScalarMeter(cfg.LOG_PERIOD) <NEW_LINE> self.loss_total = 0.0 <NEW_LINE> self.lr = None <NEW_LINE> self.mb_top1_err = ScalarMeter(cfg.LOG_PERIOD) <NEW_LINE> self.mb_top5_err = ScalarMeter(cfg.LOG_PERIOD) <NEW_LINE> self.num_top1_mis = 0 <NEW_LINE> self.num_top5_mis = 0 <NEW_LINE> self.num_samples = 0 <NEW_LINE> self.output_dir = cfg.OUTPUT_DIR | Args:
epoch_iters (int): the overall number of iterations of one epoch.
cfg (CfgNode): configs. | 625941b445492302aab5e087 |
def _exchange_energy(self, coupling): <NEW_LINE> <INDENT> exchange_energy = 0 <NEW_LINE> for i in range(0, self._rows): <NEW_LINE> <INDENT> for j in range(0, self._columns): <NEW_LINE> <INDENT> interaction = self._lattice[i-1, j] + self._lattice[(i+1) % self._rows, j] + self._lattice[i, j-1] + self._lattice[i, (j+1) % self._columns] <NEW_LINE> exchange_energy += self._lattice[i, j]*interaction <NEW_LINE> <DEDENT> <DEDENT> return -0.5*coupling*exchange_energy | Return exchange energy of the lattice | 625941b40a50d4780f666c57 |
def _select_compcor(compcor_cols, n_compcor): <NEW_LINE> <INDENT> if (n_compcor != "auto") and (n_compcor < len(compcor_cols)): <NEW_LINE> <INDENT> compcor_cols = compcor_cols[0:n_compcor] <NEW_LINE> <DEDENT> return compcor_cols | Retain a specified number of compcor components. | 625941b4656771135c3eb63a |
def create_directory(self, base_dir, new_dir): <NEW_LINE> <INDENT> if not os.path.exists(base_dir): <NEW_LINE> <INDENT> os.makedirs(base_dir) <NEW_LINE> <DEDENT> os.chdir(base_dir) <NEW_LINE> if not os.path.exists(new_dir): <NEW_LINE> <INDENT> os.makedirs(new_dir) | Check if 'sorting' directories exist in the specified location. If
not, create the directories. | 625941b421a7993f00bc7ab0 |
def insert(L, b): <NEW_LINE> <INDENT> i = b <NEW_LINE> while i != 0 and L[i - 1] >= L[b]: <NEW_LINE> <INDENT> i = i - 1 <NEW_LINE> <DEDENT> value = L[b] <NEW_LINE> del L[b] <NEW_LINE> L.insert(i, value) | Insert L[b] where it belongs in L[0:b + 1];
L[0:b - 1] must already be sorted. | 625941b46fece00bbac2d502 |
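`insert` is the inner step of insertion sort; the usual driver (a standard sketch, not shown in the source) calls it once per boundary:

```python
def insertion_sort(L):
    # After the iteration for boundary b, L[0:b + 1] is sorted.
    for b in range(1, len(L)):
        insert(L, b)

L = [3, 1, 2]
insertion_sort(L)
print(L)  # -> [1, 2, 3]
```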
def RL(self): <NEW_LINE> <INDENT> return self.link | Returns the RoboDK link Robolink() (old version). | 625941b4f548e778e58cd343 |
def get_string(self): <NEW_LINE> <INDENT> string = bytearray() <NEW_LINE> c = self.__file.read(1) <NEW_LINE> while c not in (b'\x00', ''): <NEW_LINE> <INDENT> string += c <NEW_LINE> c = self.__file.read(1) <NEW_LINE> <DEDENT> return string.decode('utf-8', errors='replace') | Read a null-terminated string from macho. | 625941b48c3a873295158186 |
def get_password(self, username): <NEW_LINE> <INDENT> if username not in self._users: <NEW_LINE> <INDENT> raise LookupError("No such user: %s" % username) <NEW_LINE> <DEDENT> return self._users[username] | Returns password hash for specified username.
Callers must check for LookupError, which is raised in
the case of a non-existent user specified. | 625941b445492302aab5e088 |
def network_device_name(): <NEW_LINE> <INDENT> net_list = virsh.nodedev_list(tree='', cap='net') <NEW_LINE> net_lists = net_list.stdout.strip().splitlines() <NEW_LINE> device_check = False <NEW_LINE> route_cmd = " route | grep default" <NEW_LINE> route_default = process.run(route_cmd, shell=True).stdout_text.strip().split(' ') <NEW_LINE> ip_default = route_default[-1] <NEW_LINE> for net_device_name in net_lists: <NEW_LINE> <INDENT> if net_device_name.find(ip_default) == -1: <NEW_LINE> <INDENT> net_device_address = nodedev_xml.NodedevXML.new_from_dumpxml(net_device_name).parent <NEW_LINE> if 'pci' in net_device_address: <NEW_LINE> <INDENT> device_check = True <NEW_LINE> return net_device_name <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not device_check: <NEW_LINE> <INDENT> test.cancel('Param device_address is not configured.') | Get the address of network pci device | 625941b4e64d504609d74608 |
def do_logout(self, session, line): <NEW_LINE> <INDENT> raise Exception | Handle the logout command
:param session:
:param line:
:return: | 625941b491af0d3eaac9b7db |
def by_trial(self,manual=False,split=[],ythres=-1000,end=True): <NEW_LINE> <INDENT> if not manual: <NEW_LINE> <INDENT> ypos0 = np.where(self.data['ypos'] < ythres)[0] <NEW_LINE> split = ypos0[np.where(np.diff(ypos0) > 1)[0]] <NEW_LINE> <DEDENT> trials = np.split(self.data,split) <NEW_LINE> trials = [ t[2:-10] for t in trials if len(t) > 100 ] <NEW_LINE> if end: <NEW_LINE> <INDENT> trials = trials[:-1] <NEW_LINE> <DEDENT> shape = [] <NEW_LINE> contour = [] <NEW_LINE> grating = [] <NEW_LINE> loom = [] <NEW_LINE> for t in trials: <NEW_LINE> <INDENT> if not (t['shape'] == t['shape'][0]).all(): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> elif b'contour' in t['shape'][0]: <NEW_LINE> <INDENT> contour.append(ContourTrial(t['t'], t['orientation'], t['xpos'], t['ypos'])) <NEW_LINE> <DEDENT> elif b'grating' in t['shape'][0]: <NEW_LINE> <INDENT> grating.append(GratingTrial(t['t'], t['orientation'], t['xpos'])) <NEW_LINE> <DEDENT> elif b'loom' in t['shape'][0]: <NEW_LINE> <INDENT> loom.append(LoomTrial(t['t'], t['orientation'], t['xpos'], t['ypos'])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> shape.append(ShapeTrial(t['t'], t['shape'], t['orientation'], t['xpos'])) <NEW_LINE> <DEDENT> <DEDENT> return shape, contour, grating, loom | Splits trials based on changes in ypos | 625941b430dc7b7665901733 |
def get_short_name(self): <NEW_LINE> <INDENT> return self.email | . | 625941b463d6d428bbe442bf |
def __init__(self, lookup_path): <NEW_LINE> <INDENT> self.lookup_path = lookup_path | Initialize LookupTable object.
:param lookup_path: name of the lookup table without
full path, eg. "se-fornmin (sv)/types" | 625941b4be7bc26dc91cd3cf |
def deserialize(self, str): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.plant_states is None: <NEW_LINE> <INDENT> self.plant_states = None <NEW_LINE> <DEDENT> end = 0 <NEW_LINE> start = end <NEW_LINE> end += 4 <NEW_LINE> (length,) = _struct_I.unpack(str[start:end]) <NEW_LINE> self.plant_states = [] <NEW_LINE> for i in range(0, length): <NEW_LINE> <INDENT> val1 = prx_simulation.msg.state_msg() <NEW_LINE> start = end <NEW_LINE> end += 4 <NEW_LINE> (length,) = _struct_I.unpack(str[start:end]) <NEW_LINE> pattern = '<%sd'%length <NEW_LINE> start = end <NEW_LINE> end += struct.calcsize(pattern) <NEW_LINE> val1.elements = struct.unpack(pattern, str[start:end]) <NEW_LINE> start = end <NEW_LINE> end += 4 <NEW_LINE> (length,) = _struct_I.unpack(str[start:end]) <NEW_LINE> start = end <NEW_LINE> end += length <NEW_LINE> if python3: <NEW_LINE> <INDENT> val1.node_name = str[start:end].decode('utf-8') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> val1.node_name = str[start:end] <NEW_LINE> <DEDENT> start = end <NEW_LINE> end += 4 <NEW_LINE> (length,) = _struct_I.unpack(str[start:end]) <NEW_LINE> start = end <NEW_LINE> end += length <NEW_LINE> if python3: <NEW_LINE> <INDENT> val1.consumer_name = str[start:end].decode('utf-8') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> val1.consumer_name = str[start:end] <NEW_LINE> <DEDENT> start = end <NEW_LINE> end += 8 <NEW_LINE> (val1.timestamp,) = _struct_d.unpack(str[start:end]) <NEW_LINE> self.plant_states.append(val1) <NEW_LINE> <DEDENT> start = end <NEW_LINE> end += 4 <NEW_LINE> (length,) = _struct_I.unpack(str[start:end]) <NEW_LINE> self.plant_paths = [] <NEW_LINE> for i in range(0, length): <NEW_LINE> <INDENT> start = end <NEW_LINE> end += 4 <NEW_LINE> (length,) = _struct_I.unpack(str[start:end]) <NEW_LINE> start = end <NEW_LINE> end += length <NEW_LINE> if python3: <NEW_LINE> <INDENT> val1 = str[start:end].decode('utf-8') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> val1 = str[start:end] <NEW_LINE> <DEDENT> self.plant_paths.append(val1) <NEW_LINE> <DEDENT> start = end <NEW_LINE> end += 4 <NEW_LINE> (length,) = _struct_I.unpack(str[start:end]) <NEW_LINE> start = end <NEW_LINE> end += length <NEW_LINE> if python3: <NEW_LINE> <INDENT> self.node_name = str[start:end].decode('utf-8') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.node_name = str[start:end] <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> except struct.error as e: <NEW_LINE> <INDENT> raise genpy.DeserializationError(e) | unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str`` | 625941b41b99ca400220a878 |
def get_datapath_name(self, dpid): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.dpid_dict[dpid] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return dpid | Get the switch name using dpid
Args:
dpid: datapath_id
Returns:
name: datapath name | 625941b42c8b7c6e89b35593 |
def peek(self): <NEW_LINE> <INDENT> return self.item[len(self.item) - 1] | Get the front element.
:rtype: int | 625941b4d164cc6175782b16 |
def __init__(self, times, values, name=None, description=None, file_origin=None, **annotations): <NEW_LINE> <INDENT> BaseNeo.__init__(self, name=name, file_origin=file_origin, description=description, **annotations) <NEW_LINE> self.times = times <NEW_LINE> self.values = values <NEW_LINE> self.segment = None <NEW_LINE> self.recordingchannel = None | Initialize a new IrregularlySampledSignal. | 625941b43617ad0b5ed67cc7 |
def predict_from_off_shelf_model(model, messages): <NEW_LINE> <INDENT> sid_predictions = [] <NEW_LINE> correct_predictions = 0 <NEW_LINE> for index, message in enumerate(messages): <NEW_LINE> <INDENT> final_pred = None <NEW_LINE> ss = model.polarity_scores(message) <NEW_LINE> if ss['neg'] > mixed_threshold and ss['pos'] > mixed_threshold: <NEW_LINE> <INDENT> final_pred = 3 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> top_score = ss['neg'] <NEW_LINE> final_pred = 1 <NEW_LINE> if ss['neu'] > top_score: <NEW_LINE> <INDENT> top_score = ss['neu'] <NEW_LINE> final_pred = 2 <NEW_LINE> <DEDENT> if ss['pos'] > top_score: <NEW_LINE> <INDENT> top_score = ss['pos'] <NEW_LINE> final_pred = 4 <NEW_LINE> <DEDENT> <DEDENT> sid_predictions.append(final_pred) <NEW_LINE> <DEDENT> return np.asarray(sid_predictions) | Use a NLTK Sentiment Intensity Analyzer to compute predictions for a list of messages.
This function should be able to predict on the models that SentimentIntensityAnalyzer()
outputs.
Args:
model: A trained model from SentimentIntensityAnalyzer()
messages: A numpy array where each index is a list containing exactly one message in bytes form.
Returns: A numpy array of predicted sentiment labels, one per message. | 625941b4046cf37aa974cb13 |
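A hedged usage sketch: the function expects a VADER-style analyzer whose `polarity_scores` returns `'neg'`/`'neu'`/`'pos'` keys, and it reads a module-level `mixed_threshold` that is not defined in this row (the value below is a guess). NLTK's lexicon must be downloaded once with `nltk.download('vader_lexicon')`.

```python
import numpy as np
from nltk.sentiment.vader import SentimentIntensityAnalyzer

mixed_threshold = 0.3  # free variable the function reads; this value is an assumption

sia = SentimentIntensityAnalyzer()
messages = np.array(["I love this!", "This is terrible.", "It is a chair."])
print(predict_from_off_shelf_model(sia, messages))  # e.g. [4 1 2]
```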
def showStats2(request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> restingTime = workout.training_stats.get('restTime', 0) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> restingTime = 0 <NEW_LINE> <DEDENT> if workout.isFinished: <NEW_LINE> <INDENT> return render(request, 'stat2.html', { 'restingTime': str(int(restingTime)) + ' s'}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return render(request, 'blankMoves.html') | Updates current live stats while the workout is going. | 625941b4925a0f43d2549c3b |
def just_hget(namespace, key): <NEW_LINE> <INDENT> if not db_client: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not namespace: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not key: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> value = db_client.hget(namespace, key) <NEW_LINE> return value | :param namespace:
:param key:
:return: None or the original string | 625941b4ec188e330fd5a571
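`just_hget` above defensively wraps redis-py's `hget`; `db_client` is assumed to be a module-level `redis.Redis` instance. A usage sketch:

    import redis

    db_client = redis.Redis(host='localhost', port=6379)  # assumed module-level client

    db_client.hset('user:1', 'name', 'alice')
    value = just_hget('user:1', 'name')   # b'alice' (redis-py returns bytes by default)
    missing = just_hget('user:1', 'age')  # None: hget returns None for absent fields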
def _read_error_handler(self, exc, blocksize): <NEW_LINE> <INDENT> self.last_exception = exc <NEW_LINE> return None | Log error when reading from file | 625941b48a349b6b435e7f3e |
def set_AWSSecretKeyId(self, value): <NEW_LINE> <INDENT> super(DescribeVolumesInputSet, self)._set_input('AWSSecretKeyId', value) | Set the value of the AWSSecretKeyId input for this Choreo. ((required, string) The Secret Key ID provided by Amazon Web Services.) | 625941b466656f66f7cbbf73 |
def mylocale(): <NEW_LINE> <INDENT> import gettext <NEW_LINE> try: <NEW_LINE> <INDENT> mylanguage = "%s" % locale.getdefaultlocale()[0][0:2] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> mylanguage = "en" <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> mylocale = gettext.translation('bakertool', localedir='locale', languages=[mylanguage]) <NEW_LINE> mylocale.install() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> mylocale = gettext.translation('bakertool', localedir='locale', languages=["en"]) <NEW_LINE> mylocale.install() | Disable the changes below in the original gui module:
#import gettext
#_ = gettext.lgettext
Include the changes below in the original main module:
import mylocale
mylocale.mylocale() | 625941b421bff66bcd68471e |
def pig_latin(phrase): <NEW_LINE> <INDENT> vowels = {'a', 'e', 'i', 'o', 'u'} <NEW_LINE> words = phrase.split() <NEW_LINE> for i, word in enumerate(words): <NEW_LINE> <INDENT> if word[0].lower() in vowels: <NEW_LINE> <INDENT> new_word = word + "yay" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_word = word[1:] + word[0] + "ay" <NEW_LINE> <DEDENT> words[i] = new_word <NEW_LINE> <DEDENT> return " ".join(words) | Turn a phrase into pig latin.
There will be no uppercase letters or punctuation in the phrase.
>>> pig_latin('hello awesome programmer')
'ellohay awesomeyay rogrammerpay' | 625941b467a9b606de4a7c86 |
def __init__(self, shape=(1, 1, 1)): <NEW_LINE> <INDENT> self.cells = np.empty(shape, dtype=list) <NEW_LINE> self.shape = shape <NEW_LINE> self.dim = len(shape) <NEW_LINE> self.clean() <NEW_LINE> indices = tuple(range(a) for a in shape) <NEW_LINE> index_splitted = np.meshgrid(*indices) <NEW_LINE> self.index_list = np.vstack([i.ravel() for i in index_splitted]).T | Cells for filling with objects
Args:
shape (tuple, optional): shape of the cell
block. Defaults to (1, 1, 1). | 625941b4046cf37aa974cb14
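The `index_list` construction in the row above enumerates every cell index via `np.meshgrid`; a small worked example (standalone, outside the class):

    import numpy as np

    shape = (2, 3)
    indices = tuple(range(a) for a in shape)
    grid = np.meshgrid(*indices)
    index_list = np.vstack([g.ravel() for g in grid]).T
    # Every (i, j) pair, in meshgrid's default 'xy' ordering:
    # [[0 0] [1 0] [0 1] [1 1] [0 2] [1 2]]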
def populate_attribute_names(): <NEW_LINE> <INDENT> with session_scope() as session: <NEW_LINE> <INDENT> attributes = session.query(Attribute).filter(Attribute.name == '').all() <NEW_LINE> for attribute in attributes: <NEW_LINE> <INDENT> attribute.name = ATTRIBUTE_ID.get(attribute.attribute_id, '') <NEW_LINE> if not attribute.name: <NEW_LINE> <INDENT> logging.warning( 'Attribute({}) still has no name.'.format(attribute)) | Get Attribute() objects with no name and attempt to populate the name.
This might run with cron, although the names are hardcoded. | 625941b416aa5153ce362241
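`session_scope` in the row above is the canonical SQLAlchemy commit-or-rollback context manager; the project's actual definition may differ, but it typically looks like:

    from contextlib import contextmanager

    @contextmanager
    def session_scope():
        session = Session()  # assumed sessionmaker() factory
        try:
            yield session
            session.commit()  # commit only if the block ran cleanly
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()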
def __emit_material(self, material, scene): <NEW_LINE> <INDENT> asr_mat = material.appleseed <NEW_LINE> asr_node_tree = asr_mat.osl_node_tree <NEW_LINE> use_nodes = self.__is_node_material(asr_mat, asr_node_tree) <NEW_LINE> material_node = None <NEW_LINE> node_list = None <NEW_LINE> front_material_name = "" <NEW_LINE> if use_nodes: <NEW_LINE> <INDENT> for node in asr_node_tree.nodes: <NEW_LINE> <INDENT> if node.node_type == 'osl_surface': <NEW_LINE> <INDENT> material_node = node <NEW_LINE> <DEDENT> <DEDENT> node_list = material_node.traverse_tree() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if asr_mat.bsdf_type in ['specular_btdf', 'diffuse_btdf']: <NEW_LINE> <INDENT> front_material_name = material.name + "_front" <NEW_LINE> back_material_name = material.name + "_back" <NEW_LINE> self.__emit_front_material(material, front_material_name, scene) <NEW_LINE> self.__emit_back_material(material, back_material_name, scene) <NEW_LINE> <DEDENT> <DEDENT> if front_material_name == "": <NEW_LINE> <INDENT> front_material_name = material.name <NEW_LINE> if use_nodes: <NEW_LINE> <INDENT> surface_name = front_material_name + "_surface" <NEW_LINE> self.__emit_osl_material(material, front_material_name, surface_name, scene, material_node, node_list) <NEW_LINE> back_material_name = front_material_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__emit_front_material(material, front_material_name, scene) <NEW_LINE> if self.__is_light_emitting_material(asr_mat, scene): <NEW_LINE> <INDENT> back_material_name = "__default_material" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> back_material_name = front_material_name <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return front_material_name, back_material_name | Write the material. | 625941b421bff66bcd68471f |
def __init__(self, path): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.alignment_segment = self.fetch().__next__() <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> raise OSError("Check SAM cigar/ MDZ field formatting. Truncated File error") <NEW_LINE> <DEDENT> self.reference_path = None <NEW_LINE> self.seq_alignment = None <NEW_LINE> self.initialized = False | Create wrapper for pysam to deal with single alignment sam files
:param path: path to temp single alignment sam file | 625941b4f548e778e58cd344 |
def touch(self, path): <NEW_LINE> <INDENT> path = self._absnorm(path) <NEW_LINE> if os.path.isdir(path): <NEW_LINE> <INDENT> self._error("Cannot touch '%s' because it is a directory." % path) <NEW_LINE> <DEDENT> if not os.path.exists(os.path.dirname(path)): <NEW_LINE> <INDENT> self._error("Cannot touch '%s' because its parent directory does " "not exist." % path) <NEW_LINE> <DEDENT> if os.path.exists(path): <NEW_LINE> <INDENT> mtime = round(time.time()) <NEW_LINE> os.utime(path, (mtime, mtime)) <NEW_LINE> self._link("Touched existing file '%s'.", path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> open(path, 'w').close() <NEW_LINE> self._link("Touched new file '%s'.", path) | Emulates the UNIX touch command.
Creates a file, if it does not exist. Otherwise changes its access and
modification times to the current time.
Fails if used with a directory or if the parent directory of the given
file does not exist. | 625941b4d58c6744b4257a29
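The `touch` keyword above reimplements behaviour that `pathlib` largely covers; a standard-library sketch for comparison (it does not log, and it creates missing parents where the keyword above fails instead):

    from pathlib import Path

    p = Path('build/marker.txt')  # illustrative path
    p.parent.mkdir(parents=True, exist_ok=True)
    p.touch()  # creates the file, or updates atime/mtime if it already exists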
def atobplot(xa, ya, xb, yb, color='k', linetype='arrow', showplot=1, hxfac=1, hyfac=1, **other): <NEW_LINE> <INDENT> n = len0(xa) <NEW_LINE> nb = len0(xb) <NEW_LINE> if not n and not nb: <NEW_LINE> <INDENT> n = 1 <NEW_LINE> xa = [xa] <NEW_LINE> ya = [ya] <NEW_LINE> xb = [xb] <NEW_LINE> yb = [yb] <NEW_LINE> <DEDENT> elif not n: <NEW_LINE> <INDENT> n = nb <NEW_LINE> xa = [xa] * n <NEW_LINE> ya = [ya] * n <NEW_LINE> <DEDENT> elif not nb: <NEW_LINE> <INDENT> xb = [xb] * n <NEW_LINE> yb = [yb] * n <NEW_LINE> <DEDENT> isint = isinteractive() <NEW_LINE> ioff() <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> plot([xa[i], xb[i]], [ya[i], yb[i]], color=color, **other) <NEW_LINE> if linetype == 'arrow': <NEW_LINE> <INDENT> dx = xb[i] - xa[i] <NEW_LINE> dy = yb[i] - ya[i] <NEW_LINE> hx = -dx + -dy <NEW_LINE> hy = dx + -dy <NEW_LINE> hx = 0.1 * hx <NEW_LINE> hy = 0.1 * hy <NEW_LINE> plot([xb[i], xb[i] + hx * hxfac], [yb[i], yb[i] + hy * hyfac], color=color, **other) <NEW_LINE> plot([xb[i], xb[i] - hy * hxfac], [yb[i], yb[i] + hx * hyfac], color=color, **other) <NEW_LINE> <DEDENT> if linetype == 'xo': <NEW_LINE> <INDENT> plot(xa, ya, 'x', mfc=color, mec=color, **other) <NEW_LINE> plot(xb, yb, 'o', mfc=color, **other) <NEW_LINE> <DEDENT> <DEDENT> if isint and showplot: <NEW_LINE> <INDENT> ion() <NEW_LINE> show() | DRAWS LINES FROM a TO b | 625941b497e22403b379cd61 |
def write_entry_points(): <NEW_LINE> <INDENT> script_dir = os.path.join(sys.exec_prefix, 'scripts') <NEW_LINE> for entry_point in iter_entry_points(EP_GROUP): <NEW_LINE> <INDENT> pkg_name = entry_point.dist.project_name <NEW_LINE> pkg_version = entry_point.dist.version <NEW_LINE> for name, text, mode in get_script_args( "%s==%s" % (pkg_name, pkg_version), entry_point.name): <NEW_LINE> <INDENT> write_script(script_dir, name, text, mode) | Search for packages in the lib directory which specify Excel
entry points, and install scripts for those entry points | 625941b4287bf620b61d383a |
def source_lines(self): <NEW_LINE> <INDENT> return [self.source_buffer.source_line(line) for line in range(self.line(), self.end().line() + 1)] | Returns the lines of source code containing the entirety of this range. | 625941b46fb2d068a760ee6a |
@cbook.deprecated("2.2") <NEW_LINE> def rms_flat(a): <NEW_LINE> <INDENT> return np.sqrt(np.mean(np.abs(a) ** 2)) | Return the root mean square of all the elements of *a*, flattened out. | 625941b4fbf16365ca6f5f8b |
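A quick worked check of the row above (the decorator marks it deprecated in matplotlib 2.2, but the math is a plain flattened RMS):

    import numpy as np

    a = np.array([[3.0, -4.0], [0.0, 0.0]])
    rms_flat(a)                        # 2.5, since (3**2 + 4**2) / 4 = 6.25
    np.sqrt(np.mean(np.abs(a) ** 2))   # identical by definition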
def cloud_count_owncloud(cm_files, cloud_count=False): <NEW_LINE> <INDENT> data_accum = None <NEW_LINE> meta = None <NEW_LINE> for mask in cm_files: <NEW_LINE> <INDENT> print(os.path.basename(mask)) <NEW_LINE> with rasterio.open(mask) as scl: <NEW_LINE> <INDENT> _data = scl.read(1) <NEW_LINE> maxi = np.max(_data) <NEW_LINE> mini = np.min(_data) <NEW_LINE> if maxi > 1 or mini < 0: <NEW_LINE> <INDENT> print('warning, skipping due to invalid data {}'.format(mask)) <NEW_LINE> continue <NEW_LINE> <DEDENT> if cloud_count: <NEW_LINE> <INDENT> data = 1 - _data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = _data <NEW_LINE> <DEDENT> print(np.min(data), np.max(data)) <NEW_LINE> <DEDENT> if data_accum is None: <NEW_LINE> <INDENT> meta = scl.meta.copy() <NEW_LINE> data_accum = copy.copy(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data_accum += data <NEW_LINE> <DEDENT> print(np.min(data_accum), np.max(data_accum)) <NEW_LINE> <DEDENT> return data_accum, meta | Counts pixels containing cloud and cloud shadow
Assuming 0 for cloud and 1 for non-cloud pixels
Parameters
----------
cm_files : list
List containing sen2cor scl file paths
cloud_count : bool
If True, the number of occurring clouds is counted per pixel.
If False, the number of quality (no-cloud) observations is counted per pixel.
Returns
-------
numpy data array containing counts and rasterio meta
array, meta | 625941b4a4f1c619b28afe0d |
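A hedged usage sketch for the row above, assuming a directory of single-band 0/1 cloud masks on disk (file names are illustrative):

    import glob
    import rasterio

    cm_files = sorted(glob.glob('masks/*_scl_binary.tif'))  # hypothetical paths
    counts, meta = cloud_count_owncloud(cm_files, cloud_count=True)

    # Write the per-pixel counts back out with the first mask's georeferencing.
    meta.update(dtype=rasterio.uint16, count=1)
    with rasterio.open('cloud_counts.tif', 'w', **meta) as dst:
        dst.write(counts.astype(rasterio.uint16), 1)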