code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def close(self): <NEW_LINE> <INDENT> return self._df["close"]
[float] 收盘价
625941b221bff66bcd6846fa
def longestPalindrome(self, s): <NEW_LINE> <INDENT> result = "" <NEW_LINE> if len(set(s))==1: <NEW_LINE> <INDENT> return s <NEW_LINE> <DEDENT> l = len(s) <NEW_LINE> for i in range(l): <NEW_LINE> <INDENT> temp = self.checkPal(s, i, l, 0, 0) <NEW_LINE> if len(temp) > len(result): <NEW_LINE> <INDENT> result = temp <NEW_LINE> <DEDENT> if i - 1 >= 0 and s[i-1] == s[i]: <NEW_LINE> <INDENT> temp = self.checkPal(s, i, l, 1, 0) <NEW_LINE> if len(temp) > len(result): <NEW_LINE> <INDENT> result = temp <NEW_LINE> <DEDENT> <DEDENT> if i + 1 < l and s[i+1] == s[i]: <NEW_LINE> <INDENT> temp = self.checkPal(s, i, l, 0, 1) <NEW_LINE> if len(temp) > len(result): <NEW_LINE> <INDENT> result = temp <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return result
:type s: str :rtype: str
625941b291f36d47f21ac292
def get_principal_axis_3D(points): <NEW_LINE> <INDENT> coord= numpy.array(points, float) <NEW_LINE> center= numpy.mean(coord, 0) <NEW_LINE> coord= coord - center <NEW_LINE> inertia = numpy.dot(coord.transpose(), coord) <NEW_LINE> e_values, e_vectors = numpy.linalg.eig(inertia) <NEW_LINE> order = numpy.argsort(e_values) <NEW_LINE> eval3, eval2, eval1 = e_values[order] <NEW_LINE> axis3, axis2, axis1 = e_vectors[:, order].transpose() <NEW_LINE> scale_factor = 20 <NEW_LINE> v1= 3 * scale_factor * axis1 <NEW_LINE> v2= 2 * scale_factor * axis2 <NEW_LINE> v3= 1 * scale_factor * axis3 <NEW_LINE> return geom.Ref3d3d(geom.Pos3d(center[0],center[1],center[2]),geom.Vector3d(v1[0],v1[1],v1[2]),geom.Vector3d(v2[0],v2[1],v2[2]),geom.Vector3d(v3[0],v3[1],v3[2]))
Compute principal axes :param points: 3D point cloud.
625941b2cad5886f8bd26d7f
def handle_keyboard_interrupt(func): <NEW_LINE> <INDENT> @functools.wraps(func) <NEW_LINE> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = func(*args, **kwargs) <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> logger.debug( "Running %s: Received KeyboardInterrupt, " "hence shutting down the process gracefully.", func.__qualname__, ) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> return wrapper
Decorator for handling keyboard interrupt. It is assumed that this decorator is used in subprocesses. (the debug message is formulated accordingly)
625941b23346ee7daa2b2b05
def game_action_to_str(action): <NEW_LINE> <INDENT> def convert(o): <NEW_LINE> <INDENT> if o is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> elif isinstance(o, bool): <NEW_LINE> <INDENT> return int(o) <NEW_LINE> <DEDENT> elif isinstance(o, Card): <NEW_LINE> <INDENT> return o.ident <NEW_LINE> <DEDENT> elif isinstance(o, basestring): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> s = _CONVERT_STRINGS[o] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return s <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return o <NEW_LINE> <DEDENT> <DEDENT> action_list = [action.action] + map(convert, action.args) <NEW_LINE> return json.dumps(action_list, separators=(',',':'))
Serialize a GameAction as a string.
625941b2fbf16365ca6f5f60
def __update_mouse(self): <NEW_LINE> <INDENT> if not self.__stats.running: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> x, y = ctypes.c_int(0), ctypes.c_int(0) <NEW_LINE> state = sdl2.mouse.SDL_GetMouseState(ctypes.byref(x), ctypes.byref(y)) <NEW_LINE> world_unit = 1 / min(self.__systems.window.size) <NEW_LINE> self.__stats.mouse_pos.x = x.value * world_unit <NEW_LINE> self.__stats.mouse_pos.y = y.value * world_unit <NEW_LINE> if state > 0 and self.__stats.mouse_down is None: <NEW_LINE> <INDENT> self.__stats.mouse_down = self.__stats.mouse_pos + 0.0 <NEW_LINE> <DEDENT> elif state == 0 and self.__stats.mouse_down is not None and self.__stats.mouse_up is None: <NEW_LINE> <INDENT> self.__stats.mouse_up = self.__stats.mouse_pos + 0.0 <NEW_LINE> <DEDENT> elif state == 0 and self.__stats.mouse_down is not None and self.__stats.mouse_up is not None: <NEW_LINE> <INDENT> self.__stats.mouse_down = None <NEW_LINE> self.__stats.mouse_up = None
Updates 'mouse_pos'.
625941b2507cdc57c6306a6f
def __init__(self, url): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36', }
初始化参数 :param url:需要爬取的公交路线的网址url
625941b2fb3f5b602dac3434
@app.route('/train', methods=['GET','POST']) <NEW_LINE> def trainAPI(): <NEW_LINE> <INDENT> if not request.json: <NEW_LINE> <INDENT> print("ERROR: No request data received") <NEW_LINE> return jsonify(False) <NEW_LINE> <DEDENT> test = False <NEW_LINE> if 'mode' in request.json and request.json['mode'] == 'test': <NEW_LINE> <INDENT> test = True <NEW_LINE> <DEDENT> print("... model training begin") <NEW_LINE> train(test=test) <NEW_LINE> print("... model training done") <NEW_LINE> return(jsonify(True))
train API
625941b226238365f5f0ec0a
def assign_bins_fix_width(self, lim, wid, num, include_max=False): <NEW_LINE> <INDENT> self.lim = lim <NEW_LINE> self.wid = self.get_bin_width(lim, wid, num) <NEW_LINE> self.mins = ut_array.arange_safe(self.lim, self.wid, include_max) <NEW_LINE> if include_max: <NEW_LINE> <INDENT> self.mids = self.mins[:-1] + 0.5 * self.wid <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.mins.size == 1 and np.isinf(self.mins): <NEW_LINE> <INDENT> self.mids = np.abs(self.mins) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.mids = self.mins + 0.5 * self.wid <NEW_LINE> <DEDENT> <DEDENT> self.num = self.mins.size <NEW_LINE> self.wids = np.zeros(self.num) + self.wid
Import bin limits, number of bins, bin width, whether to include limit maximum in bin minnima. If limit is infinite, set width to infinity. If number defined, set width to give that number of bins.
625941b25e10d32532c5eccd
def get_key_run(self, machine, day_index): <NEW_LINE> <INDENT> if self.machine_runs is None: <NEW_LINE> <INDENT> raise ArgumentError("report not initialized") <NEW_LINE> <DEDENT> if day_index >= self.num_prior_days_to_include: <NEW_LINE> <INDENT> raise ArgumentError("invalid day index") <NEW_LINE> <DEDENT> runs = self.machine_runs.get((machine.id, day_index)) <NEW_LINE> if runs is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return runs[0]
get_key_run(machine, day_index) -> Run or None Get the "key" run for the given machine and day index, or None if there are no runs for that machine and day. The key run is an arbitrarily selected run from all the available runs that reported for the reported run order, for that machine and day.
625941b2e8904600ed9f1cc6
def find_collection_usager(self, user_id: str): <NEW_LINE> <INDENT> filtre = { ConstantesGrosFichiers.DOCUMENT_FICHIER_UUID_DOC: user_id, } <NEW_LINE> collection = self.get_collection() <NEW_LINE> collection_usager = collection.find_one(filtre) <NEW_LINE> if collection_usager is None: <NEW_LINE> <INDENT> filtre = { ConstantesGrosFichiers.DOCUMENT_FICHIER_UUID_DOC: ConstantesGrosFichiers.LIBVAL_UUID_COLLECTION_USAGERS } <NEW_LINE> collection_usagers = collection.find_one(filtre) <NEW_LINE> if collection_usagers is not None: <NEW_LINE> <INDENT> uuid_collection_usagers = collection_usagers[ConstantesGrosFichiers.DOCUMENT_FICHIER_UUID_DOC] <NEW_LINE> raise CollectionAbsenteException(uuid_parent=uuid_collection_usagers) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise CollectionAbsenteException() <NEW_LINE> <DEDENT> <DEDENT> return collection_usager
Trouve la collection correspondant au user_id :param user_id: :return:
625941b2091ae35668666d04
def cancel(self, subscription): <NEW_LINE> <INDENT> customer_obj = self.conekta.Customer.find(subscription.customer.id) <NEW_LINE> customer_obj.subscription.cancel() <NEW_LINE> return True
Delete cancel
625941b215baa723493c3d0f
def clear(self): <NEW_LINE> <INDENT> for i in reversed(range(self.layout().count())): <NEW_LINE> <INDENT> self.layout().itemAt(i).widget().deleteLater()
Removes the widgets attached to the main window.
625941b2435de62698dfd9f2
def attach_physics_node(self, physics_node_name=DEFAULT_PHYSICS_NODE_NAME, physics_model=DEFAULT_PHYSICS_MODEL_NAME, node_name=DEFAULT_NODE_NAME, inline_parameters=None): <NEW_LINE> <INDENT> assert isinstance(node_name, NodeName) <NEW_LINE> physics_node = { 'body': physics_model, 'target': str(node_name) } <NEW_LINE> if inline_parameters is not None: <NEW_LINE> <INDENT> for k, v in inline_parameters.iteritems(): <NEW_LINE> <INDENT> physics_node[k] = v <NEW_LINE> <DEDENT> <DEDENT> self.asset['physicsnodes'][physics_node_name] = physics_node
Attach a physics node to the JSON representation.
625941b2462c4b4f79d1d46d
def test_boblight_client_get_lights(self, boblightd, tested_client): <NEW_LINE> <INDENT> tested_client.say_hello() <NEW_LINE> tested_client.get_lights() <NEW_LINE> assert sorted([light.name for light in tested_client.lights.values()]) == sorted(['right', 'left']) <NEW_LINE> assert tested_client.lights['left'].left == 0 <NEW_LINE> assert tested_client.lights['left'].right == 0.5 <NEW_LINE> assert tested_client.lights['right'].left == 0.5 <NEW_LINE> assert tested_client.lights['right'].right == 1
Check that client handles light configuration
625941b24a966d76dd550da8
def test_events(): <NEW_LINE> <INDENT> tempdir = _TempDir() <NEW_LINE> raw = read_raw_brainvision(vhdr_path, eog=eog, preload=True) <NEW_LINE> events = raw.get_brainvision_events() <NEW_LINE> assert_array_equal(events, [[487, 1, 253], [497, 1, 255], [1770, 1, 254], [1780, 1, 255], [3253, 1, 254], [3263, 1, 255], [4936, 1, 253], [4946, 1, 255], [6620, 1, 254], [6630, 1, 255]]) <NEW_LINE> mne_events = mne.find_events(raw, stim_channel='STI 014') <NEW_LINE> assert_array_equal(events[:, [0, 2]], mne_events[:, [0, 2]]) <NEW_LINE> index = events[:, 2] == 255 <NEW_LINE> events = events[index] <NEW_LINE> raw.set_brainvision_events(events) <NEW_LINE> mne_events = mne.find_events(raw, stim_channel='STI 014') <NEW_LINE> assert_array_equal(events[:, [0, 2]], mne_events[:, [0, 2]]) <NEW_LINE> nchan = raw.info['nchan'] <NEW_LINE> ch_name = raw.info['chs'][-2]['ch_name'] <NEW_LINE> events = np.empty((0, 3)) <NEW_LINE> raw.set_brainvision_events(events) <NEW_LINE> assert_equal(raw.info['nchan'], nchan - 1) <NEW_LINE> assert_equal(len(raw._data), nchan - 1) <NEW_LINE> assert_equal(raw.info['chs'][-1]['ch_name'], ch_name) <NEW_LINE> fname = op.join(tempdir, 'evt_raw.fif') <NEW_LINE> raw.save(fname) <NEW_LINE> events = [[10, 1, 2]] <NEW_LINE> raw.set_brainvision_events(events) <NEW_LINE> assert_equal(raw.info['nchan'], nchan) <NEW_LINE> assert_equal(len(raw._data), nchan) <NEW_LINE> assert_equal(raw.info['chs'][-1]['ch_name'], 'STI 014')
Test reading and modifying events
625941b2d7e4931a7ee9dcbe
def remove_file_if_exists(path): <NEW_LINE> <INDENT> if os.path.isfile(path): <NEW_LINE> <INDENT> logging.info('Found file %s, removing...' % path) <NEW_LINE> os.remove(path)
Check if a file exists, and remove if it does. This also logs any action to the root logger. Args: path (string): the path to the file to check and/or remove.
625941b2d10714528d5ffa83
def create_subscription(driver, time_value='5d'): <NEW_LINE> <INDENT> email, name = generate_email_name() <NEW_LINE> email_field = get_email_field(driver) <NEW_LINE> email_field.send_keys(email) <NEW_LINE> username_field = get_username_field(driver) <NEW_LINE> username_field.send_keys(name) <NEW_LINE> time_field = get_time_field(driver) <NEW_LINE> time_field.clear() <NEW_LINE> time_field.send_keys(f'{time_value}') <NEW_LINE> subscribe_button = get_subscribe_button(driver) <NEW_LINE> subscribe_button.click() <NEW_LINE> return email, name
utility to create new subscription :return: tuple(email, name)
625941b26e29344779a623b4
def check_state(self): <NEW_LINE> <INDENT> for floor in self.floors: <NEW_LINE> <INDENT> chips = {chip.element for chip in floor if isinstance(chip, Chip)} <NEW_LINE> gens = {gen.element for gen in floor if isinstance(gen, Gen)} <NEW_LINE> irradiated = len(gens) <NEW_LINE> unprotected = False <NEW_LINE> for chip in chips: <NEW_LINE> <INDENT> if chip not in gens: <NEW_LINE> <INDENT> unprotected = True <NEW_LINE> <DEDENT> <DEDENT> if irradiated and unprotected: <NEW_LINE> <INDENT> raise Irradiated <NEW_LINE> <DEDENT> <DEDENT> if len(self.floors[-1]) == self.total_items: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
>>> state = State([[], [], []]) >>> state.check_state() True >>> State([[Gen('a')], [Chip('b')]]).check_state() False >>> State([[Gen('h'), Chip('h'), Gen('l')]]).check_state() True >>> State([[Gen('b'), Chip('b')]]).check_state() True >>> State([[Gen('h'), Chip('t'), Gen('l')]]).check_state() Traceback (most recent call last): File "<stdin>", line 1, in ? Irradiated >>> State([[Chip('a'), Chip('b')], [Gen('a')], [Gen('b')]]).check_state() False >>> State([[Chip('b')], [Chip('a'), Gen('a')], [Gen('b')]]).check_state() False >>> State([[Chip('b')], [], [Chip('a'), Gen('a'), Gen('b')]]).check_state() False
625941b28c0ade5d55d3e75d
def download_file_helper(url, filename='', download=False): <NEW_LINE> <INDENT> if settings.DEBUG: <NEW_LINE> <INDENT> return redirect(url) <NEW_LINE> <DEDENT> response = HttpResponse() <NEW_LINE> response['Content-Type'] = '' <NEW_LINE> response['X-Accel-Redirect'] = url <NEW_LINE> if download: <NEW_LINE> <INDENT> response['Content-Disposition'] = 'attachment; filename="{}"'.format( filename) <NEW_LINE> <DEDENT> return response
Set download to make browsers download instead of displaying inline.
625941b232920d7e50b27f71
def run(self): <NEW_LINE> <INDENT> print("M - manual control; R - distance, A - Control all legs") <NEW_LINE> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> char = getch() <NEW_LINE> if char == "a": self.move_all() <NEW_LINE> if char == "f": self.walk() <NEW_LINE> if char == "g": self.crawl(.02, 0, True) <NEW_LINE> if char == "z": self.crawl(.03, 20) <NEW_LINE> if char == "x": self.crawl(.05, 40) <NEW_LINE> if char == "c": self.crawl(.07, 60) <NEW_LINE> if char == "v": self.crawl(.09, 70) <NEW_LINE> if char == "b": self.crawl(.15, 60) <NEW_LINE> if char == "n": self.crawl(.3, 50) <NEW_LINE> if char == "m": self.crawl(.3, 25) <NEW_LINE> if char == "t": self.geometric_control(0, 10) <NEW_LINE> if char == 'p': <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> self.shutdown() <NEW_LINE> <DEDENT> self.shutdown()
Main loop for keyboard control
625941b26fece00bbac2d4d8
def _input(self, name, value='', attrs=[], comment=None): <NEW_LINE> <INDENT> element_type = self._called_method <NEW_LINE> a = self.attrs(attrs, ' ') <NEW_LINE> attrs2 = [('type', element_type), ('name', name)] <NEW_LINE> if element_type != 'file': <NEW_LINE> <INDENT> attrs2.append(('value', value)) <NEW_LINE> <DEDENT> a2 = self.attrs(attrs2, ' ') <NEW_LINE> return self.element('input', '%s%s' % (a2, a), comment=comment)
generates a text/hidden/checkbox/radio input @type name: str @param name: name of field @type value: basestring @param value: default value of field @type attrs: basestring|list|tuple|dict|OrderedDict @param attrs: attributes @type comment: None|basestring @param comment: <!-- /something --> behind the tag @rtype: basestring @return: XHTML element - field
625941b27c178a314d6ef1f5
def get_contour_line_style(self): <NEW_LINE> <INDENT> return self.contour_line_style
Returns the line style for contour lines as a string. Default is a solid line as "-".
625941b250812a4eaa59c0c4
def get_hashtags(s): <NEW_LINE> <INDENT> hashtags=[] <NEW_LINE> s=s.split() <NEW_LINE> for word in s: <NEW_LINE> <INDENT> if word[0]=='#': <NEW_LINE> <INDENT> if validate_hashtag(word) == True: <NEW_LINE> <INDENT> hashtags.append(word) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return(hashtags)
Takes in string as arguement splits string by spaces tests for hashtag for every word returns list of valid hashtags
625941b24f88993c3716be13
def max(self,x): <NEW_LINE> <INDENT> while x.right != self.nil: <NEW_LINE> <INDENT> x = x.right <NEW_LINE> <DEDENT> return x
Calculates maximun element of the tree @return node x time-complexity: O(h) space-complexity: O(1)
625941b20a50d4780f666c2c
def _register_build(manifest, keeper_url, keeper_token): <NEW_LINE> <INDENT> data = {'git_refs': manifest.refs} <NEW_LINE> if manifest.build_id is not None: <NEW_LINE> <INDENT> data['slug'] = manifest.build_id <NEW_LINE> <DEDENT> if manifest.requester_github_handle is not None: <NEW_LINE> <INDENT> data['github_requester'] = manifest.requester_github_handle <NEW_LINE> <DEDENT> r = requests.post( keeper_url + '/products/{p}/builds/'.format( p=manifest.product_name), auth=(keeper_token, ''), json=data) <NEW_LINE> if r.status_code != 201: <NEW_LINE> <INDENT> raise KeeperError(r.json()) <NEW_LINE> <DEDENT> build_info = r.json() <NEW_LINE> log.debug(r.json()) <NEW_LINE> return build_info
Register this documentation build with LTD Keeper This registration step tells ltd-mason where to upload the documentation files (bucket and directory). Raises ------ KeeperError Any anomaly with LTD Keeper interaction.
625941b2627d3e7fe0d68bec
def get_possible_tabkeys_lengths(self): <NEW_LINE> <INDENT> if self.rules: <NEW_LINE> <INDENT> max_len = self.rules["above"] <NEW_LINE> return [len(self.rules[x]) for x in range(2, max_len+1)][:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> least_commit_len = int(self.ime_properties.get('least_commit_length')) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> least_commit_len = 0 <NEW_LINE> <DEDENT> if least_commit_len > 0: <NEW_LINE> <INDENT> return list(range(least_commit_len, self._mlen + 1)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return []
Return a list of the possible lengths for tabkeys in this table. Example: If the table source has rules like: RULES = ce2:p11+p12+p21+p22;ce3:p11+p21+p22+p31;ca4:p11+p21+p31+p41 self._rules will be set to self._rules={2: [(1, 1), (1, 2), (2, 1), (2, 2)], 3: [(1, 1), (1, 2), (2, 1), (3, 1)], 4: [(1, 1), (2, 1), (3, 1), (-1, 1)], 'above': 4} and then this function returns “[4, 4, 4]” Or, if the table source has no RULES but LEAST_COMMIT_LENGTH=2 and MAX_KEY_LENGTH = 4, then it returns “[2, 3, 4]” I cannot find any tables which use LEAST_COMMIT_LENGTH though.
625941b266656f66f7cbbf4f
def sh_no_block(cmd, *args, **kwargs): <NEW_LINE> <INDENT> if isinstance(cmd, str): <NEW_LINE> <INDENT> cmd = cmd.split() <NEW_LINE> <DEDENT> return subprocess.Popen(cmd, *args, **kwargs)
Run sh command without blocking output.
625941b24527f215b584c202
def calc_modtran_max_water(paths: Pathnames) -> float: <NEW_LINE> <INDENT> max_water = None <NEW_LINE> xdir = { 'linux': 'linux', 'darwin': 'macos', 'windows': 'windows' } <NEW_LINE> name = 'H2O_bound_test' <NEW_LINE> filebase = os.path.join(paths.lut_h2o_directory, name) <NEW_LINE> with open(paths.h2o_template_path, 'r') as f: <NEW_LINE> <INDENT> bound_test_config = json.load(f) <NEW_LINE> <DEDENT> bound_test_config['MODTRAN'][0]['MODTRANINPUT']['NAME'] = name <NEW_LINE> bound_test_config['MODTRAN'][0]['MODTRANINPUT']['ATMOSPHERE']['H2OSTR'] = 50 <NEW_LINE> with open(filebase + '.json', 'w') as fout: <NEW_LINE> <INDENT> fout.write(json.dumps(bound_test_config, cls=SerialEncoder, indent=4, sort_keys=True)) <NEW_LINE> <DEDENT> cmd = os.path.join(paths.modtran_path, 'bin', xdir[platform], 'mod6c_cons ' + filebase + '.json') <NEW_LINE> try: <NEW_LINE> <INDENT> subprocess.call(cmd, shell=True, timeout=10, cwd=paths.lut_h2o_directory) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> with open(filebase + '.tp6', errors='ignore') as tp6file: <NEW_LINE> <INDENT> for count, line in enumerate(tp6file): <NEW_LINE> <INDENT> if 'The water column is being set to the maximum' in line: <NEW_LINE> <INDENT> max_water = line.split(',')[1].strip() <NEW_LINE> max_water = float(max_water.split(' ')[0]) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if max_water is None: <NEW_LINE> <INDENT> logging.error('Could not find MODTRAN H2O upper bound in file {}'.format(filebase + '.tp6')) <NEW_LINE> raise KeyError('Could not find MODTRAN H2O upper bound') <NEW_LINE> <DEDENT> return max_water
MODTRAN may put a ceiling on "legal" H2O concentrations. This function calculates that ceiling. The intended use is to make sure the LUT does not contain useless gridpoints above it. Args: paths: object containing references to all relevant file locations Returns: max_water - maximum MODTRAN H2OSTR value for provided obs conditions
625941b297e22403b379cd3a
def topKFrequent(self, nums, k): <NEW_LINE> <INDENT> dict = {} <NEW_LINE> for num in nums: <NEW_LINE> <INDENT> if num in dict.keys(): <NEW_LINE> <INDENT> dict[num] += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dict[num] = 1 <NEW_LINE> <DEDENT> <DEDENT> sorted_dict = sorted(dict.items(), key=lambda x: x[1], reverse=True) <NEW_LINE> res_list = [] <NEW_LINE> i = 1 <NEW_LINE> for lists in sorted_dict: <NEW_LINE> <INDENT> if i <= k: <NEW_LINE> <INDENT> res_list.append(lists[0]) <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return res_list
:type nums: List[int] :type k: int :rtype: List[int]
625941b250485f2cf553cb36
def requires(self): <NEW_LINE> <INDENT> return [SanityTestUICollection(output_base_path=self.output_base_path, mongodb_output_collection_name=self.mongodb_output_collection_name)]
Tell Luigi that the SanityTestUICollection task needs to be completed before running this task.
625941b260cbc95b062c62e7
def lightSuccess(self): <NEW_LINE> <INDENT> def f(): <NEW_LINE> <INDENT> def g(): <NEW_LINE> <INDENT> pr.set_color(r=0, g=255, b=0) <NEW_LINE> <DEDENT> thread = Thread(target=g) <NEW_LINE> thread.daemon = True <NEW_LINE> thread.start() <NEW_LINE> self.turnLightOff = False <NEW_LINE> time.sleep(2) <NEW_LINE> self.turnLightOff = True <NEW_LINE> <DEDENT> thread = Thread(target=f) <NEW_LINE> thread.daemon = True <NEW_LINE> thread.start()
Green light signaling the transcript matched a command
625941b2a4f1c619b28afdea
def createRoundKey(expandedKey, roundKeyPointer): <NEW_LINE> <INDENT> roundKey = [0] * 16 <NEW_LINE> for i in range(4): <NEW_LINE> <INDENT> for j in range(4): <NEW_LINE> <INDENT> roundKey[j*4+i] = expandedKey[roundKeyPointer + i*4 + j] <NEW_LINE> <DEDENT> <DEDENT> return roundKey
Create a round key. Creates a round key from the given expanded key and the position within the expanded key.
625941b27c178a314d6ef1f6
def get_messages(self): <NEW_LINE> <INDENT> self.logger.info('Loading messages') <NEW_LINE> try: <NEW_LINE> <INDENT> sqs = boto3.resource('sqs') <NEW_LINE> queue = sqs.Queue(self.queue_url) <NEW_LINE> return queue.receive_messages( MaxNumberOfMessages=10, VisibilityTimeout=600) <NEW_LINE> <DEDENT> except boto3.exceptions.Boto3Error as awserror: <NEW_LINE> <INDENT> self.logger.error(awserror) <NEW_LINE> return []
Gets the next few messages
625941b230dc7b7665901710
def state_def(self): <NEW_LINE> <INDENT> return iter([])
Yields tuples of (key, cell) which are to be part of the object's exported state. These cells are in addition to to those defined by decorators, not replacing them.
625941b216aa5153ce36221a
def sobel(image): <NEW_LINE> <INDENT> sobelx = cv2.Sobel(image, cv2.CV_64F, 1, 0, -1) <NEW_LINE> sobelx = cv2.convertScaleAbs(sobelx) <NEW_LINE> sobely = cv2.Sobel(image, cv2.CV_64F, 0, 1, -1) <NEW_LINE> sobely = cv2.convertScaleAbs(sobely) <NEW_LINE> sobelxy = cv2.Sobel(image, cv2.CV_64F, 1, 1) <NEW_LINE> sobelxy = cv2.convertScaleAbs(sobelxy) <NEW_LINE> sobelx_Y = cv2.addWeighted(sobelx, 0.5, sobely, 0.9, 0) <NEW_LINE> list_sobel = ['sobelx', 'sobely', 'sobelxy', 'sobelx_Y'] <NEW_LINE> for item in list_sobel: <NEW_LINE> <INDENT> cv2.imshow(item, eval(item))
sobel算子是一种模拟计算梯度的差分算子,也是一种基于一阶导数的边缘检测算子,由于有局部平均的作用,也能很好的消除噪声 参数dx表示水平方向求导,dy表示垂直方向求导,ddepth表示图像深度
625941b2099cdd3c635f09fc
def test_list_pop_with_incorrect_policy(self): <NEW_LINE> <INDENT> key = ('test', 'demo', 1) <NEW_LINE> policy = { 'timeout': 0.5 } <NEW_LINE> try: <NEW_LINE> <INDENT> TestListPop.client.list_pop(key, "contact_no", 0, {}, policy) <NEW_LINE> <DEDENT> except e.ParamError as exception: <NEW_LINE> <INDENT> assert exception.code == -2 <NEW_LINE> assert exception.msg == "timeout is invalid"
Invoke list_pop() with incorrect policy
625941b21b99ca400220a84f
def on_about(self): <NEW_LINE> <INDENT> dialog = CloppyButtonWindow(self.master.editor_window) <NEW_LINE> dialog.set_message( f'This program was made by:\n' f'LargeKnome, Hanyuone, and Meta\n' ) <NEW_LINE> dialog.add_choice('Ok') <NEW_LINE> dialog.show()
Called when the 'About' action is selected from the Help menu.
625941b2adb09d7d5db6c532
def run_partition(data, smallk, largek, method, max_depth): <NEW_LINE> <INDENT> print('run partition: data shape={0}, smallk={1}, largek={2}'.format(data.shape, smallk, largek)) <NEW_LINE> genes = uncurl.max_variance_genes(data, nbins=1, frac=1.0) <NEW_LINE> results, ll = method.run(data[genes,:]) <NEW_LINE> w = results[0] <NEW_LINE> m_ = results[1] <NEW_LINE> m = np.zeros((data.shape[0], smallk)) <NEW_LINE> m[genes,:] = m_ <NEW_LINE> clusters_0 = w.argmax(0) <NEW_LINE> if max_depth == 0: <NEW_LINE> <INDENT> print('return at depth 0') <NEW_LINE> return m, w <NEW_LINE> <DEDENT> m_new = np.zeros((m.shape[0], largek)) <NEW_LINE> w_new = np.zeros((largek, w.shape[1])) <NEW_LINE> n_k = largek/smallk <NEW_LINE> for i in range(smallk): <NEW_LINE> <INDENT> data_c0 = data[:,clusters_0==i] <NEW_LINE> m_s1, w_s1 = run_partition(data_c0, smallk, largek/2, method, max_depth-1) <NEW_LINE> print(m_s1.shape) <NEW_LINE> print(w_s1.shape) <NEW_LINE> k_range = range(i*n_k, (i+1)*n_k) <NEW_LINE> m_new[:,k_range] = m_s1 <NEW_LINE> w_new[np.ix_(k_range, clusters_0==i)] = w_s1 <NEW_LINE> <DEDENT> return m_new, w_new
Very simple recursive partitioning-based state estimation system. Args: data smallk (int): k for each individual clustering largek (int): k for the whole global clustering
625941b22c8b7c6e89b3556a
def updateWeights(self, gradient): <NEW_LINE> <INDENT> if self.adagradG is None: <NEW_LINE> <INDENT> self.adagradG = self.buildEmptyGradient() <NEW_LINE> self.adagradG.dL = np.zeros(self.L.shape) <NEW_LINE> <DEDENT> self.adagradG.dV += gradient.dV * gradient.dV <NEW_LINE> self.adagradG.dW += gradient.dW * gradient.dW <NEW_LINE> self.adagradG.db += gradient.db * gradient.db <NEW_LINE> self.adagradG.dWs += gradient.dWs * gradient.dWs <NEW_LINE> self.adagradG.dbs += gradient.dbs * gradient.dbs <NEW_LINE> self.V -= self.learningRate * gradient.dV / np.sqrt(self.adagradG.dV + self.adagradEpsilon) <NEW_LINE> self.W -= self.learningRate * gradient.dW / np.sqrt(self.adagradG.dW + self.adagradEpsilon) <NEW_LINE> self.b -= self.learningRate * gradient.db / np.sqrt(self.adagradG.db + self.adagradEpsilon) <NEW_LINE> self.Ws -= self.learningRate * gradient.dWs / np.sqrt(self.adagradG.dWs + self.adagradEpsilon) <NEW_LINE> self.bs -= self.learningRate * gradient.dbs / np.sqrt(self.adagradG.dbs + self.adagradEpsilon) <NEW_LINE> for elem in gradient.dL: <NEW_LINE> <INDENT> self.adagradG.dL[elem.idx,:] += elem.g * elem.g <NEW_LINE> self.L[elem.idx,:] -= self.learningRate * elem.g / np.sqrt(self.adagradG.dL[elem.idx,:] + self.adagradEpsilon)
Update the weights according to the gradient
625941b2925a0f43d2549c11
def doctest_compareDictsHTML_recursive(): <NEW_LINE> <INDENT> pass
Tests for compareDicts >>> old = dict(a=1, b=dict(x=2, y=5), c=3, d=42, e=dict(x=42)) >>> new = dict(a=1, b=dict(x=3, y=5), d=dict(x=42), e=42, f=dict(x=4)) >>> print(compareDictsHTML(new, old)) <div class="diff"> <div class="diffitem changed"> <strong>b</strong>: dictionary changed: <div class="diff"> <div class="diffitem changed"> <strong>x</strong>: changed to 3 </div> </div> </div> <div class="diffitem removed"> <strong>c</strong>: removed 3 </div> <div class="diffitem changed"> <strong>d</strong>: changed to {'x': 42} </div> <div class="diffitem changed"> <strong>e</strong>: changed to 42 </div> <div class="diffitem added"> <strong>f</strong>: added {'x': 4} </div> </div> >>> old = dict(a=1, b=(2, 5), c=3, d=42) >>> new = dict(a=1, b=(3, 5), d=(42, ), e='x') >>> print(compareDictsHTML(new, old)) <div class="diff"> <div class="diffitem changed"> <strong>b</strong>: tuple changed: <div class="diff"> <div class="diffitem removed"> removed 2 </div> <div class="diffitem added"> added 3 </div> <div class="diffitem same"> last item kept the same </div> </div> </div> <div class="diffitem removed"> <strong>c</strong>: removed 3 </div> <div class="diffitem changed"> <strong>d</strong>: changed to (42,) </div> <div class="diffitem added"> <strong>e</strong>: added 'x' </div> </div>
625941b2de87d2750b85fb2c
def __rshift__(*args, **kwargs): <NEW_LINE> <INDENT> pass
x.__rshift__(y) <==> x>>y
625941b263b5f9789fde6e84
def getselector(self): <NEW_LINE> <INDENT> return self.selector
Returns the selector we are handling.
625941b23346ee7daa2b2b06
def __getitem__(self, index): <NEW_LINE> <INDENT> self._range_check(index) <NEW_LINE> return self.y(index)
if type(index) is slice: return self._content()[index]
625941b221bff66bcd6846fc
def call(self, input_images, input_labels): <NEW_LINE> <INDENT> pass <NEW_LINE> return out
MANN Args: input_images: [B, K+1, N, 784] flattened images labels: [B, K+1, N, N] ground truth labels Returns: [B, K+1, N, N] predictions
625941b2bf627c535bc12f75
def filter(self, resource_manager, **params): <NEW_LINE> <INDENT> m = self.resolve(resource_manager.resource_type) <NEW_LINE> parents = resource_manager.get_parent_manager() <NEW_LINE> results = [] <NEW_LINE> for parent in parents.resources(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> vault_url = None <NEW_LINE> if m.keyvault_child: <NEW_LINE> <INDENT> vault_url = generate_key_vault_url(parent['name']) <NEW_LINE> <DEDENT> subset = resource_manager.enumerate_resources( parent, m, vault_url=vault_url, **params) <NEW_LINE> if subset: <NEW_LINE> <INDENT> if m.annotate_parent: <NEW_LINE> <INDENT> for r in subset: <NEW_LINE> <INDENT> r[m.parent_key] = parent[parents.resource_type.id] <NEW_LINE> <DEDENT> <DEDENT> results.extend(subset) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> log.warning('Child enumeration failed for {0}. {1}' .format(parent[parents.resource_type.id], e)) <NEW_LINE> if m.raise_on_exception: <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return results
Query a set of resources.
625941b291f36d47f21ac294
def update(self, path): <NEW_LINE> <INDENT> os.system(r'tortoiseproc.exe /command:update /path:"'+path+'" /closeonend:1')
更新SVN目录,获取最新的文件
625941b2adb09d7d5db6c533
def save(self, *args, **kwargs): <NEW_LINE> <INDENT> self.collection.save(self, *args, **kwargs)
save the document into the db. `save()` follow the pymongo.collection.save arguments
625941b23346ee7daa2b2b07
def is_zipfile(file): <NEW_LINE> <INDENT> return zipfile.is_zipfile(file)
Wrapper function for detecting if file is a true ZIP archive
625941b2627d3e7fe0d68bed
def test_scope_dir_exists(self): <NEW_LINE> <INDENT> self.assertTrue(os.path.isdir(scope_path), scope_path + ' does not exist')
Verify the scope's directory exists here: /home/phablet/.local/share/unity-scopes/SCOPE
625941b2fff4ab517eb2f1de
@hook.command(autohelp=False) <NEW_LINE> def reddit(text): <NEW_LINE> <INDENT> id_num = None <NEW_LINE> if text: <NEW_LINE> <INDENT> parts = text.lower().strip().split() <NEW_LINE> if len(parts) > 1: <NEW_LINE> <INDENT> url = base_url.format(parts[0].strip()) <NEW_LINE> try: <NEW_LINE> <INDENT> id_num = int(parts[1]) - 1 <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return "Invalid post number." <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> url = base_url.format(parts[0].strip()) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> url = "http://reddit.com/.json" <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> data = http.get_json(url, user_agent=http.ua_chrome) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return "Error: " + str(e) <NEW_LINE> <DEDENT> data = data["data"]["children"] <NEW_LINE> if id_num is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> item = data[id_num]["data"] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> length = len(data) <NEW_LINE> return "Invalid post number. Number must be between 1 and {}.".format(length) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> item = random.choice(data)["data"] <NEW_LINE> <DEDENT> item["title"] = formatting.truncate_str(item["title"], 50) <NEW_LINE> item["link"] = short_url.format(item["id"]) <NEW_LINE> raw_time = datetime.fromtimestamp(int(item["created_utc"])) <NEW_LINE> item["timesince"] = timesince.timesince(raw_time) <NEW_LINE> if item["over_18"]: <NEW_LINE> <INDENT> item["warning"] = " \x02NSFW\x02" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> item["warning"] = "" <NEW_LINE> <DEDENT> return "\x02{title} : {subreddit}\x02 - posted by \x02{author}\x02" " {timesince} ago - {ups} upvotes, {downs} downvotes -" " {link}{warning}".format(**item)
<subreddit> [n] - gets a random post from <subreddit>, or gets the [n]th post in the subreddit
625941b29f2886367277a639
def pyflakes_report(code): <NEW_LINE> <INDENT> flakes_stdout = StreamList() <NEW_LINE> flakes_stderr = StreamList() <NEW_LINE> rep = pyflakes_reporter.Reporter( flakes_stdout.reset(), flakes_stderr.reset()) <NEW_LINE> pyflakes_api.check(code, filename="kale", reporter=rep) <NEW_LINE> if rep._stderr(): <NEW_LINE> <INDENT> raise RuntimeError("Flakes reported the following error:" "\n{}".format('\t' + '\t'.join(rep._stderr()))) <NEW_LINE> <DEDENT> p = r"'(.+?)'" <NEW_LINE> out = rep._stdout() <NEW_LINE> undef_vars = set() <NEW_LINE> for line in filter(lambda a: a != '\n' and 'undefined name' in a, out): <NEW_LINE> <INDENT> var_search = re.search(p, line) <NEW_LINE> undef_vars.add(var_search.group(1)) <NEW_LINE> <DEDENT> return undef_vars
Inspect code using PyFlakes to detect any 'missing name' report. Args: code: A multiline string representing Python code Returns: a list of names that have been reported missing by Flakes
625941b2be383301e01b5236
def as_json(obj: Union[Dict, JsonObj, List], indent: Optional[str] = ' ', filtr: Callable[[dict], dict] = None, **kwargs) -> str: <NEW_LINE> <INDENT> if isinstance(obj, JsonObj) and '_root' in obj: <NEW_LINE> <INDENT> obj = obj._root <NEW_LINE> <DEDENT> default_processor = obj._default if isinstance(obj, JsonObj) else JsonObj._static_default <NEW_LINE> return obj._as_json_dumps(indent, filtr=filtr, **kwargs) if isinstance(obj, JsonObj) else json.dumps(obj, default=lambda o: default_processor(o, filtr) if filtr else default_processor(o), indent=indent, *kwargs)
Convert obj to json string representation. :param obj: pseudo 'self' :param indent: indent argument to dumps :param filtr: filter to remove unwanted elements :param kwargs: other arguments for dumps :return: JSON formatted string
625941b2236d856c2ad4457f
def keyPressEvent(self, QKeyEvent): <NEW_LINE> <INDENT> cursor=self.texts[self.name].textCursor() <NEW_LINE> if QKeyEvent.key() == QtCore.Qt.Key_Return: <NEW_LINE> <INDENT> cursor.select(QtGui.QTextCursor.LineUnderCursor) <NEW_LINE> self.texts[self.name].moveCursor(QtGui.QTextCursor.End, QtGui.QTextCursor.MoveAnchor) <NEW_LINE> self.texts[self.name].setTextCursor(cursor) <NEW_LINE> self.command = self.texts[self.name].textCursor().selectedText() <NEW_LINE> self.texts[self.name].moveCursor(QtGui.QTextCursor.End, QtGui.QTextCursor.MoveAnchor) <NEW_LINE> self.texts[self.name].insertPlainText('\n') <NEW_LINE> self.texts[self.name].moveCursor(QtGui.QTextCursor.End, QtGui.QTextCursor.MoveAnchor) <NEW_LINE> self.historyAppend(self.command) <NEW_LINE> self.execute(self.command) <NEW_LINE> <DEDENT> elif QKeyEvent.key() == QtCore.Qt.Key_Up: <NEW_LINE> <INDENT> if self.history_index > 0: self.history_index -= 1 <NEW_LINE> cursor.select(QtGui.QTextCursor.LineUnderCursor) <NEW_LINE> self.texts[self.name].setTextCursor(cursor) <NEW_LINE> self.texts[self.name].insertPlainText(self.history[self.history_index]) <NEW_LINE> if self.history_index == len(self.history) - 1: <NEW_LINE> <INDENT> cursor.select(QtGui.QTextCursor.LineUnderCursor) <NEW_LINE> self.texts[self.name].setTextCursor(cursor) <NEW_LINE> <DEDENT> <DEDENT> elif QKeyEvent.key() == QtCore.Qt.Key_Down: <NEW_LINE> <INDENT> if self.history_index < len(self.history) - 1: <NEW_LINE> <INDENT> self.history_index += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.history_index = len(self.history) - 1 <NEW_LINE> <DEDENT> cursor.select(QtGui.QTextCursor.LineUnderCursor) <NEW_LINE> self.texts[self.name].setTextCursor(cursor) <NEW_LINE> self.texts[self.name].insertPlainText(self.history[self.history_index]) <NEW_LINE> if self.history_index == len(self.history) - 1: <NEW_LINE> <INDENT> cursor.select(QtGui.QTextCursor.LineUnderCursor) <NEW_LINE> self.texts[self.name].setTextCursor(cursor) <NEW_LINE> <DEDENT> <DEDENT> elif 
QKeyEvent.key() == QtCore.Qt.Key_PageUp: <NEW_LINE> <INDENT> self.history_index = 0 <NEW_LINE> cursor.select(QtGui.QTextCursor.LineUnderCursor) <NEW_LINE> self.texts[self.name].setTextCursor(cursor) <NEW_LINE> self.texts[self.name].insertPlainText(self.history[self.history_index]) <NEW_LINE> <DEDENT> elif QKeyEvent.key() == QtCore.Qt.Key_PageDown: <NEW_LINE> <INDENT> self.history_index = len(self.history) - 1 <NEW_LINE> cursor.select(QtGui.QTextCursor.LineUnderCursor) <NEW_LINE> self.texts[self.name].setTextCursor(cursor) <NEW_LINE> self.texts[self.name].insertPlainText(self.history[self.history_index]) <NEW_LINE> if self.history_index == len(self.history) - 1: <NEW_LINE> <INDENT> cursor.select(QtGui.QTextCursor.LineUnderCursor) <NEW_LINE> self.texts[self.name].setTextCursor(cursor) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.texts[self.name].keyPressEventOrig(QKeyEvent)
Extend the QTextEdit key press event, add routines, then call the original method.
625941b24a966d76dd550daa
def get_array_connections( self, references=None, remotes=None, continuation_token=None, filter=None, ids=None, limit=None, offset=None, remote_ids=None, remote_names=None, sort=None, async_req=False, _return_http_data_only=False, _preload_content=True, _request_timeout=None, ): <NEW_LINE> <INDENT> kwargs = dict( continuation_token=continuation_token, filter=filter, ids=ids, limit=limit, offset=offset, remote_ids=remote_ids, remote_names=remote_names, sort=sort, async_req=async_req, _return_http_data_only=_return_http_data_only, _preload_content=_preload_content, _request_timeout=_request_timeout, ) <NEW_LINE> kwargs = {k: v for k, v in kwargs.items() if v is not None} <NEW_LINE> endpoint = self._array_connections_api.api21_array_connections_get_with_http_info <NEW_LINE> _process_references(references, ['ids'], kwargs) <NEW_LINE> _process_references(remotes, ['remote_ids', 'remote_names'], kwargs) <NEW_LINE> return self._call_api(endpoint, kwargs)
List connected arrays for replication. Args: references (list[FixedReference], optional): A list of references to query for. Overrides ids keyword arguments. remotes (list[FixedReference], optional): A list of remotes to query for. Overrides remote_ids and remote_names keyword arguments. continuation_token (str, optional): An opaque token to iterate over a collection of resources. filter (Filter, optional): A filter to include only resources that match the specified criteria. ids (list[str], optional): A list of resource IDs. If after filtering, there is not at least one resource that matches each of the elements of `ids`, then an error is returned. This cannot be provided together with the `name` or `names` query parameters. limit (int, optional): Limit the number of resources in the response. If not specified, defaults to 1000. offset (int, optional): The offset of the first resource to return from a collection. remote_ids (list[str], optional): A list of remote array IDs. If after filtering, there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with the `remote_names` query parameter. remote_names (list[str], optional): A list of remote array names. If there is not at least one resource that matches each of the elements, then an error is returned. This cannot be provided together with `remote_ids` query parameter. sort (list[Property], optional): Sort the response by the specified Properties. Can also be a single element. async_req (bool, optional): Request runs in separate thread and method returns multiprocessing.pool.ApplyResult. _return_http_data_only (bool, optional): Returns only data field. _preload_content (bool, optional): Response is converted into objects. _request_timeout (int, optional): Total request timeout in seconds. Returns: ValidResponse: If the call was successful. ErrorResponse: If the call was not successful. Raises: PureError: If calling the API fails. 
ValueError: If a parameter is of an invalid type. TypeError: If invalid or missing parameters are used.
625941b25510c4643540f19a
def admin(): <NEW_LINE> <INDENT> driver = webdriver.Chrome() <NEW_LINE> driver.get(hotel_url) <NEW_LINE> driver.maximize_window() <NEW_LINE> hotel_login(driver)
hotel系统购买操作
625941b26e29344779a623b6
def ray_x_polygon(r0, rd, poly_verts, pn, tol=1e-6): <NEW_LINE> <INDENT> t = ray_x_plane(r0, rd, poly_verts[0], pn, tol) <NEW_LINE> ri = r0 + rd*t <NEW_LINE> is_inside = point_in_polygon(ri, poly_verts, pn) <NEW_LINE> return ri, is_inside
Parameters ---------- r0, rd, pn, tol: check function `ray_x_plane()` parameters. poly_verts: check function `point_in_polygon()` parameters. Returns ------- A 2-tuple where the first element represents the coordinates of the intersection point, and the second one a boolean set to True if the ray intersects the polygon
625941b24e696a04525c91f5
@app.route("/users/logs", methods=['GET']) <NEW_LINE> @log_user_action <NEW_LINE> @login_required <NEW_LINE> @update_user_token <NEW_LINE> def list_all_user_actions(): <NEW_LINE> <INDENT> data = _get_request_args() <NEW_LINE> _filter_timestamp(data) <NEW_LINE> return utils.make_json_response( 200, user_log_api.list_actions( user=current_user, **data ) )
List all users actions.
625941b28c0ade5d55d3e75f
def gen_bot_name(): <NEW_LINE> <INDENT> name = choice(letters) + randint(0,10) + choice(letters) + randint(0,10) <NEW_LINE> if name not in robot_names: <NEW_LINE> <INDENT> robot_names += name <NEW_LINE> return name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return gen_bot_name()
returns unique robot__name
625941b24527f215b584c204
@secure_required <NEW_LINE> @permission_required_or_403('change_profile', (get_profile_model(), 'user__username', 'username')) <NEW_LINE> def profile_edit(request, username, edit_profile_form=EditProfileForm, template_name='userena/profile_form.html', success_url=None, extra_context=None): <NEW_LINE> <INDENT> user = get_object_or_404(User, username__iexact=username) <NEW_LINE> profile = user.get_profile() <NEW_LINE> user_initial = {'first_name': user.first_name, 'last_name': user.last_name} <NEW_LINE> form = edit_profile_form(instance=profile, initial=user_initial) <NEW_LINE> if request.method == 'POST': <NEW_LINE> <INDENT> form = edit_profile_form(request.POST, request.FILES, instance=profile, initial=user_initial) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> profile = form.save() <NEW_LINE> if userena_settings.USERENA_USE_MESSAGES: <NEW_LINE> <INDENT> messages.success(request, _('Your profile has been updated.'), fail_silently=True) <NEW_LINE> <DEDENT> if success_url: redirect_to = success_url <NEW_LINE> else: redirect_to = reverse_with_username('userena_profile_detail', user) <NEW_LINE> return redirect(redirect_to) <NEW_LINE> <DEDENT> <DEDENT> if not extra_context: extra_context = dict() <NEW_LINE> extra_context['form'] = form <NEW_LINE> extra_context['profile'] = profile <NEW_LINE> return direct_to_template(request, template_name, extra_context=extra_context)
Edit profile. Edits a profile selected by the supplied username. First checks permissions if the user is allowed to edit this profile, if denied will show a 404. When the profile is succesfully edited will redirect to ``success_url``. :param username: Username of the user which profile should be edited. :param edit_profile_form: Form that is used to edit the profile. The :func:`EditProfileForm.save` method of this form will be called when the form :func:`EditProfileForm.is_valid`. Defaults to :class:`EditProfileForm` from userena. :param template_name: String of the template that is used to render this view. Defaults to ``userena/edit_profile_form.html``. :param success_url: Named URL which be passed on to a django ``reverse`` function after the form is successfully saved. Defaults to the ``userena_detail`` url. :param extra_context: Dictionary containing variables that are passed on to the ``template_name`` template. ``form`` key will always be the form used to edit the profile, and the ``profile`` key is always the edited profile. **Context** ``form`` Form that is used to alter the profile. ``profile`` Instance of the ``Profile`` that is edited.
625941b26aa9bd52df036b42
def setup(): <NEW_LINE> <INDENT> size(800, 600) <NEW_LINE> global production <NEW_LINE> production = csgrammar.repeat(5, AXIOM, RULES, IGNORE) <NEW_LINE> fill(0, 200, 0) <NEW_LINE> noStroke()
processing setup
625941b263b5f9789fde6e85
def ampsToXML(self,amps=None,save=True): <NEW_LINE> <INDENT> codes = amps['code'].unique() <NEW_LINE> psacols = amps.columns[amps.columns.str.startswith('psa')].tolist() <NEW_LINE> imts = ['pga','pgv'] + psacols <NEW_LINE> shakemap_data_tag = Tag('shakemap-data') <NEW_LINE> atts = {'id':self._id, 'lat':self._origin['lat'], 'lon':self._origin['lon'], 'depth':self._origin['depth'], 'mag':self._mag, 'year':self._origin['time'].year, 'month':self._origin['time'].month, 'day':self._origin['time'].day, 'hour':self._origin['time'].hour, 'minute':self._origin['time'].minute, 'second':self._origin['time'].second, 'locstring':self._location, 'created':int(time.time())} <NEW_LINE> earthquake_tag = Tag('earthquake',attributes=atts) <NEW_LINE> shakemap_data_tag.addChild(earthquake_tag) <NEW_LINE> stationlist_tag = Tag('stationlist',attributes={'created':int(time.time())}) <NEW_LINE> for code in codes: <NEW_LINE> <INDENT> rows = amps[amps['code'] == code] <NEW_LINE> atts = {'code':rows.iloc[0]['code'], 'name':rows.iloc[0]['name'], 'insttype':rows.iloc[0]['insttype'], 'lat':rows.iloc[0]['lat'], 'lon':rows.iloc[0]['lon'], 'dist':rows.iloc[0]['dist'], 'source':rows.iloc[0]['source'], 'netid':rows.iloc[0]['netid'], 'commtype':rows.iloc[0]['commtype'], 'loc':rows.iloc[0]['loc'], 'intensity':rows.iloc[0]['intensity']} <NEW_LINE> station_tag = Tag('station',attributes=atts) <NEW_LINE> for index, row in rows.iterrows(): <NEW_LINE> <INDENT> for imt in imts: <NEW_LINE> <INDENT> if imt not in row: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> comptag = Tag('comp',attributes={'name':imt}) <NEW_LINE> imt_tag = Tag(imt,attributes={'value':row[imt],'flag':'0'}) <NEW_LINE> comptag.addChild(imt_tag) <NEW_LINE> station_tag.addChild(comptag) <NEW_LINE> <DEDENT> <DEDENT> stationlist_tag.addChild(station_tag) <NEW_LINE> <DEDENT> earthquake_tag.addChild(stationlist_tag) <NEW_LINE> outfile = os.path.join(self._inputfolder,'%s_dat.xml' % self._source) <NEW_LINE> if save: <NEW_LINE> <INDENT> xmlstr = 
earthquake_tag.renderToXML(outfile) <NEW_LINE> <DEDENT> return xmlstr
Save a DataFrame of peak amplitudes to a ShakeMap compatible XML station data file. :param amps: DataFrame containing the following columns: - netid - name - code - loc - lat - lon - dist - source - insttype - commtype - intensity and then a number of intensity measure types, typically including: - pga - pgv - psa03 - psa10 - psa30 and possibly a number of other pseudo-spectral periods. :param save: Boolean indicating whether XML representation of amps data should be saved to a file. :returns: String containing XML representation of amps data.
625941b250485f2cf553cb38
def import_third_party(modname): <NEW_LINE> <INDENT> with sys_modules_saved(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return importlib.import_module(modname) <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> return None
Import a third-party module we need, but might not be installed. This also cleans out the module after the import, so that coverage won't appear to have imported it. This lets the third party use coverage for their own tests. Arguments: modname (str): the name of the module to import. Returns: The imported module, or None if the module couldn't be imported.
625941b2b7558d58953c4cc3
def check_managed_changes( name, source, source_hash, source_hash_name, user, group, mode, attrs, template, context, defaults, saltenv, contents=None, skip_verify=False, keep_mode=False, seuser=None, serole=None, setype=None, serange=None, verify_ssl=True, **kwargs ): <NEW_LINE> <INDENT> source, source_hash = source_list( source, source_hash, saltenv ) <NEW_LINE> sfn = "" <NEW_LINE> source_sum = None <NEW_LINE> if contents is None: <NEW_LINE> <INDENT> sfn, source_sum, comments = get_managed( name, template, source, source_hash, source_hash_name, user, group, mode, attrs, saltenv, context, defaults, skip_verify, verify_ssl=verify_ssl, **kwargs ) <NEW_LINE> if source_sum and ("hsum" in source_sum): <NEW_LINE> <INDENT> source_sum["hsum"] = source_sum["hsum"].lower() <NEW_LINE> <DEDENT> if comments: <NEW_LINE> <INDENT> __clean_tmp(sfn) <NEW_LINE> return False, comments <NEW_LINE> <DEDENT> if sfn and source and keep_mode: <NEW_LINE> <INDENT> if urllib.parse.urlparse(source).scheme in ( "salt", "file", ) or source.startswith("/"): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> mode = __salt__["cp.stat_file"](source, saltenv=saltenv, octal=True) <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> log.warning("Unable to stat %s: %s", sfn, exc) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> changes = check_file_meta( name, sfn, source, source_sum, user, group, mode, attrs, saltenv, contents, seuser=seuser, serole=serole, setype=setype, serange=serange, ) <NEW_LINE> __clean_tmp(sfn) <NEW_LINE> return changes
Return a dictionary of what changes need to be made for a file .. versionchanged:: 3001 selinux attributes added verify_ssl If ``False``, remote https file sources (``https://``) and source_hash will not attempt to validate the servers certificate. Default is True. .. versionadded:: 3002 CLI Example: .. code-block:: bash salt '*' file.check_managed_changes /etc/httpd/conf.d/httpd.conf salt://http/httpd.conf '{hash_type: 'md5', 'hsum': <md5sum>}' root, root, '755' jinja True None None base
625941b20fa83653e4656d65
@node.commandWrap <NEW_LINE> def NodeEditorToggleCreateNodePane(*args, **kwargs): <NEW_LINE> <INDENT> u <NEW_LINE> return cmds.NodeEditorToggleCreateNodePane(*args, **kwargs)
:rtype: list|str|basestring|DagNode|AttrObject|ArrayAttrObject|Components1Base
625941b2f548e778e58cd31b
def __setitem__(self, key, value): <NEW_LINE> <INDENT> self.add(key, value)
Overrides `[]` operator behavior :param key: the CachedEntry's key :type key: str :param value: the CachedEntry's value :type value: object
625941b255399d3f05588455
def run(): <NEW_LINE> <INDENT> customized()
Main function of the example. Runs each demo and then exits.
625941b2e5267d203edcda42
def main(): <NEW_LINE> <INDENT> os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'api_credit_app.settings') <NEW_LINE> try: <NEW_LINE> <INDENT> from django.core.management import execute_from_command_line <NEW_LINE> <DEDENT> except ImportError as exc: <NEW_LINE> <INDENT> raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc <NEW_LINE> <DEDENT> execute_from_command_line(sys.argv)
Run administrative tasks.
625941b2de87d2750b85fb2d
def trainClassifier(self): <NEW_LINE> <INDENT> features = self.updateHumanStatsAndGetFeatures() <NEW_LINE> features.extend( self.updateBotStatsAndGetFeatures() ) <NEW_LINE> category = self.validGuesses[self.playerOneGuess] <NEW_LINE> self.classifier.train(features, category)
Send features and category to the classifier. Called by self.playGame()
625941b245492302aab5e05f
def get_xy_data(self, new_value): <NEW_LINE> <INDENT> if new_value is None: <NEW_LINE> <INDENT> return None, None, None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> data = getattr(self.module, '_' + self.attribute_name + '_object').data <NEW_LINE> name = getattr( self.module, '_' + self.attribute_name + '_object').params['name'] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return None, None, None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x, y = data <NEW_LINE> return x, y, name
helper function to extract xy data from a curve object
625941b260cbc95b062c62e9
def se_from_ui(p, lower, upper, method, confidence=0.95): <NEW_LINE> <INDENT> assert method in ['ratio', 'non-ratio'], "must specify whether to" "use the ratio method" <NEW_LINE> quantile = 1 - (1 - confidence) / 2 <NEW_LINE> if method == 'ratio': <NEW_LINE> <INDENT> n = np.log(upper) - np.log(lower) <NEW_LINE> d = 2 * stats.norm.ppf(quantile) <NEW_LINE> se = (np.exp(n / d) - 1) * p <NEW_LINE> <DEDENT> elif method == 'non-ratio': <NEW_LINE> <INDENT> se = np.max([upper - p, p - lower]) / stats.norm.ppf(quantile) <NEW_LINE> <DEDENT> return se
Calculates standard error from the uncertainty interval
625941b2004d5f362079a0d9
def walk_stack(f): <NEW_LINE> <INDENT> if f is None: <NEW_LINE> <INDENT> f = sys._getframe().f_back.f_back <NEW_LINE> <DEDENT> while f is not None: <NEW_LINE> <INDENT> yield f, f.f_lineno <NEW_LINE> f = f.f_back
Walk a stack yielding the frame and line number for each frame. This will follow f.f_back from the given frame. If no frame is given, the current stack is used. Usually used with StackSummary.extract.
625941b2d164cc6175782aed
def ensure_jupyterhub_package(prefix): <NEW_LINE> <INDENT> conda.ensure_pip_packages(prefix, [ 'jupyterhub==0.9.4', 'jupyterhub-dummyauthenticator==0.3.1', 'jupyterhub-systemdspawner==0.11', 'jupyterhub-firstuseauthenticator==0.11', 'jupyterhub-ldapauthenticator==1.2.2', 'oauthenticator==0.8.0', ]) <NEW_LINE> traefik.ensure_traefik_binary(prefix)
Install JupyterHub into our conda environment if needed. We install all python packages from PyPI as much as possible in the hub environment. A lot of spawners & authenticators do not have conda-forge packages, but do have pip packages. Keeping all python packages in the hub environment be installed with pip prevents accidental mixing of python and conda packages!
625941b29b70327d1c4e0b74
def setup_method(self, method): <NEW_LINE> <INDENT> sparkConf = create_spark_conf().setMaster("local[4]").setAppName("test model") <NEW_LINE> self.sc = get_spark_context(sparkConf) <NEW_LINE> init_engine()
setup any state tied to the execution of the given method in a class. setup_method is invoked for every test method of a class.
625941b282261d6c526ab243
def isValid(self): <NEW_LINE> <INDENT> errors = {} <NEW_LINE> self.Schema().validate(self, self.REQUEST, errors, 1, 1) <NEW_LINE> if errors: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True
Check if the response is valid. Meaning: a response has been filled in.
625941b2f7d966606f6a9da9
def test_add_multiple_effects(self): <NEW_LINE> <INDENT> effect_1 = (EffectBuilder() .with_effect_name('spell') .build()) <NEW_LINE> effect_2 = (EffectBuilder() .with_effect_name('curse') .build()) <NEW_LINE> self.character.add_effect(effect_1) <NEW_LINE> self.character.add_effect(effect_2) <NEW_LINE> assert_that(self.character, has_effect(effect_1)) <NEW_LINE> assert_that(self.character, has_effect(effect_2))
It should be possible to add multiple effects of different type
625941b299cbb53fe6792988
def execute(self, db_name=None, sql='', close_conn=True): <NEW_LINE> <INDENT> execute_result = ReviewSet(full_sql=sql) <NEW_LINE> conn = self.get_connection(db_name=db_name) <NEW_LINE> cursor = conn.cursor() <NEW_LINE> split_reg = re.compile('^GO$', re.I | re.M) <NEW_LINE> sql = re.split(split_reg, sql, 0) <NEW_LINE> sql = filter(None, sql) <NEW_LINE> split_sql = [f"""use [{db_name}]"""] <NEW_LINE> for i in sql: <NEW_LINE> <INDENT> split_sql = split_sql + [i] <NEW_LINE> <DEDENT> rowid = 1 <NEW_LINE> for statement in split_sql: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cursor.execute(statement) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.warning(f"Mssql命令执行报错,语句:{sql}, 错误信息:{traceback.format_exc()}") <NEW_LINE> execute_result.error = str(e) <NEW_LINE> execute_result.rows.append(ReviewResult( id=rowid, errlevel=2, stagestatus='Execute Failed', errormessage=f'异常信息:{e}', sql=statement, affected_rows=0, execute_time=0, )) <NEW_LINE> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> execute_result.rows.append(ReviewResult( id=rowid, errlevel=0, stagestatus='Execute Successfully', errormessage='None', sql=statement, affected_rows=cursor.rowcount, execute_time=0, )) <NEW_LINE> <DEDENT> rowid += 1 <NEW_LINE> <DEDENT> if execute_result.error: <NEW_LINE> <INDENT> for statement in split_sql[rowid:]: <NEW_LINE> <INDENT> execute_result.rows.append(ReviewResult( id=rowid, errlevel=2, stagestatus='Execute Failed', errormessage=f'前序语句失败, 未执行', sql=statement, affected_rows=0, execute_time=0, )) <NEW_LINE> rowid += 1 <NEW_LINE> <DEDENT> cursor.rollback() <NEW_LINE> for row in execute_result.rows: <NEW_LINE> <INDENT> if row.stagestatus == 'Execute Successfully': <NEW_LINE> <INDENT> row.stagestatus += '\nRollback Successfully' <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> cursor.commit() <NEW_LINE> <DEDENT> if close_conn: <NEW_LINE> <INDENT> self.close() <NEW_LINE> <DEDENT> return execute_result
执行sql语句 返回 Review set
625941b2d58c6744b4257a03
def get_ip_serialuuids(user_org, user_end=None, ips=None, status=None): <NEW_LINE> <INDENT> db_conn = PostgreSQLDB(CONFIG.PostgreSQLDB.host, CONFIG.PostgreSQLDB.port, CONFIG.PostgreSQLDB.user, CONFIG.PostgreSQLDB.pwd, CONFIG.PostgreSQLDB.dbname) <NEW_LINE> sqls = [] <NEW_LINE> sql = "select '['||WM_CONCAT(''''||serialnum||'''')||']' serialnum,'['||WM_CONCAT(''''||ip||'''')||']' ip from (select serialnum,host(ip) as ip,rank() over(partition by ip order by ip,id desc,status desc) as rn from t_ip_protect where user_org=%s" <NEW_LINE> sqls.append(sql) <NEW_LINE> sql = " and ip in ('" + ips.replace(',', "','") + "')" if ips else '' <NEW_LINE> sqls.append(sql) <NEW_LINE> sql = " and user_end in ('" + user_end + "')" if user_end else '' <NEW_LINE> sqls.append(sql) <NEW_LINE> sql = " and status=True" if status == 'open' else '' <NEW_LINE> sqls.append(sql) <NEW_LINE> sql = " and status=False" if status == 'close' else '' <NEW_LINE> sqls.append(sql) <NEW_LINE> sql = " ) a where rn=1" <NEW_LINE> sqls.append(sql) <NEW_LINE> sql_all = ''.join(sqls) <NEW_LINE> data = db_conn.dbcur.queryall_dict(sql_all, (user_org,))[0] if db_conn.dbcur.queryall_dict(sql_all, (user_org,)) else None <NEW_LINE> serialnum = data['serialnum'] <NEW_LINE> ip = data['ip'] <NEW_LINE> uuids_list = list(eval(serialnum)) if serialnum else [] <NEW_LINE> uuids_str = '"' + ','.join(uuids_list).replace(',', '","') + '"' if uuids_list else None <NEW_LINE> ips_list = list(eval(ip)) if ip else [] <NEW_LINE> ips_str = '"' + ','.join(ips_list).replace(',', '","') + '"' if ips_list else None <NEW_LINE> return uuids_list, uuids_str, ips_list, ips_str
:param user_org:一级用户ID :param user_end: 二级用户ID :param ips: 字符串,IP记录,多个IP记录之间用逗号隔开 :param status: IP状态,open--在用,close--已关闭 :return: T_IP表的serialuuids组成的列表以及对应的字符串,ip组成的列表以及对应的字符串 :返回示例:['a61068ae-782e-11e7-9eb8-000ec6c6d278', 'f97637c7-798c-11e7-a854-f45c899a7eaf'] "a61068ae-782e-11e7-9eb8-000ec6c6d278","f97637c7-798c-11e7-a854-f45c899a7eaf" ['10.1.1.2', '10.1.1.41'] "10.1.1.2","10.1.1.41"
625941b2f9cc0f698b1403a7
def make_history(self, session): <NEW_LINE> <INDENT> self.create_transaction_log_entry(session) <NEW_LINE> self.create_history_objects(session) <NEW_LINE> self.create_association_versions(session) <NEW_LINE> self.create_transaction_changes_entries(session) <NEW_LINE> self.create_transaction_meta_entries(session) <NEW_LINE> self.operations = OrderedDict() <NEW_LINE> self._committing = False
Create transaction, transaction changes records, history objects. :param session: SQLAlchemy session object
625941b2d268445f265b4c15
def normalize_env(env, keys, force=False): <NEW_LINE> <INDENT> normenv = {} <NEW_LINE> if env: <NEW_LINE> <INDENT> for k in list(env.keys()): <NEW_LINE> <INDENT> normenv[k] = copy.deepcopy(env[k]) <NEW_LINE> <DEDENT> for k in keys: <NEW_LINE> <INDENT> if k in os.environ and (force or not k in normenv): <NEW_LINE> <INDENT> normenv[k] = os.environ[k] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> sys32_dir = os.path.join(os.environ.get("SystemRoot", os.environ.get("windir", r"C:\Windows\system32")), "System32") <NEW_LINE> if sys32_dir not in normenv['PATH']: <NEW_LINE> <INDENT> normenv['PATH'] = normenv['PATH'] + os.pathsep + sys32_dir <NEW_LINE> <DEDENT> sys32_wbem_dir = os.path.join(sys32_dir, 'Wbem') <NEW_LINE> if sys32_wbem_dir not in normenv['PATH']: <NEW_LINE> <INDENT> normenv['PATH'] = normenv['PATH'] + os.pathsep + sys32_wbem_dir <NEW_LINE> <DEDENT> debug("PATH: %s"%normenv['PATH']) <NEW_LINE> return normenv
Given a dictionary representing a shell environment, add the variables from os.environ needed for the processing of .bat files; the keys are controlled by the keys argument. It also makes sure the environment values are correctly encoded. If force=True, then all of the key values that exist are copied into the returned dictionary. If force=false, values are only copied if the key does not already exist in the copied dictionary. Note: the environment is copied.
625941b2ff9c53063f47bf9f
def check_destination_path(self, path, mkdir_destination=None, ssh=None): <NEW_LINE> <INDENT> ssh = ssh if ssh else self.ssh <NEW_LINE> if path[0:1] == b'~' or path[0:1] == '~': <NEW_LINE> <INDENT> _, stdout, _ = ssh.exec_command("pwd") <NEW_LINE> path = join(stdout.readlines()[0].strip(), path.replace('~/', '')) <NEW_LINE> <DEDENT> with SFTPClient.from_transport(ssh.get_transport()) as sftp: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> sftp.chdir(path) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> if mkdir_destination or self.mkdir_destination: <NEW_LINE> <INDENT> self.mkdir_p(sftp, path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise FileNotFoundError('{} not found on remote host.'.format(path)) <NEW_LINE> <DEDENT> <DEDENT> return path <NEW_LINE> <DEDENT> return None
Returns the destination_dir or raises an Exception if destination_dir does not exist on the remote host. @param path: path on remote host @type path: byte or str @param mkdir_destination: if True attempts to create the remote folder. @type mkdir_destination: boolean @raise FileNotFoundError: if path does not exist and mkdir_destination is False @return path
625941b256ac1b37e6263f83
def _get_state(ipaclient, idnsname): <NEW_LINE> <INDENT> current = set() <NEW_LINE> result = ipaclient.get_dns_record(idnsname) <NEW_LINE> for record in result.get('srvrecord', []): <NEW_LINE> <INDENT> _w, _p, port, host = record.split() <NEW_LINE> current.add('{}:{}'.format(host.rstrip('.'), port)) <NEW_LINE> <DEDENT> return current
Return set of currently defined SRV records.
625941b216aa5153ce36221b
def cast(*args):
    """cast(itkLightObject obj) -> itkCastImageFilterID2IUL2

    Down-cast an itkLightObject to an itkCastImageFilterID2IUL2 via the
    SWIG-generated wrapper module.
    """
    return _itkCastImageFilterPython.itkCastImageFilterID2IUL2_cast(*args)
cast(itkLightObject obj) -> itkCastImageFilterID2IUL2
625941b2462c4b4f79d1d470
def testSplitNamespacesPresent(self):
    """Test reader with multiple namespaces present."""
    # One entity in the default namespace, plus one in each of five
    # explicitly named namespaces '0'..'4'.
    TestEntity().put()
    for i in range(5):
        namespace_manager.set_namespace(str(i))
        TestEntity().put()
    namespace_manager.set_namespace(None)
    readers = input_readers.NamespaceInputReader.split_input(self.mapper_spec)
    # A single reader should cover the whole unbounded __namespace__ range.
    self.assertEquals(1, len(readers))
    r = readers[0]
    self.assertEquals('__namespace__', r._entity_kind)
    self.assertEquals(None, r._key_ranges[0].key_start)
    self.assertEquals(None, r._key_ranges[0].key_end)
    # '' is the default namespace; the rest were created above.
    self.assertEquals(['', '0', '1', '2', '3', '4'], list(r))
Test reader with multiple namespaces present.
625941b266673b3332b91e37
def stop(self):
    """Stop the timer, record the elapsed time in the list and return it."""
    elapsed = time.time() - self.tik
    self.times.append(elapsed)
    return elapsed
停止计时器并将时间记录在列表中
625941b24d74a7450ccd3f66
def is_numeric(self):
    """Returns whether the attribute is a numeric one (date or numeric).

    Delegates to the wrapped Java Attribute object via javabridge.

    :return: whether numeric attribute
    :rtype: bool
    """
    return javabridge.call(self.jobject, "isNumeric", "()Z")
Returns whether the attribute is a numeric one (date or numeric). :return: whether numeric attribute :rtype: bool
625941b2925a0f43d2549c13
@api.route('/issues/search/<issue_category>')
def get_category_from_search(issue_category):
    """XHR endpoint to get issues categories from GitHub's Search API.

    A known category maps to a ``status-<category>`` label query; 'new'
    means open issues carrying none of the status labels.  Unknown
    categories now return a 404 instead of crashing.
    """
    category_list = ['contactready', 'needscontact', 'needsdiagnosis', 'sitewait']
    params = request.args.copy()
    if issue_category in category_list:
        query_string = 'label:{0}'.format('status-' + issue_category)
    elif issue_category == 'new':
        # "new" = open issues that have none of the triage status labels.
        query_string = ' '.join(['-label:%s' % cat for cat in category_list])
        query_string += ' state:open '
    else:
        # Bug fix: previously query_string stayed unbound for unknown
        # categories, raising UnboundLocalError (an HTTP 500).
        return ('Not a valid issue category.', 404)
    return get_search_results(query_string, params)
XHR endpoint to get issues categories from GitHub's Search API. It's also possible to use /issues/category/<issue_category> for a category that maps to a label. This uses the Issues API, which is less costly than the Search API.
625941b2de87d2750b85fb2e
def test_revise_section_with_rules_revise_with_rules(self):
    """Placeholder test for revise_section_with_rules_revise_with_rules.

    TODO: implement assertions for updating an existing section with rules.
    """
    pass
Test case for revise_section_with_rules_revise_with_rules Update an Existing Section with Rules # noqa: E501
625941b299fddb7c1c9de13c
@method(process_properties, 'export')
def _(row: list) -> list:
    """Extracts rw, root and access properties for an export row.

    :param row: raw export row; row[4] holds a shell-quoted property string
    :return: row with column 2 split via export_split and the rw/root/access
        property values appended
    """
    # shlex honors the quoting inside the raw property column.
    properties = get_properties(shlex.split(row[4]))
    return [
        *row[:2],
        *export_split(row[2]),
        row[3],
        properties['rw'],
        properties['root'],
        properties['access']
    ]
Extracts rw, root and access properties for an export row :param row: :return:
625941b291f36d47f21ac296
def __init__(self, *args):
    """__init__(self, unsigned int n, vcl_complexD space) -> vnl_vector_ref_vcl_complexD
    __init__(self, vnl_vector_ref_vcl_complexD v) -> vnl_vector_ref_vcl_complexD

    SWIG-generated constructor delegating to the wrapped C++ vnl_vector_ref.
    """
    _vnl_vector_refPython.vnl_vector_ref_vcl_complexD_swiginit(self,_vnl_vector_refPython.new_vnl_vector_ref_vcl_complexD(*args))
__init__(self, unsigned int n, vcl_complexD space) -> vnl_vector_ref_vcl_complexD __init__(self, vnl_vector_ref_vcl_complexD v) -> vnl_vector_ref_vcl_complexD
625941b2bf627c535bc12f77
@parser_node_rule
def p_NTYPE_GT_EXPR_node(psr_val):
    # Grammar rule handled here:
    #   node : NODE NTYPE_GT_EXPR TYPE_ATTR OP0_ATTR OP1_ATTR attr_list
    # NOTE(review): body is intentionally empty; presumably the
    # @parser_node_rule decorator performs the node construction — confirm.
    pass
node : NODE NTYPE_GT_EXPR TYPE_ATTR OP0_ATTR OP1_ATTR attr_list
625941b2e1aae11d1e749a58
def generate_probability_key(prob_table, length, encoding, message):
    """Given a probability table and encoding along with a message, generate
    a random key that follows the bias provided.

    TODO: not yet implemented — the body is a stub.

    :param prob_table: presumably the probability distribution biasing the
        key — confirm intended structure when implementing
    :param length: presumably the desired key length
    :param encoding: encoding used for the key/message alphabet
    :param message: message the key will be applied to
    """
    pass
Given a probability and encoding along with a message generate a random key that follows the bias provided TODO: Implement when I have time this weekend
625941b2cad5886f8bd26d83
def get(self, request, provider_uuid, identity_uuid, machine_id):
    """Lookup the licenses of a machine for the given provider.

    NOTE(review): identity_uuid is accepted but unused in the lookup.

    :return: 400 response when the machine id is unknown for the provider,
        otherwise a 200 response with the machine's serialized licenses.
    """
    core_machine = ProviderMachine.objects.filter(
        provider__uuid=provider_uuid, identifier=machine_id)
    if not core_machine:
        return failure_response(
            status.HTTP_400_BAD_REQUEST,
            "Machine id %s does not exist" % machine_id)
    core_machine = core_machine.get()
    licenses = core_machine.licenses.all()
    serialized_data = LicenseSerializer(licenses, many=True).data
    return Response(serialized_data, status=status.HTTP_200_OK)
Lookup the machine information (Lookup using the given provider/identity) Update on server (If applicable)
625941b2ab23a570cc24ff28
def test_RunPrivmsgOfOneWord(self):
    """Run test [listen: privmsg, words: one]"""
    # Only applicable to modules that listen for privmsg events.
    if 'privmsg' in self.module.listen:
        self.mock_privmsg_data['msg'] = 'word'
        runnable = self.run_module(self.module, self.mock_privmsg_data)
        if not runnable[0]:
            # runnable[1] carries the failure reason from the module run.
            self.assertTrue(False,runnable[1])
Run test [listen: privmsg, words: one]
625941b2507cdc57c6306a73
@main.route('/', methods=['GET', 'POST'])
def index():
    """Login form to enter a room.

    Rooms are filled in pairs: the module-level counter ``i`` is the current
    room id and is advanced once two users have joined it, so the next login
    starts a fresh room.
    """
    global room_dict
    global i
    form = LoginForm()
    if form.validate_on_submit():
        session['name'] = form.name.data
        session['room'] = i
        # Track how many users have joined the current room.
        if str(i) not in room_dict.keys():
            room_dict[str(i)] = 1
        else:
            room_dict[str(i)] += 1
            if room_dict[str(i)] == 2:
                # Room is full (two users) — open a new room for the next pair.
                i += 1
        return redirect(url_for('.chat'))
    elif request.method == 'GET':
        # Pre-fill the form from the session on plain GETs.
        form.name.data = session.get('name', '')
        form.room.data = session.get('room', '')
    return render_template('index.html', form=form)
Login form to enter a room.
625941b2fb3f5b602dac3438
def Reposition(self, slice_data):
    """Reset the slice camera.

    Based on the code of the method Zoom in vtkInteractorStyleRubberBandZoom,
    from vtk 5.4.3.
    """
    ren = slice_data.renderer
    ren.ResetCamera()
    # Zoom(1.0) is a unit factor but forces the camera update path.
    # (Removed: unused `size = ren.GetSize()` local.)
    ren.GetActiveCamera().Zoom(1.0)
    self.interactor.Render()
Based on the code of the method Zoom in vtkInteractorStyleRubberBandZoom, from vtk 5.4.3
625941b25e10d32532c5ecd1
def getSkinWeightsMap(*skins):
    """Return a dict containing weights for multiple skin clusters.

    Args:
        *skins (PyNode): One or more skin cluster nodes

    Returns:
        A dict of {skinName: weights} for all the skin clusters
    """
    return {skin.nodeName(): getSkinWeights(skin) for skin in skins}
Return a dict containing weights for multiple skin clusters Args: *skins (PyNode): One or more skin cluster nodes Returns: A dict of {skinName: weights} for all the skin clusters
625941b2462c4b4f79d1d471
def _handle_password_hashes(self, obj: ListenerUserAddModifyObject) -> Dict[str, Any]:
    """If password hashes should be sent, return them here.

    :return: dict mapping the Kelvin password-hashes attribute to a
        ``PasswordsHashes`` built from the object's krb5/samba/userPassword
        hashes; an empty dict when hash syncing is disabled for this school
        authority's plugin config or the object carries no passwords.
    """
    if (
        self.school_authority.plugin_configs[self.plugin_name].get("sync_password_hashes", False)
        and obj.user_passwords
    ):
        # Hashes are transported base64-encoded (krb5 keys are binary).
        hashes = obj.user_passwords.dict_krb5_key_base64_encoded()
        return {
            KELVIN_API_PASSWORD_HASHES_ATTRIBUTE: PasswordsHashes(
                user_password=hashes["userPassword"],
                samba_nt_password=hashes["sambaNTPassword"],
                krb_5_key=hashes["krb5Key"],
                krb5_key_version_number=hashes["krb5KeyVersionNumber"],
                samba_pwd_last_set=hashes["sambaPwdLastSet"],
            )
        }
    return {}
If password hashed should be sent, return them here.
625941b215baa723493c3d13
def add(self, *words):
    """Adds one or more words to the Checker and its word file.

    The word file is rewritten sorted with the new words merged in; the
    in-memory word set is updated respecting the checker's case setting.
    """
    words = get_words(words)
    with open(self._wordfile, 'r', encoding=self._encoding) as file:
        lines = file.readlines()
    # Merge existing file contents with the new words, dropping duplicates.
    lines = set(i.strip() for i in lines) | words
    if self._case:
        self._words |= words
    else:
        # Case-insensitive mode stores lowercased words in memory.
        self._words |= set(i.lower() for i in words)
    with open(self._wordfile, 'w', encoding=self._encoding) as file:
        file.write('\n'.join(sorted(lines)))
    if _AUTO_ON:
        # NOTE(review): collect() presumably triggers garbage collection so
        # Solver.instances only holds live solvers — confirm.
        collect()
        for solver in Solver.instances:
            if self is solver.checker:
                solver.refresh()
Adds one or more words to the Checker and it's word file.
625941b2091ae35668666d08
def __call__(self, service_name, version=None):
    """Load the configuration for a given service.

    Required:
    service_name: Name of the service to get the info

    Optional:
    version    : Specific version of the service to get

    Throws a Client exception if the service does not exist.
    """
    # Only pass the version qualifier when one was requested.
    extra = {'version': version} if version else {}
    return self._connection.get(api_path('service', service_name, **extra))
Load the configuration for a given service Required: service_name: Name of the service to get the info Optional: version : Specific version of the service to get Throws a Client exception if the service does not exist.
625941b28a43f66fc4b53e13