code (string, 4–4.48k chars) | docstring (string, 1–6.45k chars) | _id (string, 24 chars)
---|---|---|
def heatEquationMatrix(n): <NEW_LINE> <INDENT> N = n*n <NEW_LINE> A = np.diag([1]*(N-n), -n) + np.diag([1]*(N-n), n) <NEW_LINE> sub = -4*np.eye(n) + np.diag([1]*(n-1), 1) + np.diag([1]*(n-1), -1) <NEW_LINE> for i in range(0, n): <NEW_LINE> <INDENT> A[i*n:(i+1)*n,i*n:(i+1)*n] = sub[:,:] <NEW_LINE> <DEDENT> return A | Returns a block-tridiagonal n^2 x n^2 matrix A to solve the heat equation
in a discrete way by solving the linear system Ax = b, where b is an n^2
vector such that b[n*x+y] is the heat flux density at the point (x,y)
and h is the distance on both axes between 2 consecutive points | 625941b36aa9bd52df036b5d |
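A minimal usage sketch: the function is restated from the row above, and the flux vector `b` is a made-up example.

```python
import numpy as np

def heatEquationMatrix(n):
    # Block-tridiagonal discrete Laplacian on an n x n grid.
    N = n * n
    A = np.diag([1] * (N - n), -n) + np.diag([1] * (N - n), n)
    sub = -4 * np.eye(n) + np.diag([1] * (n - 1), 1) + np.diag([1] * (n - 1), -1)
    for i in range(n):
        A[i * n:(i + 1) * n, i * n:(i + 1) * n] = sub[:, :]
    return A

n = 4
A = heatEquationMatrix(n)      # (n^2, n^2) system matrix
b = np.zeros(n * n)
b[n * 1 + 2] = 1.0             # unit heat flux at grid point (1, 2)
x = np.linalg.solve(A, b)      # stationary temperature field, flattened
print(x.reshape(n, n))
```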
def broken_seqs(ol,break_points): <NEW_LINE> <INDENT> bps = list(break_points) <NEW_LINE> length = len(ol) <NEW_LINE> rgs = rangize(bps,length) <NEW_LINE> rslt = [] <NEW_LINE> for i in range(0,len(rgs)): <NEW_LINE> <INDENT> si,ei = rgs[i] <NEW_LINE> sec = ol[si:ei] <NEW_LINE> rslt.append(sec) <NEW_LINE> <DEDENT> return rslt | ol = initRange(0,20,1)
ol
break_points = [1,6,14,9]
secs = broken_seqs(ol,break_points)
forEach(secs,print) | 625941b30383005118ecf3a0 |
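The helpers `rangize`, `initRange`, and `forEach` come from the surrounding library and are not shown in this row; a self-contained stand-in, assuming `rangize` turns break points plus a length into half-open (start, end) pairs:

```python
def rangize(break_points, length):
    # Assumed behavior of the library helper: break points -> half-open
    # (start, end) slicing pairs covering [0, length).
    edges = [0] + sorted(set(break_points)) + [length]
    return [(edges[i], edges[i + 1]) for i in range(len(edges) - 1)]

def broken_seqs(ol, break_points):
    return [ol[si:ei] for si, ei in rangize(list(break_points), len(ol))]

ol = list(range(20))                      # stand-in for initRange(0, 20, 1)
for sec in broken_seqs(ol, [1, 6, 14, 9]):
    print(sec)                            # [0], [1..5], [6..8], [9..13], [14..19]
```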
def test_molecular_density(self): <NEW_LINE> <INDENT> self.assertAlmostEqual( molecular_density(200), 3.669449208173649e+19, places=24) <NEW_LINE> self.assertAlmostEqual( molecular_density(300), 2.4462994721157665e+19, places=24) <NEW_LINE> self.assertAlmostEqual( molecular_density(400), 1.8347246040868246e+19, places=24) | Tests
:func:`colour.phenomenons.rayleigh.molecular_density`
definition. | 625941b323849d37ff7b2e4e |
def r_hash(self, keys): <NEW_LINE> <INDENT> with self.r.pipeline(transaction=False) as pipe: <NEW_LINE> <INDENT> for key in keys: <NEW_LINE> <INDENT> pipe.hgetall(key) <NEW_LINE> <DEDENT> values = pipe.execute() <NEW_LINE> <DEDENT> res = zip(keys, values) <NEW_LINE> return res | get redis hash values
:param keys: iterable of redis hash keys to fetch
:return: zipped (key, hash dict) pairs | 625941b3377c676e91271f6c |
def send(combination, do_press=True, do_release=True): <NEW_LINE> <INDENT> for keys in canonicalize(combination): <NEW_LINE> <INDENT> if do_press: <NEW_LINE> <INDENT> for key in keys: <NEW_LINE> <INDENT> _os_keyboard.press(to_scan_code(key)) <NEW_LINE> <DEDENT> <DEDENT> if do_release: <NEW_LINE> <INDENT> for key in reversed(keys): <NEW_LINE> <INDENT> _os_keyboard.release(to_scan_code(key)) | Sends OS events that perform the given hotkey combination.
- `combination` can be either a scan code (e.g. 57 for space), single key
(e.g. 'space') or multi-key, multi-step combination (e.g. 'alt+F4, enter').
- `do_press` if true then press events are sent. Defaults to True.
- `do_release` if true then release events are sent. Defaults to True.
send(57)
send('ctrl+alt+del')
send('alt+F4, enter')
send('shift+s')
Note: keys are released in the opposite order they were pressed. | 625941b30a50d4780f666c4a |
def add_structure(self, struct, path=None): <NEW_LINE> <INDENT> if path is None or path == '': <NEW_LINE> <INDENT> self._structures[struct.id] = struct <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> path = path.split('/') <NEW_LINE> if path[0] in self._packages: <NEW_LINE> <INDENT> self._packages[path[0]].add_structure(struct, '/'.join(path[1:])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sub_l10n_pack = Package() <NEW_LINE> sub_l10n_pack.id = path[0] <NEW_LINE> self.add_package(sub_l10n_pack) <NEW_LINE> sub_l10n_pack.add_structure(struct, '/'.join(path[1:])) | Adds an object to L10nPackage.
The optional parameter path declares the place
inside the package where the object should be added.
For example l10npack.add_structure(l10nstruct, 'pkg1/pkg2') is similar to
l10npack.get_package('pkg1').get_package('pkg2').add_structure(l10nstruct)
with the difference that it will create missing sub packages. | 625941b326068e7796caea94 |
@singledispatch <NEW_LINE> def cycle(iterable): <NEW_LINE> <INDENT> return itertools.cycle(iterable) | Make an iterator returning elements from the iterable and saving a copy of each.
When the iterable is exhausted, return elements from the saved copy. Repeats indefinitely.
This function uses single dispatch.
.. seealso:: :func:`itertools.cycle` | 625941b350485f2cf553cb54 |
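Since `cycle` is defined with `functools.singledispatch`, type-specific overloads can be registered on it; a small sketch (the `dict` overload here is hypothetical, not part of the original):

```python
import itertools
from functools import singledispatch

@singledispatch
def cycle(iterable):
    return itertools.cycle(iterable)

@cycle.register(dict)
def _(mapping):
    # Hypothetical overload: cycle over a dict's (key, value) pairs.
    return itertools.cycle(mapping.items())

it = cycle({"a": 1, "b": 2})
print([next(it) for _ in range(5)])  # [('a', 1), ('b', 2), ('a', 1), ('b', 2), ('a', 1)]
```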
def render(self): <NEW_LINE> <INDENT> raise NotImplementedError("Not implemented method") | Interface for render function
@returns output | 625941b3de87d2750b85fb49 |
def line0_p(x,p): <NEW_LINE> <INDENT> return p[0]*x | Straight line through origin: a*x
Parameters
----------
x : float or array_like of floats
independent variable
p : iterable of floats
`p[0]` is a
Returns
-------
float
function value(s) | 625941b330bbd722463cbb7f |
def plot_control_loop_output(output_generator: Generator, x_lim: List[int] = None, plot_errors: bool = True) -> plt.Figure: <NEW_LINE> <INDENT> system_states = [] <NEW_LINE> velocities = [] <NEW_LINE> error_signals = defaultdict(list) <NEW_LINE> controller_outputs = [] <NEW_LINE> time_step = [] <NEW_LINE> for output in output_generator: <NEW_LINE> <INDENT> step, system_state, velocity, error_signal, integral_error, controller_output = output <NEW_LINE> time_step.append(step) <NEW_LINE> system_states.append(system_state) <NEW_LINE> velocities.append(velocity) <NEW_LINE> for key, value in error_signal.__dict__.items(): <NEW_LINE> <INDENT> if value is not None and key != 'integral': <NEW_LINE> <INDENT> error_signals[key].append(value) <NEW_LINE> <DEDENT> <DEDENT> error_signals['integral'].append(integral_error) <NEW_LINE> controller_outputs.append(controller_output) <NEW_LINE> <DEDENT> fig, ax = setup_plt_figure(figsize=(25, 6), xlabel='Time [s]', ylabel='State and Controls') <NEW_LINE> linewidth = 1.5 <NEW_LINE> ax.plot(time_step, system_states, color='green', linewidth=linewidth, alpha=0.8, label='Position') <NEW_LINE> ax.plot(time_step, velocities, color='cyan', linewidth=linewidth, alpha=0.8, label='Velocity') <NEW_LINE> ax.plot(time_step, controller_outputs, color='purple', linewidth=linewidth, alpha=0.8, label='Controller Output') <NEW_LINE> if plot_errors: <NEW_LINE> <INDENT> ax.plot(time_step, error_signals['proportional'], color='red', linewidth=linewidth, alpha=0.8, label='Proportional Error') <NEW_LINE> ax.plot(time_step, error_signals['differential'], color='orange', linewidth=linewidth, alpha=0.8, label='Differential Error') <NEW_LINE> ax.plot(time_step, error_signals['integral'], color='magenta', linewidth=linewidth, alpha=0.8, label='Integral Error') <NEW_LINE> <DEDENT> if x_lim is not None: <NEW_LINE> <INDENT> ax.set_xlim(x_lim) <NEW_LINE> <DEDENT> ax.grid() <NEW_LINE> ax.legend(loc='upper right') <NEW_LINE> return fig | Plot the output of a closed control loop run. | 625941b33c8af77a43ae3562 |
def create_replace(self, scheduled_date, warehouse, product, qty, uom): <NEW_LINE> <INDENT> self.ensure_one() <NEW_LINE> self._ensure_can_be_replaced() <NEW_LINE> moves_before = self.delivery_move_ids <NEW_LINE> self._action_launch_stock_rule(scheduled_date, warehouse, product, qty, uom) <NEW_LINE> new_move = self.delivery_move_ids - moves_before <NEW_LINE> if new_move: <NEW_LINE> <INDENT> self.reception_move_id.move_dest_ids = [(4, new_move.id)] <NEW_LINE> self.message_post( body=_( 'Replacement: ' 'Move <a href="#" data-oe-model="stock.move" ' 'data-oe-id="%d">%s</a> (Picking <a href="#" ' 'data-oe-model="stock.picking" data-oe-id="%d">%s</a>) ' 'has been created.' ) % (new_move.id, new_move.name_get()[0][1], new_move.picking_id.id, new_move.picking_id.name) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.message_post( body=_( 'Replacement:<br/>' 'Product <a href="#" data-oe-model="product.product" ' 'data-oe-id="%d">%s</a><br/>' 'Quantity %f %s<br/>' 'This replacement did not create a new move, but one of ' 'the previously created moves was updated with this data.' ) % (product.id, product.display_name, qty, uom.name) ) <NEW_LINE> <DEDENT> if self.state != 'waiting_replacement': <NEW_LINE> <INDENT> self.state = 'waiting_replacement' | Intended to be invoked by the delivery wizard | 625941b360cbc95b062c6305 |
def fcall(self, children: List[Union[str, Optional[PyLautAtom]]]) -> Change: <NEW_LINE> <INDENT> fname = children[0] <NEW_LINE> args = [] <NEW_LINE> for c in children[1:]: <NEW_LINE> <INDENT> args.append(c) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return self.funcs[fname](*args) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return Change() | Looks up a function name in the function library,
a dictionary passed to the PyLautLang object at init time.
If the function exists, call it. If not, return an empty
Change.
:param list children: A function name plus the arguments to it.
:returns: A Change object. | 625941b345492302aab5e07b |
def test_feedback_can_set_reference(self) -> None: <NEW_LINE> <INDENT> email = '[email protected]' <NEW_LINE> feedback = Feedback(email=email, reviewer_id=self.reviewer_in_db.id, review_status_id=self.review_status_in_db.id) <NEW_LINE> db.session.add(feedback) <NEW_LINE> db.session.commit() <NEW_LINE> feedback_in_db = db.session.query(Feedback) .filter_by(email=email).first() <NEW_LINE> self.assertEqual(feedback_in_db.reviewer_id, self.reviewer_in_db.id, 'fail to refer a reviewer for a feedback') <NEW_LINE> self.assertEqual(feedback_in_db.review_status_id, self.review_status_in_db.id, 'fail to refer a admin status for a feedback') | Test of setting reviewer for a feedback | 625941b32eb69b55b151c665 |
def genPetscTests(self,root,dirs,files,dataDict): <NEW_LINE> <INDENT> debug=False <NEW_LINE> dataDict[root]={} <NEW_LINE> for exfile in files: <NEW_LINE> <INDENT> if exfile.startswith("."): continue <NEW_LINE> if exfile.startswith("#"): continue <NEW_LINE> if exfile.endswith("~"): continue <NEW_LINE> ext=os.path.splitext(exfile)[-1].lstrip('.') <NEW_LINE> if ext not in LANGS: continue <NEW_LINE> fullex=os.path.join(root,exfile) <NEW_LINE> if self.verbose: print(' --> '+fullex) <NEW_LINE> dataDict[root].update(testparse.parseTestFile(fullex,0)) <NEW_LINE> if exfile in dataDict[root]: <NEW_LINE> <INDENT> self.genScriptsAndInfo(exfile,root,dataDict[root][exfile]) <NEW_LINE> <DEDENT> <DEDENT> return | Go through and parse the source files in the directory to generate
the examples based on the metadata contained in the source files | 625941b394891a1f4081b864 |
def unstack(self): <NEW_LINE> <INDENT> if self.stack_pointer == 0: <NEW_LINE> <INDENT> raise Underflow() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.stack_pointer -= 1 <NEW_LINE> return self.remove_node(self.stack_pointer) | Return the top item and delete it from the stack | 625941b397e22403b379cd54 |
def transfer_var(self, value, row, field): <NEW_LINE> <INDENT> j = self.current_row + row <NEW_LINE> line = self.data[j] <NEW_LINE> sub = _SubHelper() <NEW_LINE> sub.set(value, field) <NEW_LINE> newline = re.sub(self.reg, sub.replace, line) <NEW_LINE> self.data[j] = newline | Changes a single variable in the template relative to the
current anchor.
Args
----
value : float, integer, bool, string
New value to set at the location.
row : integer
Number of lines offset from anchor line (0 is anchor line).
This can be negative.
field : integer
Which word in line to replace, as denoted by delimiter(s) | 625941b385dfad0860c3ac14 |
def awaitable(fn, *args, **kwargs): <NEW_LINE> <INDENT> future = POOL.submit(fn, *args, **kwargs) <NEW_LINE> return asyncio.wrap_future(future) | Turn a sync function call into an awaitable | 625941b338b623060ff0abb2 |
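A usage sketch, assuming `POOL` is a module-level `concurrent.futures` executor as the body implies:

```python
import asyncio
import time
from concurrent.futures import ThreadPoolExecutor

POOL = ThreadPoolExecutor()  # assumed module-level pool

def awaitable(fn, *args, **kwargs):
    future = POOL.submit(fn, *args, **kwargs)
    return asyncio.wrap_future(future)

async def main():
    # The blocking call runs in the pool; the event loop stays free.
    await awaitable(time.sleep, 0.1)
    print("done without blocking the loop")

asyncio.run(main())
```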
def get(self, path, as_json=False): <NEW_LINE> <INDENT> bucket, path = self._parse_uri(path) <NEW_LINE> return self._request('%s/o/%s' % (bucket, urllib.parse.quote(path, '')), {'alt': 'media'}, as_json=as_json) | Get an object from GCS. | 625941b363b5f9789fde6ea1 |
def sync_wallet(self, fast=True): <NEW_LINE> <INDENT> if self.synced: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if fast: <NEW_LINE> <INDENT> self.sync_wallet_fast() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.sync_addresses() <NEW_LINE> self.sync_unspent() <NEW_LINE> <DEDENT> self.last_seen_txid = next( (tx['txid'] for tx in self.bci._yield_transactions() if 'txid' in tx), None) <NEW_LINE> if isinstance(self.bci, BitcoinCoreNoHistoryInterface): <NEW_LINE> <INDENT> self.bci.set_wallet_no_history(self.wallet) <NEW_LINE> <DEDENT> return self.synced | Syncs wallet; note that if slow sync
requires multiple rounds this must be called
until self.synced is True.
Before starting the event loop, we cache
the current most recent transactions as
reported by the blockchain interface, since
we are interested in deltas. | 625941b367a9b606de4a7c79 |
def addMusic(item): <NEW_LINE> <INDENT> artist = item.find(attrs={'class':'s_name'}).a['title'].encode('utf-8') <NEW_LINE> title = item.find(attrs={'class':'m_name'}).a['title'].encode('utf-8') <NEW_LINE> mid = item.find(attrs={'class':'number'}).input['mid'].encode('utf-8') <NEW_LINE> iconimage = '' <NEW_LINE> addLink(title,artist,mid,'PlayMusic()',iconimage) | for celebrity (mingxing) and album pages | 625941b30383005118ecf3a1 |
def select(self): <NEW_LINE> <INDENT> warnings.filterwarnings("ignore", category=DeprecationWarning) <NEW_LINE> result_model = self.base_model(self.n_constant) <NEW_LINE> best_value = float('inf') <NEW_LINE> for n_components in range(self.min_n_components, self.max_n_components+1): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> model = GaussianHMM(n_components=n_components, n_iter=1000).fit(self.X, self.lengths) <NEW_LINE> score = model.score(self.X, self.lengths) <NEW_LINE> p = n_components * n_components + 2 * len(self.X[0]) * n_components - 1 <NEW_LINE> N = len(self.X) <NEW_LINE> current_value = -2 * score + p * np.log(N) <NEW_LINE> if current_value < best_value: <NEW_LINE> <INDENT> best_value = current_value <NEW_LINE> result_model = model <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return result_model | select the best model for self.this_word based on
BIC score for n between self.min_n_components and self.max_n_components
:return: GaussianHMM object | 625941b39b70327d1c4e0b90 |
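The quantity minimized above is the Bayesian Information Criterion, BIC = -2 log L + p log N; the same computation in isolation, using the free-parameter count p from the selector:

```python
import numpy as np

def bic(log_likelihood, n_components, n_features, n_samples):
    # Free parameters of a diagonal-covariance GaussianHMM, counted as in
    # the selector: n^2 - 1 transition/start probabilities plus
    # 2 * n_features * n_components for means and variances.
    p = n_components ** 2 + 2 * n_features * n_components - 1
    return -2 * log_likelihood + p * np.log(n_samples)

# Lower BIC wins: a better likelihood must justify its extra parameters.
print(bic(-1200.0, n_components=3, n_features=4, n_samples=500))
print(bic(-1150.0, n_components=5, n_features=4, n_samples=500))
```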
def set_NDV(self, ndv: int) -> bool: <NEW_LINE> <INDENT> self.NDV = ndv <NEW_LINE> return True | set nodata value | 625941b3ec188e330fd5a564 |
def show_diffBragg_state(D, debug_pixel_panelfastslow): <NEW_LINE> <INDENT> D.show_params() <NEW_LINE> MAIN_LOGGER.info("internal spot scale=%f" % D.spot_scale) <NEW_LINE> D.raw_pixels*=0 <NEW_LINE> p, f, s = debug_pixel_panelfastslow <NEW_LINE> D.printout_pixel_fastslow = f, s <NEW_LINE> D.add_diffBragg_spots((p, f, s)) <NEW_LINE> D.raw_pixels*=0 | D, diffBragg instance
debug_pixel_panelfastslow, 3-tuple of ints, panelId, fast coord, slow coord | 625941b350812a4eaa59c0e3 |
def main(): <NEW_LINE> <INDENT> logging.basicConfig(level=logging.INFO) <NEW_LINE> args = parse_args() <NEW_LINE> instance = get_instance(args.instance_id) <NEW_LINE> with ec2window.run_ec2_instance(instance): <NEW_LINE> <INDENT> LOG.info('hello world!') | Start and stop an instance. | 625941b3004d5f362079a0f4 |
def dispatch(method, headers, url, payload): <NEW_LINE> <INDENT> method, headers, filename, param_dict = _preprocess(method, headers, url) <NEW_LINE> gcs_stub = cloudstorage_stub.CloudStorageStub( apiproxy_stub_map.apiproxy.GetStub('blobstore').storage) <NEW_LINE> with GCS_STUB_LOCK: <NEW_LINE> <INDENT> if method == 'POST': <NEW_LINE> <INDENT> return _handle_post(gcs_stub, filename, headers) <NEW_LINE> <DEDENT> elif method == 'PUT': <NEW_LINE> <INDENT> return _handle_put(gcs_stub, filename, param_dict, headers, payload) <NEW_LINE> <DEDENT> elif method == 'GET': <NEW_LINE> <INDENT> return _handle_get(gcs_stub, filename, param_dict, headers) <NEW_LINE> <DEDENT> elif method == 'HEAD': <NEW_LINE> <INDENT> return _handle_head(gcs_stub, filename) <NEW_LINE> <DEDENT> elif method == 'DELETE': <NEW_LINE> <INDENT> return _handle_delete(gcs_stub, filename) <NEW_LINE> <DEDENT> raise ValueError('Unrecognized request method %r.' % method, http.client.METHOD_NOT_ALLOWED) | Dispatches incoming request and returns response.
In dev appserver GCS requests are forwarded to this method via the /_ah/gcs
endpoint. In unittest environment, this method is called instead of urlfetch.
See https://developers.google.com/storage/docs/xml-api-overview for the
expected format for the request.
Args:
method: A string representing the HTTP request method.
headers: A dict mapping HTTP header names to values.
url: A string representing the request URL in the form of
http://<host>/_ah/gcs/<bucket>/<object>.
payload: A string containing the payload for the request.
Returns:
A _FakeUrlFetchResult containing the HTTP status code, headers, and body of
the response.
Raises:
ValueError: invalid request method. | 625941b3e5267d203edcda5e |
def _create_voucher_from_record(self, cursor, uid, record, statement, line_ids, context=None): <NEW_LINE> <INDENT> context.update({'move_line_ids': line_ids}) <NEW_LINE> voucher_obj = self.pool.get('account.voucher') <NEW_LINE> move_line_obj = self.pool.get('account.move.line') <NEW_LINE> voucher_line_obj = self.pool.get('account.voucher.line') <NEW_LINE> line = move_line_obj.browse(cursor, uid, line_ids[0]) <NEW_LINE> partner_id = line.partner_id and line.partner_id.id or False <NEW_LINE> if not partner_id: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> move_id = line.move_id.id <NEW_LINE> result = voucher_obj.onchange_partner_id(cursor, uid, [], partner_id, statement.journal_id.id, abs(record['amount']), statement.currency.id, 'receipt', statement.date, context=context) <NEW_LINE> voucher_res = {'type': 'receipt', 'name': record['reference'], 'partner_id': partner_id, 'journal_id': statement.journal_id.id, 'account_id': result.get('account_id', statement.journal_id.default_credit_account_id.id), 'company_id': statement.company_id.id, 'currency_id': statement.currency.id, 'date': record['date'] or time.strftime('%Y-%m-%d'), 'amount': abs(record['amount']), 'period_id': statement.period_id.id } <NEW_LINE> voucher_id = voucher_obj.create(cursor, uid, voucher_res, context=context) <NEW_LINE> voucher_line_dict = False <NEW_LINE> if result['value']['line_cr_ids']: <NEW_LINE> <INDENT> for line_dict in result['value']['line_cr_ids']: <NEW_LINE> <INDENT> move_line = move_line_obj.browse(cursor, uid, line_dict['move_line_id'], context) <NEW_LINE> if move_id == move_line.move_id.id: <NEW_LINE> <INDENT> voucher_line_dict = line_dict <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if voucher_line_dict: <NEW_LINE> <INDENT> voucher_line_dict.update({'voucher_id': voucher_id}) <NEW_LINE> voucher_line_obj.create(cursor, uid, voucher_line_dict, context=context) <NEW_LINE> <DEDENT> return voucher_id | Create a voucher with voucher line | 625941b3aad79263cf3907f7 |
def __init__(self): <NEW_LINE> <INDENT> self.Type = None <NEW_LINE> self.StartTime = None <NEW_LINE> self.EndTime = None | :param Type: Request type. 1: crowd profile insight statistics; 2: car purchase intention prediction statistics
:type Type: int
:param StartTime: Start timestamp (milliseconds)
:type StartTime: int
:param EndTime: End timestamp (milliseconds)
:type EndTime: int | 625941b391af0d3eaac9b7cf |
def get_sigmoid_data(self, index_offset=150, max_fnh3=1.): <NEW_LINE> <INDENT> if max_fnh3 > 1. or max_fnh3 < 0.: <NEW_LINE> <INDENT> raise ValueError("max_fnh3 must be between 0 and 1") <NEW_LINE> <DEDENT> if self.Tc_indices is None: <NEW_LINE> <INDENT> self.get_temp_changes() <NEW_LINE> <DEDENT> times = self.log_data['Time'].values / 60. <NEW_LINE> Tc_indices = self.Tc_indices.indices <NEW_LINE> Tc_indices2 = self.Tc_indices.indices - index_offset <NEW_LINE> Tc_ind_mspec = np.searchsorted(self.MS_fit_times, times[Tc_indices]) <NEW_LINE> Tc_ind_mspec2 = np.searchsorted(self.MS_fit_times, times[Tc_indices2]) <NEW_LINE> av_eq_mspec = np.row_stack([np.mean(self.MS_gasfracs[Tci2:Tci, :], axis=0) for Tci, Tci2 in zip(Tc_ind_mspec, Tc_ind_mspec2)]) <NEW_LINE> if self.CRD: <NEW_LINE> <INDENT> T = self.log_data['CRD'].values <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> T = self.log_data['Aux'].values <NEW_LINE> <DEDENT> av_eq_T = np.array([np.mean(T[Tci2:Tci]) for Tci, Tci2 in zip(Tc_indices, Tc_indices2)]) <NEW_LINE> av_eq_mspec_std = np.row_stack([np.mean(self.MS_gasfracs[Tci2:Tci, :], axis=0) for Tci, Tci2 in zip(Tc_ind_mspec, Tc_ind_mspec2)]) <NEW_LINE> av_eq_mspec[av_eq_mspec < 0] = 0 <NEW_LINE> conv = (max_fnh3 - av_eq_mspec_std[:, 2]) / (max_fnh3 * (1 + av_eq_mspec_std[:, 2])) <NEW_LINE> conv_unc = conv * av_eq_mspec_std[:, 2] / av_eq_mspec[:, 2] <NEW_LINE> conv2 = 2 * av_eq_mspec[:, 1] / (max_fnh3 * (1 - 2 * av_eq_mspec[:, 1])) <NEW_LINE> conv2_unc = conv2 * 2 * av_eq_mspec_std[:, 1] / av_eq_mspec[:, 1] <NEW_LINE> self.av_eq_T = av_eq_T <NEW_LINE> self.conv = conv <NEW_LINE> self.conv_std = conv_unc <NEW_LINE> self.conv2 = conv2 <NEW_LINE> self.conv2_unc = conv2_unc | Return sigmoid data for all changes in temperature
Args:
index_offset: number of points to take averages over
max_fnh3: maximum fraction of NH3 (for experiments where more than
one input gas). | 625941b363b5f9789fde6ea2 |
def label_preprocessing(self): <NEW_LINE> <INDENT> centered_continuous_label_dict = {key: [] for key in self.emotion_dimension} <NEW_LINE> for emotion in self.emotion_dimension: <NEW_LINE> <INDENT> continuous_labels = self.continuous_label[emotion] <NEW_LINE> for continuous_label in continuous_labels: <NEW_LINE> <INDENT> centered_continuous_label = concordance_correlation_coefficient_centering(continuous_label) <NEW_LINE> centered_continuous_label_dict[emotion].append(np.mean(centered_continuous_label, axis=0)) <NEW_LINE> <DEDENT> <DEDENT> continuous_label_to_csv( self.root_directory, self.output_folder, centered_continuous_label_dict, self.dataset_info) | Carry out the label preprocessing. Since multiple raters are available,
concordance_correlation_coefficient_centering has to be performed. | 625941b382261d6c526ab25f |
def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, FilterKeys): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ | Returns true if both objects are equal | 625941b38a349b6b435e7f37 |
def _trace_global(self, frame, event, arg): <NEW_LINE> <INDENT> if self._stop is True: <NEW_LINE> <INDENT> raise KeyboardInterrupt <NEW_LINE> <DEDENT> if self._end is True: <NEW_LINE> <INDENT> sys.settrace(None) <NEW_LINE> return self._trace_off <NEW_LINE> <DEDENT> filename = inspect.getsourcefile(frame) or inspect.getfile(frame) <NEW_LINE> lineno = frame.f_lineno <NEW_LINE> if self._check_files(filename, self._block_files): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if frame.f_back.f_trace is None and frame != self._bottom_frame: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> elif (frame.f_back.f_trace) == (self._trace_stepout): <NEW_LINE> <INDENT> return self._trace_stepout <NEW_LINE> <DEDENT> filename = self._abs_filename(filename) <NEW_LINE> bps = self.bpoints.filter(('filename', 'lineno'), (filename, lineno)) <NEW_LINE> for bpdata in bps: <NEW_LINE> <INDENT> paused = self._hit_bp(bpdata, frame) <NEW_LINE> if paused is True: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> if self._paused is True: <NEW_LINE> <INDENT> self._trace_pause(frame, event, arg) <NEW_LINE> <DEDENT> if self._paused: <NEW_LINE> <INDENT> if self._stepin is True: <NEW_LINE> <INDENT> local_trace = self._trace_local <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> local_trace = self._trace_stepout <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> local_trace = self._trace_local <NEW_LINE> <DEDENT> self._can_stepin = False <NEW_LINE> self._stepin = False <NEW_LINE> return local_trace | The main trace function called on call events | 625941b31b99ca400220a86c |
def _delete_test_log_files(self): <NEW_LINE> <INDENT> util.remove_file(self._PATH_TO_TEST_DIR + "/" + self._LOG_ERRORS_NAME) <NEW_LINE> util.remove_file(self._PATH_TO_TEST_DIR + "/" + self._LOG_INFO_NAME) | deletes the two generated logfiles | 625941b330c21e258bdfa259 |
def _all(self, *args, **kwargs): <NEW_LINE> <INDENT> data = dict() <NEW_LINE> data["software"] = self._software(**kwargs) <NEW_LINE> data["system"] = self._system(**kwargs) <NEW_LINE> data["services"] = self._services(**kwargs) <NEW_LINE> try: <NEW_LINE> <INDENT> data["configuration"] = self._configuration(**kwargs) <NEW_LINE> <DEDENT> except InspectorQueryException as ex: <NEW_LINE> <INDENT> data["configuration"] = "N/A" <NEW_LINE> log.error(ex) <NEW_LINE> <DEDENT> data["payload"] = self._payload(**kwargs) or "N/A" <NEW_LINE> return data | Return all the summary of the particular system. | 625941b3d164cc6175782b0a |
def __init__(self, width, height, title): <NEW_LINE> <INDENT> file_path = os.path.dirname(os.path.abspath(__file__)) <NEW_LINE> os.chdir(file_path) <NEW_LINE> super().__init__(width, height, title) <NEW_LINE> self.all_sprites_list = None <NEW_LINE> self.player_sprite = None <NEW_LINE> self.score = 0 <NEW_LINE> arcade.set_background_color(arcade.color.AMAZON) | Initializer | 625941b3ec188e330fd5a565 |
def train(self, X, Y, lr=1e-4, lambda_=1e-5, momentum=0.99, steps=10000, batch=None): <NEW_LINE> <INDENT> X = np.asarray(X) <NEW_LINE> Y = np.asarray(Y) <NEW_LINE> m = X.shape[0] <NEW_LINE> if batch is None: <NEW_LINE> <INDENT> batch = X.shape[0] <NEW_LINE> <DEDENT> i = m <NEW_LINE> indices = np.arange(m) <NEW_LINE> for step in range(steps): <NEW_LINE> <INDENT> if i + batch > m: <NEW_LINE> <INDENT> i = 0 <NEW_LINE> np.random.shuffle(indices) <NEW_LINE> <DEDENT> self.backpropagation(X[indices[i:i + batch], :], Y[indices[i:i + batch]], lr=lr, lambda_=lambda_, momentum=momentum) <NEW_LINE> i += batch | Train the network.
Apply multiple steps of stochastic gradient descent.
Parameters
----------
X : ndarray, shape (m, n)
input features (one row per feature vector).
Y : ndarray, shape (m,)
target output (integer class labels).
lr : float
learning rate.
lambda_ : float
regularization coefficients.
momentum : float
momentum coefficient.
steps : int
training iterations.
batch : int or None
size of the minibatch used in each step. When None all
the data is used in each step. | 625941b3046cf37aa974cb07 |
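The epoch handling above reshuffles the index array whenever the next minibatch would run past the data; the same iterator pattern as a self-contained sketch:

```python
import numpy as np

def minibatches(m, batch, steps, seed=0):
    # Yield index slices for `steps` SGD updates, reshuffling once the
    # remaining samples cannot fill a whole batch (same policy as train()).
    rng = np.random.default_rng(seed)
    indices = np.arange(m)
    i = m
    for _ in range(steps):
        if i + batch > m:
            i = 0
            rng.shuffle(indices)
        yield indices[i:i + batch]
        i += batch

for idx in minibatches(m=10, batch=4, steps=5):
    print(idx)
```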
def sp_sim_xpcs_events(events, decaytime, scatterrate, clockperiod=40e-9): <NEW_LINE> <INDENT> import sys <NEW_LINE> lam = clockperiod/decaytime <NEW_LINE> sigmazsq = scatterrate/2.0 <NEW_LINE> num = (1.0 - np.exp(-2.0*lam) ) <NEW_LINE> den = (1.0 - np.exp(-1.0*lam) )**2 <NEW_LINE> sigmay = np.sqrt( sigmazsq * (num/den) ) <NEW_LINE> q2 = np.exp(-1.0*lam) <NEW_LINE> q1 = (1.0 - q2 ) <NEW_LINE> i = 0 <NEW_LINE> nevents = 0 <NEW_LINE> Ex0, Ey0 = (0.0, 0.0) <NEW_LINE> atATime = int(1e13*decaytime/scatterrate) <NEW_LINE> if atATime < 500: <NEW_LINE> <INDENT> atATime = 500 <NEW_LINE> <DEDENT> elif atATime > 1e7: <NEW_LINE> <INDENT> atATime = int(1e7) <NEW_LINE> <DEDENT> Exy = np.zeros((2,atATime)) <NEW_LINE> incidencetimes = np.zeros(events, dtype='int64') <NEW_LINE> while nevents < events: <NEW_LINE> <INDENT> Exy = np.random.normal(size=(2, atATime), scale = sigmay) * q1 <NEW_LINE> Exy[0, 0] += Ex0*q2 <NEW_LINE> Exy[1, 0] += Ey0*q2 <NEW_LINE> for j in range(1,atATime): <NEW_LINE> <INDENT> Exy[:,j] += Exy[:,j-1]*q2 <NEW_LINE> <DEDENT> R = clockperiod * (Exy**2).sum(axis=0) <NEW_LINE> eventsRemaining = events - nevents <NEW_LINE> ridx = np.argwhere(np.random.poisson( R ) >= 1)[0:eventsRemaining] <NEW_LINE> for t in ridx: <NEW_LINE> <INDENT> incidencetimes[nevents] = i+t <NEW_LINE> nevents += 1 <NEW_LINE> sys.stdout.write("\rfound event %d/%d @ %d. R = %f" % (nevents, events, i+t, R[t])) <NEW_LINE> <DEDENT> i += atATime <NEW_LINE> [Ex0, Ey0] = Exy[:,-1] <NEW_LINE> sys.stdout.flush() <NEW_LINE> <DEDENT> print("") <NEW_LINE> return incidencetimes | Simulate a single photon XPCS signal with a decaytime for events events.
The simulated sample has a scatterrate (in Hz) and detector has a
clockperiod (in s).
This algorithm implements the simulator from Rev. Sci. Instrum 74 4273.
arguments:
events - Total number of events to collect.
decaytime - Decay time of simulated dataset, in seconds.
scatterrate - The amount that the sample scatters, in Hz.
clockperiod - Clock rate of the detector, in s. This is the time
resolution of the experiment. Defaults to 40e-9 s (40 ns).
returns:
times - A (events x 1) array of photon incidence times. | 625941b34d74a7450ccd3f80 |
def fetch_simple_stats(_, textbuffer: Gtk.TextBuffer) -> None: <NEW_LINE> <INDENT> tlp_stat_cmd = which("tlp-stat") <NEW_LINE> if tlp_stat_cmd is None: <NEW_LINE> <INDENT> textbuffer.set_text(TLP_STAT_MISSING) <NEW_LINE> return <NEW_LINE> <DEDENT> simple_stat_command = ["tlp-stat", "-r", "-t", "-c", "-s", "-u"] <NEW_LINE> tlp_stat_output = call_tlp_stat(simple_stat_command) <NEW_LINE> textbuffer.set_text(tlp_stat_output) | Fetch simple tlp-stat information. | 625941b399cbb53fe67929a4 |
def quit(self): <NEW_LINE> <INDENT> self.timer_heap.quit() | Terminate the timer factory. Pending timers and events will not
be processed. | 625941b332920d7e50b27f8c |
def focus_in(self, *args): <NEW_LINE> <INDENT> pass | Focus change event | 625941b307f4c71912b11242 |
def _square_wave(self, sampling_frequency, square_wave_frequency, measurement_period, duty_cycle): <NEW_LINE> <INDENT> t_sampling = np.linspace(0, measurement_period, int(measurement_period * sampling_frequency)) <NEW_LINE> square_wave = signal.square(2 * np.pi * square_wave_frequency * t_sampling, duty=duty_cycle) <NEW_LINE> square_wave += 1 <NEW_LINE> square_wave /= 2.0 <NEW_LINE> return square_wave | Creates a square wave. This is used as a box function over a sine wave to turn it off and on.
@param sampling_frequency The frequency at which the wave is sampled. Consecutive samples
are separated by units of time equalling 1/sample_frequency
@param square_wave_frequency The frequency with which the wave oscillates. The square wave
value flips between zero and one every 1/(2*square_wave_frequency)
units of time
@param measurement_period The number of time units to generate the wave for
@param duty_cycle Duty cycle of the wave
@return A numpy array representing the voltages sampled from the square wave.
| 625941b3462c4b4f79d1d48c |
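How the box function is typically used: multiplied against a carrier signal to gate it on and off; a minimal sketch with the same `scipy.signal.square` construction:

```python
import numpy as np
from scipy import signal

fs = 1000.0          # sampling frequency [Hz]
period = 1.0         # measurement period [s]
t = np.linspace(0, period, int(period * fs))

# 0/1 box function, shifted and scaled from square()'s -1..1 range
# exactly as in _square_wave above.
box = (signal.square(2 * np.pi * 5.0 * t, duty=0.5) + 1) / 2.0

carrier = np.sin(2 * np.pi * 50.0 * t)
gated = carrier * box  # sine bursts: on for half of each 5 Hz cycle
```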
def install(self,install_type = "nr"): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.code2sys(self.install_hosts.code(install_type)) <NEW_LINE> return 0 <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return 1 | Pass "nr" for a normal install; pass "na" to install the circumvention ("scientific internet access") + ad-removal hosts | 625941b3de87d2750b85fb4a |
def straight_polynom(value, coefficients): <NEW_LINE> <INDENT> polynom = 0 <NEW_LINE> for a in coefficients: <NEW_LINE> <INDENT> polynom = polynom * value + a <NEW_LINE> <DEDENT> return polynom | Calculate polynomial value with Horner's algorithm
Parameters are the coefficients of the polynomial (highest power first) and a value to evaluate at | 625941b321bff66bcd684713 |
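A quick check of the Horner form against direct evaluation and `numpy.polyval` (both use the same highest-power-first coefficient order):

```python
import numpy as np

def straight_polynom(value, coefficients):
    # Horner's scheme: ((a0*x + a1)*x + a2) ...
    polynom = 0
    for a in coefficients:
        polynom = polynom * value + a
    return polynom

coeffs = [1, 2, 3]  # x^2 + 2x + 3
assert straight_polynom(2, coeffs) == 1 * 2**2 + 2 * 2 + 3 == 11
assert straight_polynom(2, coeffs) == np.polyval(coeffs, 2)
```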
def e2wrn28_7(pretrained=False, **kwargs): <NEW_LINE> <INDENT> model = Wide_ResNet(28, 7, 0.3, f=True, initial_stride=1, **kwargs) <NEW_LINE> if pretrained: <NEW_LINE> <INDENT> model.load_state_dict(torch.load(model_paths['e2wrn28_7'])) <NEW_LINE> <DEDENT> return model | Constructs a Wide ResNet 28-7 model
Args:
pretrained (bool): If True, returns a model pre-trained | 625941b301c39578d7e74c01 |
def goto(self, value) : <NEW_LINE> <INDENT> self._value = float(value) <NEW_LINE> pcnt = (self._value - self._value_from) / (self._value_to - self._value_from) <NEW_LINE> pcnt = 0.0 if pcnt < 0.0 else 1.0 if pcnt > 1.0 else pcnt <NEW_LINE> len_done = int(round(pcnt * self._length)) <NEW_LINE> len_wait = self._length - len_done <NEW_LINE> str_done = self._char_done * len_done <NEW_LINE> str_wait = self._char_wait * len_wait <NEW_LINE> if self._format_percent is not None: <NEW_LINE> <INDENT> out.write(self._format_percent.format(pcnt)) <NEW_LINE> <DEDENT> out.write(" {brace[0]}{str_done}{str_wait}{brace[1]} ".format( brace=self._char_brace, str_done=str_done, str_wait=str_wait)) <NEW_LINE> if self._format_value is not None: <NEW_LINE> <INDENT> out.write(self._format_value.format(int(value))) <NEW_LINE> <DEDENT> out.write("\r") <NEW_LINE> out.flush() | Walk the progress bar to specified value, and flush the display
args:
value: position of progress bar. If out of range, will display 0% or 100% | 625941b3627d3e7fe0d68c0a |
def predict(self, input_matrix): <NEW_LINE> <INDENT> alpha = self.population[0] <NEW_LINE> return alpha.predict(input_matrix) | Predict output matrix of an input matrix
:param input_matrix: matrix of inputs
:return: best guess of output matrix | 625941b3eab8aa0e5d26d91b |
def __init__(self, data, affine, coord_sys, metadata=None): <NEW_LINE> <INDENT> affine = np.asarray(affine) <NEW_LINE> if affine.shape != (4,4): <NEW_LINE> <INDENT> raise ValueError('Affine image takes 4x4 affine as input') <NEW_LINE> <DEDENT> function_domain = CoordinateSystem(['axis%d' % i for i in range(3)], name=coord_sys) <NEW_LINE> function_range = CoordinateSystem(['x','y','z'], name='world') <NEW_LINE> spatial_coordmap = AffineTransform(function_domain, function_range, affine) <NEW_LINE> nonspatial_names = ['axis%d' % i for i in range(3, data.ndim)] <NEW_LINE> if nonspatial_names: <NEW_LINE> <INDENT> nonspatial_coordmap = AffineTransform.from_start_step(nonspatial_names, nonspatial_names, [0]*(data.ndim-3), [1]*(data.ndim-3)) <NEW_LINE> full_coordmap = cmap_product(spatial_coordmap, nonspatial_coordmap) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> full_coordmap = spatial_coordmap <NEW_LINE> <DEDENT> self._spatial_coordmap = spatial_coordmap <NEW_LINE> self.coord_sys = coord_sys <NEW_LINE> Image.__init__(self, data, full_coordmap) <NEW_LINE> if metadata is not None: <NEW_LINE> <INDENT> self.metadata = metadata | Creates a new nipy image with an affine mapping.
Parameters
----------
data : ndarray
ndarray representing the data.
affine : 4x4 ndarray
affine transformation to the reference coordinate system
coord_sys : string
name of the reference coordinate system. | 625941b3ab23a570cc24ff44 |
def test_flavor_list_filter_project(self): <NEW_LINE> <INDENT> response = self.myget('flavors/detail', data={'SNF:flavor-access': self.project1}) <NEW_LINE> self.assertSuccess(response) <NEW_LINE> api_flavors = json.loads(response.content)['flavors'] <NEW_LINE> self.assertEqual(len(api_flavors), 3) <NEW_LINE> for api_flavor in api_flavors: <NEW_LINE> <INDENT> self.assertTrue(len(api_flavor['SNF:flavor-access']) > 0) <NEW_LINE> self.assertTrue(self.project1 in api_flavor['SNF:flavor-access']) <NEW_LINE> for access in api_flavor['SNF:flavor-access']: <NEW_LINE> <INDENT> self.assertTrue(access in self.projects) | Test listing only flavors accesed by a specific project | 625941b34e4d5625662d419b |
def user_media_path(what): <NEW_LINE> <INDENT> default = os.path.join(settings.MEDIA_ROOT, what) <NEW_LINE> key = "{0}_PATH".format(what.upper()) <NEW_LINE> return getattr(settings, key, default) | Make it possible to override storage paths in settings.
By default, all storage paths are in the MEDIA_ROOT.
This is backwards compatible. | 625941b3cad5886f8bd26d9f |
def train(self, n_steps=10000, print_steps=100, plot=True): <NEW_LINE> <INDENT> pyro.clear_param_store() <NEW_LINE> learning_rate = 0.2 * 1e-2 <NEW_LINE> momentum = 1e-1 <NEW_LINE> optimizer = torch.optim.SGD(self.GP.parameters(), lr=learning_rate, momentum=momentum) <NEW_LINE> optimizer = torch.optim.Adam(self.GP.parameters(), lr=learning_rate) <NEW_LINE> elbo = pyro.infer.TraceMeanField_ELBO() <NEW_LINE> loss_fn = elbo.differentiable_loss <NEW_LINE> n_steps = n_steps <NEW_LINE> losses = [] <NEW_LINE> for i in range(1, n_steps + 1): <NEW_LINE> <INDENT> optimizer.zero_grad() <NEW_LINE> loss = loss_fn(self.GP.model, self.GP.guide) <NEW_LINE> if print_steps is not None and i % print_steps == 0: <NEW_LINE> <INDENT> print("Step {}: {}".format(i, loss)) <NEW_LINE> <DEDENT> losses.append(loss) <NEW_LINE> loss.backward() <NEW_LINE> optimizer.step() <NEW_LINE> <DEDENT> self.losses = losses <NEW_LINE> if plot: <NEW_LINE> <INDENT> self._plot() | Train the embedding and GP.
Args:
n_steps (int): number of training steps.
print_steps (int): print the loss if step is a multiple of
`print_steps`. `None` if no print.
plot (bool): plot the steps-by-loss matrix after training. | 625941b3bf627c535bc12f94 |
def back(self, savePath, parentPath, needBack): <NEW_LINE> <INDENT> tr = ExecUtil.execCommand(" tar czvf " + savePath + " -C " + parentPath + " " + needBack) | savePath: path where the backup archive is saved
parentPath: parent path of the content to back up
needBack: the directory (under parentPath) to back up | 625941b3cdde0d52a9e52df1 |
def __init__(self): <NEW_LINE> <INDENT> self.logger = logging.getLogger('JSONUtil') | Constructor | 625941b35166f23b2e1a4f16 |
@app.route('/list') <NEW_LINE> def list_all(): <NEW_LINE> <INDENT> expressions = index.trees[:100] <NEW_LINE> return render_template('list.html', expressions=expressions) | [DEPRECATED]
Return first 100 symbol trees | 625941b310dbd63aa1bd296e |
def read_from_in_pipe(self, *l): <NEW_LINE> <INDENT> txt = '\n' <NEW_LINE> txt_line = '' <NEW_LINE> os_read = os.read <NEW_LINE> self_stdin_pipe = self.stdin_pipe <NEW_LINE> self_mode = self.mode <NEW_LINE> self_write = self.write <NEW_LINE> Clock_schedule_once = Clock.schedule_once <NEW_LINE> self_update_cache = self.update_cache <NEW_LINE> self_flush = self.flush <NEW_LINE> obj = self.obj <NEW_LINE> try: <NEW_LINE> <INDENT> while txt != '': <NEW_LINE> <INDENT> txt = os_read(self_stdin_pipe, 1) <NEW_LINE> txt_line = u''.join((txt_line, txt)) <NEW_LINE> if txt == '\n': <NEW_LINE> <INDENT> if self_mode == 'stdin': <NEW_LINE> <INDENT> self_write(txt_line) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Clock_schedule_once( partial(self_update_cache, txt_line, obj), 0) <NEW_LINE> self_flush() <NEW_LINE> <DEDENT> txt_line = '' <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> Logger.exception(e) | Read the output from the command
| 625941b399fddb7c1c9de151 |
@app.route('/killprocess', methods=['POST']) <NEW_LINE> def kill_process(): <NEW_LINE> <INDENT> if request.method == 'POST': <NEW_LINE> <INDENT> pid = request.form['pid'] <NEW_LINE> try: <NEW_LINE> <INDENT> with dbt.execute('kill %s', pid): pass <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return make_response('', 200) <NEW_LINE> <DEDENT> return make_response('', 401) | Tries to kill the process corresponding to the pid posted. | 625941b3fb3f5b602dac3454 |
def __init__(self, *args, **options): <NEW_LINE> <INDENT> self.__log.call(*args, **options) <NEW_LINE> super().__init__(*args, **options) <NEW_LINE> fm = self.master <NEW_LINE> self._disc_eject_button = Button( self, name="disc_eject_button", text="Eject", command=fm.eject_disc) <NEW_LINE> self._disc_status_label = Label(self, name="disc_status_label") <NEW_LINE> self._retry_disc_check_button = Button( self, name="retry_disc_check_button", text="Retry disc check", command=fm.check_for_disc) <NEW_LINE> self._rip_and_tag_button = _styled( Button( self, name="rip_and_tag_button", text="Rip and Tag", command=fm.rip_and_tag), foreground="Dark Green", font="-weight bold") <NEW_LINE> self.grid_columnconfigure(1, weight=1) | :arg tuple args: positional arguments to initialize the frame
:arg dict options: ``config`` options to initialize the frame
All widgets for this frame are initialized, but layout is
deferred until methods are called to transition between states. | 625941b38c0ade5d55d3e77c |
def __init__(self, file_name='users.db', table_name='users'): <NEW_LINE> <INDENT> self.__file_name = file_name <NEW_LINE> self.__table_name = table_name | This class is responsible for the users.db database. This database has all the information about the users.
This class can create the table, add new users, and change info.
:param file_name: The name of the database file (.db). This database contains the information about the users.
:param table_name: The name of the table inside the database file. This table contains the user_name,
password, and email. | 625941b36aa9bd52df036b5f |
def convert_single_example(ex_index, example, label_list, max_seq_length, tokenizer): <NEW_LINE> <INDENT> if isinstance(example, PaddingInputExample): <NEW_LINE> <INDENT> return InputFeatures( input_ids=[0] * max_seq_length, input_mask=[0] * max_seq_length, segment_ids=[0] * max_seq_length, label_id=0, is_real_example=False) <NEW_LINE> <DEDENT> label_map = {} <NEW_LINE> for (i, label) in enumerate(label_list): <NEW_LINE> <INDENT> label_map[label] = i <NEW_LINE> <DEDENT> tokens_a = tokenizer.tokenize(example.text_a) <NEW_LINE> tokens_b = None <NEW_LINE> if example.text_b: <NEW_LINE> <INDENT> tokens_b = tokenizer.tokenize(example.text_b) <NEW_LINE> <DEDENT> if tokens_b: <NEW_LINE> <INDENT> _truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if len(tokens_a) > max_seq_length - 2: <NEW_LINE> <INDENT> tokens_a = tokens_a[0:(max_seq_length - 2)] <NEW_LINE> <DEDENT> <DEDENT> tokens = [] <NEW_LINE> segment_ids = [] <NEW_LINE> tokens.append("[CLS]") <NEW_LINE> segment_ids.append(0) <NEW_LINE> for token in tokens_a: <NEW_LINE> <INDENT> tokens.append(token) <NEW_LINE> segment_ids.append(0) <NEW_LINE> <DEDENT> tokens.append("[SEP]") <NEW_LINE> segment_ids.append(0) <NEW_LINE> if tokens_b: <NEW_LINE> <INDENT> for token in tokens_b: <NEW_LINE> <INDENT> tokens.append(token) <NEW_LINE> segment_ids.append(1) <NEW_LINE> <DEDENT> tokens.append("[SEP]") <NEW_LINE> segment_ids.append(1) <NEW_LINE> <DEDENT> print(tokens) <NEW_LINE> input_ids = tokenizer.convert_tokens_to_ids(tokens) <NEW_LINE> input_mask = [1] * len(input_ids) <NEW_LINE> while len(input_ids) < max_seq_length: <NEW_LINE> <INDENT> input_ids.append(0) <NEW_LINE> input_mask.append(0) <NEW_LINE> segment_ids.append(0) <NEW_LINE> <DEDENT> assert len(input_ids) == max_seq_length <NEW_LINE> assert len(input_mask) == max_seq_length <NEW_LINE> assert len(segment_ids) == max_seq_length <NEW_LINE> label_id = label_map[example.label] <NEW_LINE> if ex_index < 5: <NEW_LINE> <INDENT> tf.logging.info("*** Example ***") <NEW_LINE> tf.logging.info("guid: %s" % (example.guid)) <NEW_LINE> tf.logging.info("tokens: %s" % " ".join( [tokenization.printable_text(x) for x in tokens])) <NEW_LINE> tf.logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) <NEW_LINE> tf.logging.info("input_mask: %s" % " ".join([str(x) for x in input_mask])) <NEW_LINE> tf.logging.info("segment_ids: %s" % " ".join([str(x) for x in segment_ids])) <NEW_LINE> tf.logging.info("label: %s (id = %d)" % (example.label, label_id)) <NEW_LINE> <DEDENT> feature = InputFeatures( input_ids=input_ids, input_mask=input_mask, segment_ids=segment_ids, label_id=label_id, is_real_example=True) <NEW_LINE> return feature | Converts a single `InputExample` into a single `InputFeatures`. | 625941b3be383301e01b5253 |
def __call__(self, img): <NEW_LINE> <INDENT> s = self.get_params(img) <NEW_LINE> return FF.resize(img, s) | random resize.
by zxn.
:param img: PIL image
:return: resized PIL image | 625941b3e5267d203edcda5f |
def reloadPlugins(self): <NEW_LINE> <INDENT> self.pluginWidget.updatePlugins() | Slot to reload plugins.
| 625941b323849d37ff7b2e50 |
def threeSum2(self, nums): <NEW_LINE> <INDENT> d = collections.Counter(nums) <NEW_LINE> nums_2 = [x[0] for x in d.items() if x[1] > 1] <NEW_LINE> nums_unique = sorted([x[0] for x in d.items()]) <NEW_LINE> rtn = [[0, 0, 0]] if d[0] >= 3 else [] <NEW_LINE> for i, j in enumerate(nums_unique): <NEW_LINE> <INDENT> if j <= 0: <NEW_LINE> <INDENT> numss2 = nums_unique[i + 1:] <NEW_LINE> for x, y in enumerate(numss2): <NEW_LINE> <INDENT> if (0 - j - y in [j, y] and 0 - j - y in nums_2) or (0 - j - y not in [j, y] and 0 - j - y in nums_unique): <NEW_LINE> <INDENT> if sorted([j, y, 0 - j - y]) not in rtn: <NEW_LINE> <INDENT> rtn.append(sorted([j, y, 0 - j - y])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return rtn | :type nums: List[int]
:rtype: List[List[int]] | 625941b3be7bc26dc91cd3c4 |
@_system_proxy <NEW_LINE> def open_system_proxy(_set_key, internet_set_option, proxy_ip=u"", ignore_ip=u""): <NEW_LINE> <INDENT> _set_key('ProxyEnable', 1) <NEW_LINE> if ignore_ip: <NEW_LINE> <INDENT> _set_key('ProxyOverride', ignore_ip) <NEW_LINE> <DEDENT> if proxy_ip: <NEW_LINE> <INDENT> _set_key('ProxyServer', proxy_ip) <NEW_LINE> <DEDENT> internet_set_option(0, 37, 0, 0) <NEW_LINE> internet_set_option(0, 39, 0, 0) <NEW_LINE> return False if system_proxy_status()[0] == 0 else system_proxy_status()[1] == proxy_ip | Enable the system proxy | 625941b30a50d4780f666c4c |
def iterables(self): <NEW_LINE> <INDENT> iterables = {} <NEW_LINE> for key, value in self.parameters.items(): <NEW_LINE> <INDENT> if (not isinstance(value, str) and isinstance(value, collections.abc.Iterable)): <NEW_LINE> <INDENT> iterables[key] = value <NEW_LINE> <DEDENT> <DEDENT> keys = list(iterables.keys()) <NEW_LINE> for prod in product(*iterables.values()): <NEW_LINE> <INDENT> logger.debug("Yielding %s", str(prod)) <NEW_LINE> yield [keys, prod] | Generator yielding the product of every iterable keyword | 625941b34527f215b584c21b |
def how_many(aDict): <NEW_LINE> <INDENT> count = 0 <NEW_LINE> for vals in aDict.values(): <NEW_LINE> <INDENT> count += len(vals) <NEW_LINE> <DEDENT> return count | Returns how many values are in a dictionary of lists. | 625941b360cbc95b062c6307 |
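An equivalent one-liner and a usage check:

```python
def how_many(aDict):
    return sum(len(vals) for vals in aDict.values())

print(how_many({"a": [1, 2], "b": [3, 4, 5]}))  # 5
```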
def cyllat(r, lonc, z): <NEW_LINE> <INDENT> r = ctypes.c_double(r) <NEW_LINE> lonc = ctypes.c_double(lonc) <NEW_LINE> z = ctypes.c_double(z) <NEW_LINE> radius = ctypes.c_double() <NEW_LINE> lon = ctypes.c_double() <NEW_LINE> lat = ctypes.c_double() <NEW_LINE> libspice.cyllat_c(r, lonc, z, ctypes.byref(radius), ctypes.byref(lon), ctypes.byref(lat)) <NEW_LINE> return radius.value, lon.value, lat.value | Convert from cylindrical to latitudinal coordinates.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cyllat_c.html
:param r: Distance of point from z axis.
:type r: float
:param lonc: Cylindrical angle of point from XZ plane(radians).
:type lonc: float
:param z: Height of point above XY plane.
:type z: float
:return: Distance, Longitude (radians), and Latitude of point (radians).
:rtype: tuple | 625941b326238365f5f0ec27 |
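The underlying mapping is simple enough to state in pure Python, which helps when checking results; a sketch of the same conversion (not a replacement for the SPICE call):

```python
import math

def cyllat_py(r, lonc, z):
    # Cylindrical (r, lonc, z) -> latitudinal (radius, lon, lat).
    radius = math.hypot(r, z)   # distance from the origin
    lon = lonc                  # longitude is unchanged
    lat = math.atan2(z, r)      # latitude above the XY plane
    return radius, lon, lat

print(cyllat_py(1.0, 0.5, 1.0))  # (1.4142..., 0.5, 0.7853...)
```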
def filenamesort(filename): <NEW_LINE> <INDENT> name, ext = os.path.splitext(filename) <NEW_LINE> return naturalsort(name), ext | Return a key for sorting filenames. | 625941b363f4b57ef0000ee9 |
def elapseTime(self, progress, seconds): <NEW_LINE> <INDENT> progress.set_time(progress.time - datetime.timedelta(seconds=seconds)) <NEW_LINE> try: <NEW_LINE> <INDENT> session = ScriptSession.objects.get(connection=progress.connection, end_time=None) <NEW_LINE> session.start_time = session.start_time - datetime.timedelta(seconds=seconds) <NEW_LINE> session.save() <NEW_LINE> <DEDENT> except ScriptSession.DoesNotExist: <NEW_LINE> <INDENT> pass | This hack mimics the progression of time, from the perspective of a linear test case,
by actually *subtracting* from the value that's currently stored (usually datetime.datetime.now()) | 625941b3167d2b6e3121895c |
def fit_galaxy_sky_multi(galaxy0, datas, weights, ctrs, psfs, regpenalty, factor): <NEW_LINE> <INDENT> nepochs = len(datas) <NEW_LINE> cvals = [] <NEW_LINE> for data, weight, ctr, psf in zip(datas, weights, ctrs, psfs): <NEW_LINE> <INDENT> cval, _ = chisq_galaxy_sky_single(galaxy0, data, weight, ctr, psf) <NEW_LINE> cvals.append(cval) <NEW_LINE> <DEDENT> logging.info(u" initial \u03C7\u00B2/epoch: [%s]", ", ".join(["%8.2f" % v for v in cvals])) <NEW_LINE> def objective(galparams): <NEW_LINE> <INDENT> galaxy = galparams.reshape(galaxy0.shape) <NEW_LINE> cval, cgrad = chisq_galaxy_sky_multi(galaxy, datas, weights, ctrs, psfs) <NEW_LINE> rval, rgrad = regpenalty(galaxy) <NEW_LINE> totval = cval + rval <NEW_LINE> logging.debug(u'\u03C7\u00B2 = %8.2f (%8.2f + %8.2f)', totval, cval, rval) <NEW_LINE> return totval, np.ravel(cgrad + rgrad) <NEW_LINE> <DEDENT> galparams0 = np.ravel(galaxy0) <NEW_LINE> galparams, f, d = fmin_l_bfgs_b(objective, galparams0, factr=factor) <NEW_LINE> _check_result(d['warnflag'], d['task']) <NEW_LINE> galaxy = galparams.reshape(galaxy0.shape) <NEW_LINE> cvals = [] <NEW_LINE> for data, weight, ctr, psf in zip(datas, weights, ctrs, psfs): <NEW_LINE> <INDENT> cval, _ = chisq_galaxy_sky_single(galaxy, data, weight, ctr, psf) <NEW_LINE> cvals.append(cval) <NEW_LINE> <DEDENT> logging.info(u" final \u03C7\u00B2/epoch: [%s]", ", ".join(["%8.2f" % v for v in cvals])) <NEW_LINE> _log_result("fmin_l_bfgs_b", f, d['nit'], d['funcalls']) <NEW_LINE> skys = [] <NEW_LINE> for data, weight, ctr, psf in zip(datas, weights, ctrs, psfs): <NEW_LINE> <INDENT> scene = psf.evaluate_galaxy(galaxy, data.shape[1:3], ctr) <NEW_LINE> sky = np.average(data - scene, weights=weight, axis=(1, 2)) <NEW_LINE> skys.append(sky) <NEW_LINE> <DEDENT> return galaxy, skys | Fit the galaxy model to multiple data cubes.
Parameters
----------
galaxy0 : ndarray (3-d)
Initial galaxy model.
datas : list of ndarray
Sky-subtracted data for each epoch to fit. | 625941b30a50d4780f666c4d |
def get_used_gpu_memory(self): <NEW_LINE> <INDENT> result = subprocess.check_output( [ 'nvidia-smi', '--query-gpu=memory.used', '--format=csv,nounits,noheader' ], encoding='utf-8') <NEW_LINE> gpu_memory = [int(x) for x in result.strip().split('\n')] <NEW_LINE> gpu_memory = np.array(gpu_memory) <NEW_LINE> return gpu_memory | Adapted code from mjstevens777
https://discuss.pytorch.org/t/access-gpu-memory-usage-in-pytorch/3192/3
Get the current GPU usage.
Return:
gpu_memory: numpy array
memory usage as integers in MB. | 625941b323e79379d52ee327 |
def _annotations(self): <NEW_LINE> <INDENT> start = 0 <NEW_LINE> for end in self.annot_index: <NEW_LINE> <INDENT> yield start, end <NEW_LINE> start = end | Yield blocks of annotated SNPs | 625941b397e22403b379cd56 |
def test_filter_no_parameters(self): <NEW_LINE> <INDENT> statement1 = StatementModel(text="Testing...") <NEW_LINE> statement2 = StatementModel(text="Testing one, two, three.") <NEW_LINE> self.adapter.update(statement1) <NEW_LINE> self.adapter.update(statement2) <NEW_LINE> results = self.adapter.filter() <NEW_LINE> self.assertEqual(len(results), 2) | If no parameters are passed to the filter,
then all statements should be returned. | 625941b3460517430c393f4e |
def validateValue(self, value): <NEW_LINE> <INDENT> if type(value) not in (int, float): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True | Validates the incoming value is the correct type.
Arguments:
value -- Type, value to check the type of.
Return:
True if successful. | 625941b3d164cc6175782b0b |
def faces_from_solids(self, solid): <NEW_LINE> <INDENT> return self._loop_topo(topology_type=TopAbs_FACE, topological_entity=solid) | Parameters
----------
solid
    the solid whose faces are iterated
Returns
-------
    iterator over the faces of the solid | 625941b38c3a87329515817c |
def __neg__(self): <NEW_LINE> <INDENT> lc_new = Lightcurve(self.time, -1*self.counts) <NEW_LINE> return lc_new | Implement the behavior of negation of the light curve objects.
The negation operator ``-`` is supposed to invert the sign of the count
values of a light curve object.
Example
-------
>>> time = [1, 2, 3]
>>> count1 = [100, 200, 300]
>>> count2 = [200, 300, 400]
>>> lc1 = Lightcurve(time, count1)
>>> lc2 = Lightcurve(time, count2)
>>> lc_new = -lc1 + lc2
>>> lc_new.counts
array([100, 100, 100]) | 625941b37c178a314d6ef216 |
def test_file_s_detailed(self): <NEW_LINE> <INDENT> expected_filesize = 2131 <NEW_LINE> filename = 'testfile.tsv' <NEW_LINE> if os.path.exists(filename): <NEW_LINE> <INDENT> os.remove(filename) <NEW_LINE> <DEDENT> self.assertFalse(os.path.exists(filename)) <NEW_LINE> argv = ['s', '-de', '-o', filename, os.path.join(self.test_data_dir, 'hearst1997.json')] <NEW_LINE> try: <NEW_LINE> <INDENT> main(argv) <NEW_LINE> self.assertTrue(os.path.exists(filename)) <NEW_LINE> actual_filesize = len(open(filename).read()) <NEW_LINE> self.assertEqual(expected_filesize, actual_filesize, '%(metric)s %(expected)i != %(actual)i' % {'metric' : 's', 'expected' : expected_filesize, 'actual' : actual_filesize}) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if os.path.exists(filename): <NEW_LINE> <INDENT> os.remove(filename) <NEW_LINE> <DEDENT> <DEDENT> self.assertFalse(os.path.exists(filename)) | Test detailed S output. | 625941b338b623060ff0abb4 |
def home_axis(x=None, y=None, z=None, optional=None): <NEW_LINE> <INDENT> return Code("G28", x=Bool(x), y=Bool(y), z=Bool(z), o=Bool(optional)) | G28: Home one or more axis. | 625941b3d18da76e2353228e |
def __call__(self, atom): <NEW_LINE> <INDENT> position = atom.position <NEW_LINE> atomicnumber = atom.atomicnumber <NEW_LINE> newposition = self.r * position + self.t <NEW_LINE> return atomsite.AtomSite(atomicnumber, newposition) | Return the symmetry operation on an *atomsite*.
:math:`= RV + T`
:arg atom: atomsite to apply the symmetry operation
:type atom: :class:`atomsite.AtomSite` | 625941b38e7ae83300e4ad8a |
def copy_xyz_tree(base_dir, new_dir): <NEW_LINE> <INDENT> structures = get_structures(base_dir) <NEW_LINE> copy_tree(structures, base_dir, new_dir) | Copy all xyz files recursively to a new directory, maintaining the original
directory structure. Give the new directory as a relative path to the
base directory that you are copying from. | 625941b3d486a94d0b98df0d |
def parse_qpidd_conf(config_file): <NEW_LINE> <INDENT> f = open(config_file) <NEW_LINE> try: <NEW_LINE> <INDENT> clean = filter(None, [line.split("#")[0].strip() for line in f]) <NEW_LINE> def item(line): return [x.strip() for x in line.split("=")] <NEW_LINE> config = dict(item(line) for line in clean if "=" in line) <NEW_LINE> <DEDENT> finally: f.close() <NEW_LINE> def name(env_name): return env_name[len(QPID_ENV_PREFIX):].lower() <NEW_LINE> env = dict((name(i[0]), i[1]) for i in os.environ.iteritems() if i[0].startswith(QPID_ENV_PREFIX)) <NEW_LINE> config.update(env) <NEW_LINE> return config | Parse a qpidd.conf configuration file into a dictionary | 625941b350812a4eaa59c0e5 |
def form_valid(self, form): <NEW_LINE> <INDENT> context = self.get_context_data() <NEW_LINE> fl_form_formset = context['formLegendFormFormset'] <NEW_LINE> if fl_form_formset.is_valid(): <NEW_LINE> <INDENT> fl_form_form = form.save(commit=False) <NEW_LINE> fl_form_form.user = self.request.user <NEW_LINE> try: <NEW_LINE> <INDENT> fl_form_form.full_clean() <NEW_LINE> <DEDENT> except ValidationError as e: <NEW_LINE> <INDENT> non_field_errors = e.message_dict[NON_FIELD_ERRORS] <NEW_LINE> form.non_field_errors = non_field_errors <NEW_LINE> return self.form_invalid(form) <NEW_LINE> <DEDENT> fl_form_form.save() <NEW_LINE> fl_field_forms = fl_form_formset.save(commit=False) <NEW_LINE> for field_form in fl_field_forms: <NEW_LINE> <INDENT> field_form.user = self.request.user <NEW_LINE> field_form.form_id = fl_form_form.pk <NEW_LINE> field_form.save() <NEW_LINE> <DEDENT> saveDynamicFormLegendForm(self.request, fl_form_form) <NEW_LINE> return HttpResponseRedirect(self.success_url) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.render_to_response(self.get_context_data(form=form)) | This method overrides form_valid and makes sure the
authenticated user is bound to both the FormLegendForm and
FormLegendField instances that are saved here. | 625941b3aad79263cf3907f9 |
def isCorner(size, i, j): <NEW_LINE> <INDENT> return (i == 0 and j == 0) or (i == 0 and j == size) or (i == size and j == 0) or (i == size and j == size) | Returns true if the given position is a corner | 625941b3ec188e330fd5a566 |
def string_to_tiles(self, text): <NEW_LINE> <INDENT> parsed_tiles = [] <NEW_LINE> suit_markers = re.findall("[mpsz]", text) <NEW_LINE> remainder = text <NEW_LINE> for suit in suit_markers: <NEW_LINE> <INDENT> values, remainder = remainder.split(suit) <NEW_LINE> for value in values: <NEW_LINE> <INDENT> parsed_tiles.append(Tile(int(value), suit)) <NEW_LINE> <DEDENT> <DEDENT> return parsed_tiles | Enables standard string formats to be read in as tiles
E.g. 1m222p --> Tile(1, 'm'), Tile(2, 'p'), etc. | 625941b3e5267d203edcda60 |
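A self-contained run of the parsing idea, with a stand-in `Tile` (the real class is defined elsewhere in the codebase):

```python
import re
from collections import namedtuple

Tile = namedtuple("Tile", ["value", "suit"])  # stand-in for the real class

def string_to_tiles(text):
    parsed, remainder = [], text
    for suit in re.findall("[mpsz]", text):
        values, remainder = remainder.split(suit, 1)
        parsed.extend(Tile(int(v), suit) for v in values)
    return parsed

print(string_to_tiles("1m222p"))
# [Tile(value=1, suit='m'), Tile(value=2, suit='p'), Tile(value=2, suit='p'), Tile(value=2, suit='p')]
```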
def to_pandas(self): <NEW_LINE> <INDENT> self._t_env._before_execute() <NEW_LINE> gateway = get_gateway() <NEW_LINE> max_arrow_batch_size = self._j_table.getTableEnvironment().getConfig().getConfiguration() .getInteger(gateway.jvm.org.apache.flink.python.PythonOptions.MAX_ARROW_BATCH_SIZE) <NEW_LINE> batches = gateway.jvm.org.apache.flink.table.runtime.arrow.ArrowUtils .collectAsPandasDataFrame(self._j_table, max_arrow_batch_size) <NEW_LINE> if batches.hasNext(): <NEW_LINE> <INDENT> import pytz <NEW_LINE> timezone = pytz.timezone( self._j_table.getTableEnvironment().getConfig().getLocalTimeZone().getId()) <NEW_LINE> serializer = ArrowSerializer( create_arrow_schema(self.get_schema().get_field_names(), self.get_schema().get_field_data_types()), self.get_schema().to_row_data_type(), timezone) <NEW_LINE> import pyarrow as pa <NEW_LINE> table = pa.Table.from_batches(serializer.load_from_iterator(batches)) <NEW_LINE> pdf = table.to_pandas() <NEW_LINE> schema = self.get_schema() <NEW_LINE> for field_name in schema.get_field_names(): <NEW_LINE> <INDENT> pdf[field_name] = tz_convert_from_internal( pdf[field_name], schema.get_field_data_type(field_name), timezone) <NEW_LINE> <DEDENT> return pdf <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> import pandas as pd <NEW_LINE> return pd.DataFrame.from_records([], columns=self.get_schema().get_field_names()) | Converts the table to a pandas DataFrame. It will collect the content of the table to
the client side, so please make sure that the content of the table can fit in memory
before calling this method.
Example:
::
>>> pdf = pd.DataFrame(np.random.rand(1000, 2))
>>> table = table_env.from_pandas(pdf, ["a", "b"])
>>> table.filter(table.a > 0.5).to_pandas()
:return: the result pandas DataFrame.
.. versionadded:: 1.11.0 | 625941b330dc7b7665901729 |
def predict(self, items, k=10): <NEW_LINE> <INDENT> result = dict() <NEW_LINE> if isinstance(items, list): <NEW_LINE> <INDENT> for item in items: <NEW_LINE> <INDENT> if item in self.item_similarity: <NEW_LINE> <INDENT> for i, val in self.item_similarity[item].items(): <NEW_LINE> <INDENT> if i not in items: <NEW_LINE> <INDENT> if i not in result: <NEW_LINE> <INDENT> result[i] = val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[i] += val <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return [i for i, val in sorted(result.items(), key=lambda x: x[1], reverse=True)[:k]] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError("Input parameter type is not list") | predict result for a given user
:param items: list, the user's recent behaviour item list
:param k: int, number of top items to return
:return: list, top k recommended item ids | 625941b3d6c5a10208143e04
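A hedged usage sketch, assuming the recommender model has been trained so that self.item_similarity maps each item to a {neighbour: score} dict; item ids are hypothetical:

    recent_items = ["i1", "i5"]              # user's recent behaviour
    top3 = model.predict(recent_items, k=3)  # three highest-scored unseen items
    # items already in recent_items are excluded from the result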
def test_restore_group_node_group_layer(self): <NEW_LINE> <INDENT> p = QgsProject() <NEW_LINE> layer = QgsVectorLayer("Point?field=fldtxt:string", "layer1", "memory") <NEW_LINE> p.addMapLayer(layer, False) <NEW_LINE> layer2 = QgsVectorLayer("Point?field=fldtxt:string", "layer2", "memory") <NEW_LINE> p.addMapLayer(layer2, False) <NEW_LINE> group_node = p.layerTreeRoot().addGroup('my group') <NEW_LINE> group_node.addLayer(layer) <NEW_LINE> group_node.addLayer(layer2) <NEW_LINE> options = QgsGroupLayer.LayerOptions(QgsCoordinateTransformContext()) <NEW_LINE> group_layer = group_node.convertToGroupLayer(options) <NEW_LINE> p.addMapLayer(group_layer, False) <NEW_LINE> with TemporaryDirectory() as d: <NEW_LINE> <INDENT> path = os.path.join(d, 'group_layers.qgs') <NEW_LINE> p.setFileName(path) <NEW_LINE> p.write() <NEW_LINE> p2 = QgsProject() <NEW_LINE> p2.read(path) <NEW_LINE> restored_group_node = p2.layerTreeRoot().children()[0] <NEW_LINE> self.assertEqual(restored_group_node.name(), 'my group') <NEW_LINE> restored_group_layer = restored_group_node.groupLayer() <NEW_LINE> self.assertIsNotNone(restored_group_layer) <NEW_LINE> self.assertEqual(restored_group_layer.childLayers()[0].name(), 'layer2') <NEW_LINE> self.assertEqual(restored_group_layer.childLayers()[1].name(), 'layer1') | Test that group node's QgsGroupLayers are restored with projects | 625941b3091ae35668666d25 |
def _df_structure(self): <NEW_LINE> <INDENT> return pd.DataFrame(0.0, index=self.cosecha.get_index(), columns=( 'saldo_inicial0', 'saldo_inicial30', 'saldo_inicial60', 'saldo_inicial90', 'saldo_inicial120', 'saldo_inicial150', 'saldo_inicial180', 'saldo_inicial210', 'gasto0', 'gasto30', 'gasto60', 'gasto90', 'gasto120', 'gasto150', 'gasto180', 'gasto210', 'castigo0', 'castigo30', 'castigo60', 'castigo90', 'castigo120', 'castigo150', 'castigo180', 'castigo210', 'saldo_final0', 'saldo_final30', 'saldo_final60', 'saldo_final90', 'saldo_final120', 'saldo_final150', 'saldo_final180', 'saldo_final210')) | Creates the structure of the output dataframe
:return: pandas dataframe | 625941b3d6c5a10208143e05
def create_checkpoint(weights_and_biases, global_step, model_dir, batch_norm_vars=None): <NEW_LINE> <INDENT> weights, biases = zip(*weights_and_biases) <NEW_LINE> if batch_norm_vars: <NEW_LINE> <INDENT> assert len(batch_norm_vars) == len(weights_and_biases) - 1 <NEW_LINE> (bn_betas, bn_gammas, bn_means, bn_variances) = zip(*batch_norm_vars) <NEW_LINE> <DEDENT> model_weights = {} <NEW_LINE> for i in range(0, len(weights) - 1): <NEW_LINE> <INDENT> model_weights[HIDDEN_WEIGHTS_NAME_PATTERN % i] = weights[i] <NEW_LINE> model_weights[HIDDEN_BIASES_NAME_PATTERN % i] = biases[i] <NEW_LINE> if batch_norm_vars: <NEW_LINE> <INDENT> model_weights[BATCH_NORM_BETA_NAME_PATTERN % (i, i)] = bn_betas[i] <NEW_LINE> model_weights[BATCH_NORM_GAMMA_NAME_PATTERN % (i, i)] = bn_gammas[i] <NEW_LINE> model_weights[BATCH_NORM_MEAN_NAME_PATTERN % (i, i)] = bn_means[i] <NEW_LINE> model_weights[BATCH_NORM_VARIANCE_NAME_PATTERN % (i, i)] = bn_variances[i] <NEW_LINE> <DEDENT> <DEDENT> model_weights[LOGITS_WEIGHTS_NAME] = weights[-1] <NEW_LINE> model_weights[LOGITS_BIASES_NAME] = biases[-1] <NEW_LINE> with ops.Graph().as_default(): <NEW_LINE> <INDENT> for k, v in six.iteritems(model_weights): <NEW_LINE> <INDENT> variables_lib.Variable(v, name=k, dtype=dtypes.float32) <NEW_LINE> <DEDENT> global_step_var = training_util.create_global_step() <NEW_LINE> with tf_session.Session() as sess: <NEW_LINE> <INDENT> variables_lib.global_variables_initializer().run() <NEW_LINE> global_step_var.assign(global_step).eval() <NEW_LINE> saver.Saver().save(sess, os.path.join(model_dir, 'model.ckpt')) | Create checkpoint file with provided model weights.
Args:
weights_and_biases: Iterable of tuples of weight and bias values.
global_step: Initial global step to save in checkpoint.
model_dir: Directory into which checkpoint is saved.
batch_norm_vars: Optional iterable of (beta, gamma, mean, variance) tuples, one per hidden layer. | 625941b3d18da76e2353228f
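A sketch of how the helper above might be driven; shapes, values and the checkpoint directory are illustrative only:

    import numpy as np
    weights_and_biases = [
        (np.ones((4, 3)), np.zeros(3)),   # hidden layer 0
        (np.ones((3, 2)), np.zeros(2)),   # hidden layer 1
        (np.ones((2, 1)), np.zeros(1)),   # logits layer
    ]
    create_checkpoint(weights_and_biases, global_step=100, model_dir="/tmp/ckpt")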
def test_maybeDeferredAsyncError(self): <NEW_LINE> <INDENT> d = defer.Deferred() <NEW_LINE> d2 = defer.maybeDeferred(lambda: d) <NEW_LINE> d.errback(failure.Failure(RuntimeError())) <NEW_LINE> self.assertImmediateFailure(d2, RuntimeError) | L{defer.maybeDeferred} should let L{defer.Deferred} instance pass by
so that L{failure.Failure} returned by the original instance is the
same. | 625941b382261d6c526ab261 |
def end(self): <NEW_LINE> <INDENT> self.intransaction.pop() | End the most recent transaction. | 625941b38a349b6b435e7f39 |
def annotate_image(self, scale=None, fit_size=None): <NEW_LINE> <INDENT> return self.image_annotator.annotate_image(self.raw_image, scale=scale, fit_size=fit_size) | Adds any enabled annotations to the image.
Optionally resizes the image prior to annotations being applied. The
aspect ratio of the resulting image always matches that of the raw image.
Args:
scale (float): If set then the base image will be scaled by the
supplied multiplier. Cannot be combined with fit_size
fit_size (tuple of ints (width, height)): If set, then scale the
image to fit inside the supplied dimensions. The original
aspect ratio will be preserved. Cannot be combined with scale.
Returns:
:class:`PIL.Image.Image` | 625941b330c21e258bdfa25c |
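Illustrative calls, assuming frame is an instance of the enclosing class with raw_image and image_annotator already populated:

    annotated = frame.annotate_image()                 # original size
    half = frame.annotate_image(scale=0.5)             # half-size copy
    thumb = frame.annotate_image(fit_size=(320, 240))  # fits inside 320x240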
def XPLMFindPluginsMenu(): <NEW_LINE> <INDENT> pass | This function returns the ID of the plug-ins menu, which is created for you
at startup. | 625941b373bcbd0ca4b2be3b |
def setJobDescription(self, value): <NEW_LINE> <INDENT> self._jsc.setJobDescription(value) | Set a human readable description of the current job.
.. note:: Currently, setting a job description (set to local properties) with multiple
threads does not properly work. Internally threads on PVM and JVM are not synced,
and JVM thread can be reused for multiple threads on PVM, which fails to isolate
local properties for each thread on PVM.
To avoid this, enable the pinned thread mode by setting ``PYSPARK_PIN_THREAD``
environment variable to ``true`` and uses :class:`pyspark.InheritableThread`. | 625941b33617ad0b5ed67cbd |
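Typical usage, assuming an active SparkContext sc and an RDD rdd; the description appears next to the job in the Spark web UI:

    sc.setJobDescription("Stage 2/3: join and aggregate")
    rdd.count()   # this job carries the description above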
def get_user_details(self, response): <NEW_LINE> <INDENT> return response | Return user details from MIPT account. | 625941b3ff9c53063f47bfbd |
def opp_ready(c): <NEW_LINE> <INDENT> if not is_timed(c[1]): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return _opponent_waiting(cuid, c[0]) | Returns True if this is a timed challenge and the opponent is ready to play | 625941b3925a0f43d2549c31 |
def saveM(request): <NEW_LINE> <INDENT> username = request.POST.get("username") <NEW_LINE> title = request.POST.get("title") <NEW_LINE> content = request.POST.get("content") <NEW_LINE> publish = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") <NEW_LINE> messageB = Messages(title=title, content=content, username=username, publish=publish) <NEW_LINE> messageB.save() <NEW_LINE> Message = Messages.objects.all() <NEW_LINE> return render(request, 'myApp/loginsuccess.html',{"Message":Message}) | After the user creates a message, save the message
Return the user page after the message is saved | 625941b3498bea3a759b9871
@function_wrapper(output={'parameters': dict, 'topology': str, 'coordinates': str, 'simulation_state': str}) <NEW_LINE> def read_tpr(filename: str = '', output=None): <NEW_LINE> <INDENT> sim_input = fileio.read_tpr(filename) <NEW_LINE> output._internal = sim_input <NEW_LINE> output.parameters = sim_input.parameters.extract() <NEW_LINE> output.topology = filename <NEW_LINE> output.coordinates = filename <NEW_LINE> output.simulation_state = filename | Get simulation input sources from a TPR file.
Outputs:
parameters : MDP simulation parameters
coordinates : atom (or CG particle) coordinates (not yet implemented)
simulation_state : simulation internal state (checkpoint data) (not yet implemented)
topology : molecular force field data (not yet implemented) | 625941b3460517430c393f4f |
def duration_outside_nwh(self, starttime=datetime.time(NORMAL_DAY_START_H), endtime=datetime.time(NORMAL_DAY_END_H)): <NEW_LINE> <INDENT> total = datetime.timedelta() <NEW_LINE> for interval in self.intervals: <NEW_LINE> <INDENT> total += interval.duration_outside_nwh(starttime, endtime) <NEW_LINE> <DEDENT> return total | Returns the total duration outside normal working hours, i.e.
evenings/nights, weekends (and Bank Holidays). | 625941b3099cdd3c635f0a1b |
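A usage sketch, assuming episode is an instance whose intervals each implement the same duration_outside_nwh(); here the working day is overridden to 08:00-17:00:

    import datetime
    outside = episode.duration_outside_nwh(
        starttime=datetime.time(8, 0), endtime=datetime.time(17, 0))
    print(outside)   # a datetime.timedelta summed over all intervals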
def sum_before_first_even(xs): <NEW_LINE> <INDENT> sum = 0 <NEW_LINE> for x in xs: <NEW_LINE> <INDENT> if x % 2 != 0: <NEW_LINE> <INDENT> sum = sum + x <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return sum | Returns the sum of all elements in a list up to but not including the first even number | 625941b3d58c6744b4257a1f
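Worked examples:

    sum_before_first_even([1, 3, 5, 2, 7])   # -> 9  (stops before the 2)
    sum_before_first_even([4, 1, 1])         # -> 0  (first element is even)
    sum_before_first_even([])                # -> 0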
def get_key(self, timestamp=None): <NEW_LINE> <INDENT> return self.api.get('key', timestamp)["permissions"] | Gets the permissions associated with the given API key | 625941b3627d3e7fe0d68c0c |
def create_tensor(self, name=None, persistable=None, dtype=None): <NEW_LINE> <INDENT> if name is not None: <NEW_LINE> <INDENT> var_name = ".".join([self._full_name, name]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> var_name = unique_name.generate(".".join( [self._full_name, "_generated_var"])) <NEW_LINE> <DEDENT> return self._helper.main_program.current_block().create_var( name=var_name, persistable=persistable, dtype=dtype, type=core.VarDesc.VarType.LOD_TENSOR) | Create Tensor for this layer.
Parameters:
name(str, optional): name of the tensor. Please refer to :ref:`api_guide_Name`. Default: None
persistable(bool, optional): whether to set this tensor as persistable. Default: False
dtype(str, optional): data type of this parameter.
If set str, it can be "bool", "float16", "float32", "float64",
"int8", "int16", "int32", "int64", "uint8" or "uint16".
If set None, it will be "float32". Default: None
Returns:
Tensor, created Tensor.
Examples:
.. code-block:: python
import paddle
class MyLinear(paddle.nn.Layer):
def __init__(self,
in_features,
out_features):
super(MyLinear, self).__init__()
self.linear = paddle.nn.Linear(in_features, out_features)
self.back_var = self.create_tensor(name = "linear_tmp_0", dtype=self._dtype)
def forward(self, input):
out = self.linear(input)
paddle.assign( out, self.back_var)
return out | 625941b301c39578d7e74c03 |
def _expand_index(self, num): <NEW_LINE> <INDENT> if self.ndim == 3: <NEW_LINE> <INDENT> num_z = self._shape[2] - self._size[2] + 1 <NEW_LINE> num_y = self._shape[1] - self._size[1] + 1 <NEW_LINE> k = self.stride[2] * num % num_z <NEW_LINE> j = self.stride[1] * (self.stride[2] * num // num_z) % num_y <NEW_LINE> i = self.stride[0] * (num // (num_z * num_y // (self.stride[2] * self.stride[1]))) <NEW_LINE> return i, j, k <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError() | Should expand 1D index into 3D index (x, y, z) | 625941b3a4f1c619b28afe05 |
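A worked example with hypothetical attribute values: with _shape=(5, 5, 5), _size=(2, 2, 2) and stride=(1, 1, 1) there are 4 patch origins per axis, so num enumerates a 4x4x4 grid with z varying fastest:

    # _expand_index(0)  -> (0, 0, 0)
    # _expand_index(5)  -> (0, 1, 1)
    # _expand_index(17) -> (1, 0, 1)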