code: string (lengths 4 – 4.48k)
docstring: string (lengths 1 – 6.45k)
_id: string (length 24)
def compute_IOU(self,bb1,bb2): <NEW_LINE> <INDENT> up_left_corner=np.array((max(bb1[0],bb2[0]),max(bb1[1],bb2[1]))) <NEW_LINE> low_right_corner=np.array((min(bb1[2],bb2[2]),min(bb1[3],bb2[3]))) <NEW_LINE> dist=low_right_corner-up_left_corner <NEW_LINE> if np.any(dist<=0): <NEW_LINE> <INDENT> inter_area=0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> inter_area=np.prod(dist) <NEW_LINE> <DEDENT> union_area=(bb1[2]-bb1[0])*(bb1[3]-bb1[1])+ (bb2[2]-bb2[0])*(bb2[3]-bb2[1])-inter_area <NEW_LINE> return(inter_area/union_area)
returns the IOU of bb1 with bb2
625941b424f1403a92600948
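A standalone sketch of the corrected IoU computation above, rewritten as a free function so it runs on its own (the function name and the sample boxes are illustrative, not from the source):

import numpy as np

def compute_iou(bb1, bb2):
    # Boxes are (x_min, y_min, x_max, y_max).
    up_left = np.array((max(bb1[0], bb2[0]), max(bb1[1], bb2[1])))
    low_right = np.array((min(bb1[2], bb2[2]), min(bb1[3], bb2[3])))
    dist = low_right - up_left
    inter_area = 0 if np.any(dist <= 0) else np.prod(dist)
    union_area = ((bb1[2] - bb1[0]) * (bb1[3] - bb1[1])
                  + (bb2[2] - bb2[0]) * (bb2[3] - bb2[1]) - inter_area)
    return inter_area / union_area

# Two unit squares overlapping in a 0.5 x 1 strip:
# intersection = 0.5, union = 1 + 1 - 0.5 = 1.5, IoU = 1/3.
print(compute_iou((0, 0, 1, 1), (0.5, 0, 1.5, 1)))  # ~0.3333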
def animate_bisection(_f, xmin, xmax): <NEW_LINE> <INDENT> yrange = _f(xmin), _f(xmax) <NEW_LINE> ymin, ymax = min(yrange), max(yrange) <NEW_LINE> _x = np.linspace(xmin, xmax) <NEW_LINE> _y = _f(_x) <NEW_LINE> fig = plt.figure() <NEW_LINE> _ax = plt.axes(xlim=(xmin-0.1, xmax+0.1), ylim=(ymin, ymax)) <NEW_LINE> curve, = _ax.plot([], [], color='blue') <NEW_LINE> left, = _ax.plot([], [], color='red') <NEW_LINE> right, = _ax.plot([], [], color='red') <NEW_LINE> def init(): <NEW_LINE> <INDENT> left.set_data([], []) <NEW_LINE> right.set_data([], []) <NEW_LINE> curve.set_data([], []) <NEW_LINE> return left, right, curve <NEW_LINE> <DEDENT> def animate(i): <NEW_LINE> <INDENT> _a, _b = bisection_method(_f, xmin, xmax, i) <NEW_LINE> logging.info("i: %s, delta_x: %s, delta_y: %s", i, _b-_a, abs(_f(_a)-_f(_b))) <NEW_LINE> logging.debug("a: %s, b: %s", _a, _b) <NEW_LINE> left.set_data([_a, _a], [ymin, ymax]) <NEW_LINE> right.set_data([_b, _b], [ymin, ymax]) <NEW_LINE> curve.set_data(_x, _y) <NEW_LINE> return left, right, curve <NEW_LINE> <DEDENT> animi = animation.FuncAnimation(fig, animate, init_func=init, frames=15, interval=700, blit=True) <NEW_LINE> logging.debug("Your animation: %s", animi) <NEW_LINE> plt.grid() <NEW_LINE> plt.show()
animates the bisection method
625941b49f2886367277a66f
def _as_deces(sal, _P): <NEW_LINE> <INDENT> return 0
Social insurance - death # TODO: to be completed
625941b43317a56b86939a42
def _generate_item_for_reply_or_forward(self, folder, subject, body): <NEW_LINE> <INDENT> item = folder.create_item( subject=subject, body=body, from_=self.store.user, to=self.from_.email, message_class=self.message_class ) <NEW_LINE> source_message_info = self._create_source_message_info('reply').encode('ascii') <NEW_LINE> item[PidLidSourceMessageInfo] = source_message_info <NEW_LINE> return item
Generate item object for 'reply' or 'forward' action. Args: folder (Folder): folder object. subject (str): subject text. body (str): body text. Returns: Item: created item for 'reply' or 'forward' action.
625941b430c21e258bdfa274
def pylab_setup(): <NEW_LINE> <INDENT> if backend.startswith('module://'): <NEW_LINE> <INDENT> backend_name = backend[9:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> backend_name = 'backend_'+backend <NEW_LINE> backend_name = backend_name.lower() <NEW_LINE> backend_name = 'matplotlib.backends.%s'%backend_name.lower() <NEW_LINE> <DEDENT> backend_mod = __import__(backend_name, globals(),locals(),[backend_name]) <NEW_LINE> new_figure_manager = backend_mod.new_figure_manager <NEW_LINE> def do_nothing(*args, **kwargs): pass <NEW_LINE> backend_version = getattr(backend_mod,'backend_version', 'unknown') <NEW_LINE> show = getattr(backend_mod, 'show', do_nothing) <NEW_LINE> draw_if_interactive = getattr(backend_mod, 'draw_if_interactive', do_nothing) <NEW_LINE> if backend.lower() in ['wx','wxagg']: <NEW_LINE> <INDENT> Toolbar = backend_mod.Toolbar <NEW_LINE> __all__.append('Toolbar') <NEW_LINE> <DEDENT> matplotlib.verbose.report('backend %s version %s' % (backend,backend_version)) <NEW_LINE> return new_figure_manager, draw_if_interactive, show
return new_figure_manager, draw_if_interactive and show for pylab
625941b460cbc95b062c6320
def getBalanceInQuote(crypto_enum): <NEW_LINE> <INDENT> return LEDGERS[crypto_enum].balance * TRADES[crypto_enum].current_row.price
Convert the balance of LEDGERS[crypto_enum] to the quote currency
625941b4e5267d203edcda79
def generate_xml_file(self): <NEW_LINE> <INDENT> unique_id = unicode(uuid.uuid4()) <NEW_LINE> xml_content = self.tmpl.render({ 'uniq_string_value': unique_id, 'rand_number': unicode(self.__get_rand_number(self.number_range)), 'random_strings': self.__get_random_strings(), }) <NEW_LINE> file_name = unique_id + '.xml' <NEW_LINE> return file_name, xml_content
Generate XML file @return: XML filename and XML file data.
625941b4a79ad161976cbf1d
def get_pixel_range(self, row, col): <NEW_LINE> <INDENT> range = c_double() <NEW_LINE> error_code = dll.BVTColorImage_GetPixelRange(self._handle, row, col, byref(range)) <NEW_LINE> if (0 != error_code): <NEW_LINE> <INDENT> raise sdkerror.SDKError(error_code) <NEW_LINE> <DEDENT> return range.value
Retrieve the range (from the sonar head) of the specified pixel (in meters)
625941b43539df3088e2e123
def match(self, name, s): <NEW_LINE> <INDENT> return re.match(self.get_pattern(name, r".*"), s)
:param name: Name of the regex pattern :param s: Target string to try to match with
625941b421a7993f00bc7ac0
def add_transaction(self, sender, receiver, amount): <NEW_LINE> <INDENT> self.transactions.append({'sender': sender, 'receiver': receiver, 'amount': amount}) <NEW_LINE> previous_block = self.get_previous_block() <NEW_LINE> return previous_block['index'] + 1
Add a transaction to the list of pending transactions and return the index of the block that will receive it
625941b494891a1f4081b87f
def populate_uuids(apps, schema_editor): <NEW_LINE> <INDENT> Document = apps.get_model('wiki', 'Document') <NEW_LINE> docs = Document.objects.filter(uuid__isnull=True) <NEW_LINE> for document_id in docs.values_list('id', flat=True).iterator(): <NEW_LINE> <INDENT> Document.objects.filter(id=document_id).update(uuid=uuid4())
Populate Document.uuid, without bumping last modified.
625941b41d351010ab8558fd
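For context, a data-migration function with this (apps, schema_editor) signature is typically wired into a Django migration via RunPython; a minimal sketch, assuming a hypothetical app label and dependency:

from django.db import migrations

class Migration(migrations.Migration):

    dependencies = [
        ('wiki', '0001_initial'),  # hypothetical previous migration
    ]

    operations = [
        # Reverse is a no-op: leaving UUIDs in place on rollback is harmless.
        migrations.RunPython(populate_uuids, migrations.RunPython.noop),
    ]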
def on_reset_delivery(self, delivery: QuicDeliveryState) -> None: <NEW_LINE> <INDENT> if delivery == QuicDeliveryState.ACKED: <NEW_LINE> <INDENT> self.is_finished = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.reset_pending = True
Callback when a reset is ACK'd.
625941b4d164cc6175782b25
def index(self, value: float) -> int: <NEW_LINE> <INDENT> cursor = self.head <NEW_LINE> index = 0 <NEW_LINE> while cursor: <NEW_LINE> <INDENT> if cursor.val == value: <NEW_LINE> <INDENT> return index <NEW_LINE> <DEDENT> cursor = cursor.next <NEW_LINE> index += 1 <NEW_LINE> <DEDENT> return -1
Find value index position in linked list :param value: float :return: index position
625941b48e7ae83300e4ada3
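The index method above assumes a singly linked list whose nodes carry val/next; a minimal self-contained sketch (the Node and LinkedList shells are my reconstruction, not the source's):

class Node:
    def __init__(self, val, next=None):
        self.val = val
        self.next = next

class LinkedList:
    def __init__(self, *values):
        self.head = None
        for v in reversed(values):
            self.head = Node(v, self.head)

    def index(self, value):
        # Same linear scan as the record above.
        cursor, i = self.head, 0
        while cursor:
            if cursor.val == value:
                return i
            cursor = cursor.next
            i += 1
        return -1

print(LinkedList(1.0, 2.5, 4.0).index(2.5))  # 1
print(LinkedList(1.0, 2.5, 4.0).index(9.9))  # -1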
def ratios(self): <NEW_LINE> <INDENT> emp = EmploymentData(self.data, self.dpath) <NEW_LINE> emp.id_extend() <NEW_LINE> emp.ratio_gen() <NEW_LINE> self.data = emp.data_merge() <NEW_LINE> self.data = self.data[pd.notnull(self.data['id'])] <NEW_LINE> self.data = self.data[pd.notnull(self.data['school'])] <NEW_LINE> for name in np.unique(self.data['school']): <NEW_LINE> <INDENT> selection = self.data.loc[ self.data['school'] == name, ['year', 'ratio'] ].reset_index(drop=True) <NEW_LINE> year = np.min( selection['year'][np.where(selection['ratio'].notnull())[0]] ) <NEW_LINE> self.data.loc[ (self.data['school'] == name) & (self.data['year'] < year), 'ratio' ] = self.data.loc[ (self.data['school'] == name) & (self.data['year'] == year), 'ratio' ].reset_index(drop=True)[0]
Generate transparency ratios
625941b497e22403b379cd70
def test_timeseries(): <NEW_LINE> <INDENT> pass
>>> import wafo.data >>> import wafo.objects as wo >>> x = wafo.data.sea() >>> ts = wo.mat2timeseries(x) >>> ts.sampling_period() 0.25 Estimate spectrum >>> S = ts.tospecdata() >>> S.data[:10] array([ 0.00913087, 0.00881073, 0.00791944, 0.00664244, 0.00522429, 0.00389816, 0.00282753, 0.00207843, 0.00162678, 0.0013916 ]) Estimated covariance function >>> rf = ts.tocovdata(lag=150) >>> rf.data[:10] array([ 0.22368637, 0.20838473, 0.17110733, 0.12237803, 0.07024054, 0.02064859, -0.02218831, -0.0555993 , -0.07859847, -0.09166187])
625941b4aad79263cf390812
def no_file_warning(file): <NEW_LINE> <INDENT> messagebox.showwarning("Not Found", file + " could not be opened")
Warn if the given file path does not exist
625941b49b70327d1c4e0bac
def _disallow_batch_hooks_in_ps_strategy(self): <NEW_LINE> <INDENT> strategy = tf.distribute.get_strategy() <NEW_LINE> if strategy._should_use_with_coordinator: <NEW_LINE> <INDENT> unsupported_callbacks = [] <NEW_LINE> for cb in self.callbacks: <NEW_LINE> <INDENT> if getattr(cb, '_supports_tf_logs', False): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if (cb._implements_train_batch_hooks() or cb._implements_test_batch_hooks() or cb._implements_predict_batch_hooks()): <NEW_LINE> <INDENT> unsupported_callbacks.append(cb) <NEW_LINE> <DEDENT> <DEDENT> if unsupported_callbacks: <NEW_LINE> <INDENT> raise ValueError('Batch-level `Callback`s are not supported with ' '`ParameterServerStrategy`. Found unsupported ' 'callbacks: {}'.format(unsupported_callbacks))
Error out if batch-level callbacks are passed with PSStrategy.
625941b40c0af96317bb7fc1
def test_read_default(self): <NEW_LINE> <INDENT> mock = MagicMock() <NEW_LINE> with patch.dict(macdefaults.__salt__, {'cmd.run': mock}): <NEW_LINE> <INDENT> macdefaults.read('com.apple.CrashReporter', 'Crash') <NEW_LINE> mock.assert_called_once_with('defaults read "com.apple.CrashReporter" "Crash"', runas=None)
Test reading a default setting
625941b4187af65679ca4efc
def setup(self, static, moving): <NEW_LINE> <INDENT> self.static = static <NEW_LINE> self.moving = moving
Setup static and moving sets of streamlines. Parameters ---------- static : streamlines Fixed or reference set of streamlines. moving : streamlines Moving streamlines. Notes ----- Call this after the object is initiated and before distance. Num_threads is not used in this class. Use ``BundleMinDistanceMetric`` for a faster, threaded and less memory hungry metric
625941b4d18da76e235322a8
def setUp(self): <NEW_LINE> <INDENT> self.table_names = [f for f in os.listdir('python/tests/tables/') if os.path.isfile(os.path.join('python/tests/tables/', f))] <NEW_LINE> self.doc_names = [f for f in os.listdir('python/tests/docs/') if os.path.isfile(os.path.join('python/tests/docs/', f))] <NEW_LINE> self.sbtab_docs = [] <NEW_LINE> self.convert_document_objects = [] <NEW_LINE> for i, t in enumerate(self.table_names): <NEW_LINE> <INDENT> if t.startswith('_'): continue <NEW_LINE> p = open('python/tests/tables/' + t, 'r') <NEW_LINE> p_content = p.read() <NEW_LINE> sbtab_doc = SBtab.SBtabDocument('test_' + str(i), sbtab_init=p_content, filename=t) <NEW_LINE> if 'Reaction' in sbtab_doc.type_to_sbtab.keys() or 'Compound' in sbtab_doc.type_to_sbtab.keys(): <NEW_LINE> <INDENT> conv = sbtab2sbml.SBtabDocument(sbtab_doc) <NEW_LINE> self.convert_document_objects.append(conv) <NEW_LINE> self.sbtab_docs.append(sbtab_doc) <NEW_LINE> <DEDENT> p.close() <NEW_LINE> <DEDENT> for i, d in enumerate(self.doc_names): <NEW_LINE> <INDENT> if not d.startswith('_'): <NEW_LINE> <INDENT> p = open('python/tests/docs/' + d, 'r') <NEW_LINE> p_content = p.read() <NEW_LINE> sbtab_doc = SBtab.SBtabDocument('test_'+str(i),sbtab_init=p_content, filename=d) <NEW_LINE> if 'Reaction' in sbtab_doc.type_to_sbtab.keys() or 'Compound' in sbtab_doc.type_to_sbtab.keys(): <NEW_LINE> <INDENT> conv = sbtab2sbml.SBtabDocument(sbtab_doc) <NEW_LINE> self.convert_document_objects.append(conv) <NEW_LINE> self.sbtab_docs.append(sbtab_doc) <NEW_LINE> <DEDENT> p.close()
setup SBtabTable class with files from test directory
625941b431939e2706e4cc49
def grad2deg(grad): <NEW_LINE> <INDENT> return (grad * 9) / 10
grad2deg(100.00) --> 90.0
625941b4d268445f265b4c4d
def runSimulation(N,gridX,gridY,gridP): <NEW_LINE> <INDENT> results = [] <NEW_LINE> for take in range(N): <NEW_LINE> <INDENT> grid = Grid(gridX,gridY,gridP) <NEW_LINE> results.append(grid.letItRain()) <NEW_LINE> <DEDENT> return sum(results)/float(len(results))
Runs N rain simulations: builds a fresh Grid(gridX, gridY, gridP) each run, lets water propagate through it from top to bottom via letItRain(), and returns the average of the N results
625941b43d592f4c4ed1ce59
def amber_energies(prmtop, crd, input, amb_path): <NEW_LINE> <INDENT> logger.info('Evaluating energy of {0}'.format(crd)) <NEW_LINE> directory, _ = os.path.split(os.path.abspath(prmtop)) <NEW_LINE> if not prmtop: <NEW_LINE> <INDENT> prmtop = os.path.join(directory, 'parm.prmtop') <NEW_LINE> <DEDENT> if not crd: <NEW_LINE> <INDENT> crd = os.path.join(directory, 'ener.edr') <NEW_LINE> <DEDENT> mdout = os.path.join(directory, 'amber.out') <NEW_LINE> stdout_path = os.path.join(directory, 'amber_stdout.txt') <NEW_LINE> stderr_path = os.path.join(directory, 'amber_stderr.txt') <NEW_LINE> islastbin = os.path.basename(os.path.normpath(amb_path)) <NEW_LINE> if islastbin == 'sander': <NEW_LINE> <INDENT> amber_bin = amb_path <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> amber_bin = os.path.join(amb_path, 'sander') <NEW_LINE> <DEDENT> if not which(amber_bin): <NEW_LINE> <INDENT> raise IOError('Unable to find AMBER executable (sander).') <NEW_LINE> <DEDENT> cmd = [amber_bin, '-i', input, '-c', crd, '-p', prmtop, '-o', mdout, '-O'] <NEW_LINE> proc = run_subprocess(cmd, 'amber', stdout_path, stderr_path) <NEW_LINE> if proc.returncode != 0: <NEW_LINE> <INDENT> logger.error('sander failed. See %s' % stderr_path) <NEW_LINE> <DEDENT> return _group_energy_terms(mdout)
Compute single-point energies using AMBER. Args: prmtop (str): crd (str): input (str) amb_path (str): Returns: e_out: ener_xvg:
625941b43eb6a72ae02ec2b2
def inuse(self): <NEW_LINE> <INDENT> return self.inuse_bit_at_offset(self.size & ~SIZE_BITS)
extract p's inuse bit
625941b4498bea3a759b988a
def __init__(self, hash: chainhash.Hash, height: int, bits: int, block_size: int, block_weight: int, num_txns: int, total_txns: int, median_time: int): <NEW_LINE> <INDENT> self.hash = hash <NEW_LINE> self.height = height <NEW_LINE> self.bits = bits <NEW_LINE> self.block_size = block_size <NEW_LINE> self.block_weight = block_weight <NEW_LINE> self.num_txns = num_txns <NEW_LINE> self.total_txns = total_txns <NEW_LINE> self.median_time = median_time
:param hash: chainhash.Hash (Go: *chainhash.Hash) :param height: int (Go: int32) :param bits: int (Go: uint32) :param block_size: int (Go: uint64) :param block_weight: int (Go: uint64) :param num_txns: int (Go: uint64) :param total_txns: int (Go: uint64) :param median_time: int (Go: time.Time)
625941b44527f215b584c234
def init_kernel(self): <NEW_LINE> <INDENT> shell_stream = ZMQStream(self.shell_socket) <NEW_LINE> control_stream = ZMQStream(self.control_socket) <NEW_LINE> kernel_factory = self.kernel_class.instance <NEW_LINE> kernel = kernel_factory(parent=self, session=self.session, shell_streams=[shell_stream, control_stream], iopub_thread=self.iopub_thread, iopub_socket=self.iopub_socket, stdin_socket=self.stdin_socket, log=self.log, profile_dir=self.profile_dir, user_ns=self.user_ns, ) <NEW_LINE> kernel.record_ports(self.ports) <NEW_LINE> self.kernel = kernel
Create the Kernel object itself
625941b4b7558d58953c4cf4
def test_get_schema(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> Draft4Validator.check_schema(snapshot.SnapshotView.GET_SCHEMA) <NEW_LINE> schema_valid = True <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> schema_valid = False <NEW_LINE> <DEDENT> self.assertTrue(schema_valid)
The schema defined for GET on base end point is valid
625941b48a43f66fc4b53e42
def test_main_discover(mocker, mock_config, mock_context): <NEW_LINE> <INDENT> mocker.patch( 'tap_intacct.singer.utils.parse_args', return_value=argparse.Namespace(config=mock_config, discover=True, state=None), ) <NEW_LINE> mock_do_discover = mocker.patch('tap_intacct.do_discover', autospec=True) <NEW_LINE> tap_intacct.main() <NEW_LINE> mock_context.config.update.assert_called_once_with(mock_config) <NEW_LINE> mock_context.state.update.assert_not_called() <NEW_LINE> mock_do_discover.assert_called_once()
Ensure that the correct functions are called when tap is executed in discovery mode.
625941b41b99ca400220a888
def edit_reservation(hotel_id, database): <NEW_LINE> <INDENT> _, options = get_reservations(hotel_id, database) <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user_choice = int( input('Which reservation do you want to manage? >')) <NEW_LINE> try: <NEW_LINE> <INDENT> reservation_id = options[user_choice] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> print('Sorry, that wasn\'t an option. Please try again.') <NEW_LINE> continue <NEW_LINE> <DEDENT> if reservation_id not in options: <NEW_LINE> <INDENT> print('Sorry, that wasn\'t an option. Please try again.') <NEW_LINE> continue <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> print('Sorry, that didn\'t work. Please try again.') <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> reservation = get_reservation(reservation_id, hotel_id, database) <NEW_LINE> reservation = Reservation(reservation[0], database) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> raise ReservationDoesNotExist( 'Sorry that reservation could not be found.') <NEW_LINE> <DEDENT> reservation_edit = int(input( 'Which part of the reservation do you want to edit?\n1. Check In Date, \n2. Check Out Date, \n3. Early Check In \n4. Late Check Out\n5. Cancel Reservation\n>')) <NEW_LINE> if reservation_edit == 1: <NEW_LINE> <INDENT> check_in = input('Enter your check in date (Jun 10 2020): ') <NEW_LINE> check_in = datetime.datetime.strptime(check_in, '%b %d %Y') <NEW_LINE> check_in = check_in.replace(hour=13, minute=00) <NEW_LINE> reservation.edit_check_in(check_in) <NEW_LINE> <DEDENT> elif reservation_edit == 2: <NEW_LINE> <INDENT> days_staying = int(input("Enter how many days the guest be staying >")) <NEW_LINE> check_in = reservation.check_in <NEW_LINE> check_out = (reservation.check_in + datetime.timedelta(days=days_staying)) <NEW_LINE> reservation.edit_check_out(check_out) <NEW_LINE> <DEDENT> elif reservation_edit == 3: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> hours = int(input( 'How many hours early would you like to check in? No more than 2 hours allowed. >')) <NEW_LINE> if hours > 2: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> reservation.early_checkin(hours) <NEW_LINE> print('Your early check in has been approved.') <NEW_LINE> input("Press Enter to continue...") <NEW_LINE> break <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif reservation_edit == 4: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> hours = int(input( 'How many hours late would you like to check out? No more than 2 hours is allowed. >')) <NEW_LINE> if hours > 2: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> reservation.late_checkout(hours) <NEW_LINE> print('Your late check out has been approved.') <NEW_LINE> input("Press Enter to continue...") <NEW_LINE> break <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return reservation
Edits a reservation. Args: hotel_id(int): The id of the hotel the reservation belongs to. database(str): The database to connect to. Raises: ReservationDoesNotExist: Raised if the reservation cannot be found. Returns: (obj): A reservation object.
625941b45510c4643540f1d2
def sorted(self): <NEW_LINE> <INDENT> cache_sorted = self._cache.get('sorted') <NEW_LINE> if cache_sorted is not None: <NEW_LINE> <INDENT> return cache_sorted <NEW_LINE> <DEDENT> shallowElements = copy.copy(self._elements) <NEW_LINE> shallowEndElements = copy.copy(self._endElements) <NEW_LINE> s = copy.copy(self) <NEW_LINE> s._elements = shallowElements <NEW_LINE> s._endElements = shallowEndElements <NEW_LINE> for e in shallowElements + shallowEndElements: <NEW_LINE> <INDENT> s.coreSetElementOffset(e, self.elementOffset(e), addElement=True) <NEW_LINE> e.sites.add(s) <NEW_LINE> s.coreSelfActiveSite(e) <NEW_LINE> <DEDENT> s.sort() <NEW_LINE> self._cache['sorted'] = s <NEW_LINE> return s
(TL;DR: you probably do not need to call this method unless you have turned `.autoSort` to off.) Returns a new Stream where all the elements are sorted according to offset time, then priority, then classSortOrder (so that, for instance, a Clef at offset 0 appears before a Note at offset 0). If this Stream is not flat, then only the elements directly in the stream itself are sorted. To sort all, run myStream.flatten().sorted(). Changed in v7 -- made into a method, not a property. For instance, here is an unsorted Stream: >>> s = stream.Stream() >>> s.autoSort = False # if True, sorting is automatic >>> s.insert(1, note.Note('D')) >>> s.insert(0, note.Note('C')) >>> s.show('text') {1.0} <music21.note.Note D> {0.0} <music21.note.Note C> But a sorted version of the Stream puts the C first: >>> s.sorted().show('text') {0.0} <music21.note.Note C> {1.0} <music21.note.Note D> While the original stream remains unsorted: >>> s.show('text') {1.0} <music21.note.Note D> {0.0} <music21.note.Note C> OMIT_FROM_DOCS >>> s = stream.Stream() >>> s.autoSort = False >>> s.repeatInsert(note.Note('C#'), [0, 2, 4]) >>> s.repeatInsert(note.Note('D-'), [1, 3, 5]) >>> s.isSorted False >>> g = '' >>> for myElement in s: ... g += '%s: %s; ' % (myElement.offset, myElement.name) >>> g '0.0: C#; 2.0: C#; 4.0: C#; 1.0: D-; 3.0: D-; 5.0: D-; ' >>> y = s.sorted() >>> y.isSorted True >>> g = '' >>> for myElement in y: ... g += '%s: %s; ' % (myElement.offset, myElement.name) >>> g '0.0: C#; 1.0: D-; 2.0: C#; 3.0: D-; 4.0: C#; 5.0: D-; ' >>> farRight = note.Note('E') >>> farRight.priority = 5 >>> farRight.offset = 2.0 >>> y.insert(farRight) >>> g = '' >>> for myElement in y: ... g += '%s: %s; ' % (myElement.offset, myElement.name) >>> g '0.0: C#; 1.0: D-; 2.0: C#; 3.0: D-; 4.0: C#; 5.0: D-; 2.0: E; ' >>> z = y.sorted() >>> g = '' >>> for myElement in z: ... g += '%s: %s; ' % (myElement.offset, myElement.name) >>> g '0.0: C#; 1.0: D-; 2.0: C#; 2.0: E; 3.0: D-; 4.0: C#; 5.0: D-; ' >>> z[2].name, z[3].name ('C#', 'E')
625941b4004d5f362079a110
def move(self, direction): <NEW_LINE> <INDENT> self.direction = direction <NEW_LINE> self.write("S")
Move the stepper motor continuously in the given direction until a stop command is sent or a limit switch is reached. This method corresponds to the 'slew' command in the DP series instrument manuals. :param direction: value to set on the direction property before moving the motor.
625941b47b180e01f3dc45e0
def tearDown(self): <NEW_LINE> <INDENT> db.session.remove() <NEW_LINE> db.drop_all()
Get rid of the database again after each test.
625941b43617ad0b5ed67cd7
def Process_Loading_Render_Painting_Network(self): <NEW_LINE> <INDENT> with open(self.trace, 'r') as content_file: <NEW_LINE> <INDENT> content = content_file.read() <NEW_LINE> <DEDENT> trace_events = json.loads(content) <NEW_LINE> for trace_event in trace_events: <NEW_LINE> <INDENT> trace_event = json.loads(trace_event) <NEW_LINE> try: <NEW_LINE> <INDENT> cat = trace_event['cat'] <NEW_LINE> name = trace_event['name'] <NEW_LINE> if (cat == 'devtools.timeline' and name == 'ParseHTML') or ( cat == 'blink,devtools.timeline' and name == 'ParseAuthorStyleSheet'): <NEW_LINE> <INDENT> self.loading_trace_events.append(trace_event) <NEW_LINE> <DEDENT> if (cat == 'disabled-by-default-devtools.timeline' or cat.find('devtools.timeline') >= 0) and name in ['CompositeLayers', 'Paint']: <NEW_LINE> <INDENT> self.painting_trace_events.append(trace_event) <NEW_LINE> <DEDENT> if (cat == 'devtools.timeline' or cat.find('devtools.timeline') >= 0) and name in ['Layout', 'UpdateLayerTree', 'HitTest', 'RecalculateStyles']: <NEW_LINE> <INDENT> self.rendering_trace_events.append(trace_event) <NEW_LINE> <DEDENT> if cat == 'devtools.timeline' and (name in ['ResourceSendRequest', 'ResourceReceiveResponse', 'ResourceReceivedData', 'ResourceFinish']): <NEW_LINE> <INDENT> self.network_trace_events.append(trace_event) <NEW_LINE> <DEDENT> if cat == 'netlog': <NEW_LINE> <INDENT> self.netlog_trace_events.append(trace_event) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> self.network_trace_events.sort(key=lambda my_trace_event: my_trace_event['ts']) <NEW_LINE> self.rendering_trace_events.sort(key=lambda my_trace_event: my_trace_event['ts']) <NEW_LINE> self.loading_trace_events.sort(key=lambda my_trace_event: my_trace_event['ts']) <NEW_LINE> self.painting_trace_events.sort(key=lambda my_trace_event: my_trace_event['ts']) <NEW_LINE> self.netlog_trace_events.sort(key=lambda my_trace_event: my_trace_event['ts']) <NEW_LINE> self.convertIdtoHex(self.netlog_trace_events)
Read the Chrome trace file and classify each trace event into the loading (ParseHTML, ParseAuthorStyleSheet), painting (CompositeLayers, Paint), rendering (Layout, UpdateLayerTree, HitTest, RecalculateStyles), network (ResourceSendRequest, ResourceReceiveResponse, ResourceReceivedData, ResourceFinish) and netlog buckets, then sort each bucket by its 'ts' timestamp.
625941b4293b9510aa2c3072
@pytest.mark.django_db <NEW_LINE> def test_empty_object_returned_with_authentication(api_client, activities_url): <NEW_LINE> <INDENT> sender = _auth_sender(activities_url) <NEW_LINE> response = api_client.get( activities_url, content_type='', HTTP_AUTHORIZATION=sender.request_header, HTTP_X_FORWARDED_FOR='1.2.3.4, 123.123.123.123', ) <NEW_LINE> assert response.status_code == status.HTTP_200_OK <NEW_LINE> assert response.json() == _empty_collection() <NEW_LINE> sender.accept_response( response_header=response['Server-Authorization'], content=response.content, content_type=response['Content-Type'], ) <NEW_LINE> with pytest.raises(mohawk.exc.MacMismatch): <NEW_LINE> <INDENT> sender.accept_response( response_header=response['Server-Authorization'] + 'incorrect', content=response.content, content_type=response['Content-Type'], ) <NEW_LINE> <DEDENT> with pytest.raises(mohawk.exc.MisComputedContentHash): <NEW_LINE> <INDENT> sender.accept_response( response_header=response['Server-Authorization'], content='incorrect', content_type=response['Content-Type'], ) <NEW_LINE> <DEDENT> with pytest.raises(mohawk.exc.MisComputedContentHash): <NEW_LINE> <INDENT> sender.accept_response( response_header=response['Server-Authorization'], content=response.content, content_type='incorrect', )
If the Authorization and X-Forwarded-For headers are correct, then the correct, and authentic, data is returned
625941b4f7d966606f6a9de1
def on_select(self, event): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.idx = int(self.listbox.curselection()[0]) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> self.idx = None <NEW_LINE> <DEDENT> if self.idx is not None: <NEW_LINE> <INDENT> self.included_lines_box.delete(0, tk.END) <NEW_LINE> if self.master.data.vuv_lines[self.master.system][self.idx].diagnostic_lines is not None: <NEW_LINE> <INDENT> self.included_lines_box.insert( 0, ', '.join( map( str, self.master.data.vuv_lines[self.master.system][self.idx].diagnostic_lines ) ) ) <NEW_LINE> <DEDENT> self.lam_lb_box.delete(0, tk.END) <NEW_LINE> if self.master.data.vuv_lines[self.master.system][self.idx].lam_lb is not None: <NEW_LINE> <INDENT> self.lam_lb_box.insert( 0, self.master.data.vuv_lines[self.master.system][self.idx].lam_lb ) <NEW_LINE> <DEDENT> self.lam_ub_box.delete(0, tk.END) <NEW_LINE> if self.master.data.vuv_lines[self.master.system][self.idx].lam_ub is not None: <NEW_LINE> <INDENT> self.lam_ub_box.insert( 0, self.master.data.vuv_lines[self.master.system][self.idx].lam_ub ) <NEW_LINE> <DEDENT> if self.master.data.vuv_lines[self.master.system][self.idx].t_lb is not None: <NEW_LINE> <INDENT> self.t_lb_box.delete(0, tk.END) <NEW_LINE> self.t_lb_box.insert( 0, self.master.data.vuv_lines[self.master.system][self.idx].t_lb ) <NEW_LINE> <DEDENT> if self.master.data.vuv_lines[self.master.system][self.idx].t_ub is not None: <NEW_LINE> <INDENT> self.t_ub_box.delete(0, tk.END) <NEW_LINE> self.t_ub_box.insert( 0, self.master.data.vuv_lines[self.master.system][self.idx].t_ub ) <NEW_LINE> <DEDENT> remove_all(self.master.plot_frame.l_final) <NEW_LINE> self.master.plot_frame.l_final = [] <NEW_LINE> if self.master.data.vuv_lines[self.master.system][self.idx].signal is not None: <NEW_LINE> <INDENT> self.master.plot_frame.l_final.append( self.master.plot_frame.a_final.plot( self.master.data.vuv_time[self.master.system], self.master.data.vuv_lines[self.master.system][self.idx].signal, 'k' ) ) <NEW_LINE> <DEDENT> self.master.plot_frame.canvas.draw()
Handle selection of a new line.
625941b45e10d32532c5ed08
def _parse_allotment_line(tr): <NEW_LINE> <INDENT> if not tr: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> tds = tr.find_all('td') <NEW_LINE> if tds and len(tds) >= 10: <NEW_LINE> <INDENT> return { '公告日期': tds[0].text.strip(), '配股方案(每10股配股股数)': tds[1].text.strip(), '配股价格(元)': tds[2].text.strip(), '基准股本(万股)': tds[3].text.strip(), '除权日': tds[4].text.strip(), '股权登记日': tds[5].text.strip(), '缴款起始日': tds[6].text.strip(), '缴款终止日': tds[7].text.strip(), '配股上市日': tds[8].text.strip(), '募集资金合计(元)': tds[9].text.strip() } <NEW_LINE> <DEDENT> return None
Parse one <tr> row of rights-issue (allotment) data
625941b4d10714528d5ffab7
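Given the find_all('td') call, the rows presumably come from BeautifulSoup; a hedged usage sketch with a synthetic HTML row, assuming the repaired function above is in scope:

from bs4 import BeautifulSoup

html = '<table><tr>' + ''.join('<td> v%d </td>' % i for i in range(10)) + '</tr></table>'
soup = BeautifulSoup(html, 'html.parser')
records = [_parse_allotment_line(tr) for tr in soup.find_all('tr')]
records = [r for r in records if r is not None]  # drop rows with too few cells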
def __init__(self, problem, deadline): <NEW_LINE> <INDENT> SimulatedAnnealing.__init__(self, problem, deadline) <NEW_LINE> self.heuristic = "all" <NEW_LINE> self.prob_l = self._get_problem_length()
Create an Algorithm according to the heuristic.
625941b492d797404e303f63
def source_element_initializer(self): <NEW_LINE> <INDENT> if self.dynamic_initializer: <NEW_LINE> <INDENT> return Statement(self.data_object_manager.get_namespace() + "::" + self.name + ".init()") <NEW_LINE> <DEDENT> return []
Builds a dynamic initialization statement. :returns: A Statement invoking the init() method of the object
625941b4a8370b771705267a
def tycat(*things): <NEW_LINE> <INDENT> print("[", Displayer.file_count, "]") <NEW_LINE> user = getpass.getuser() <NEW_LINE> directory = "/tmp/{}".format(user) <NEW_LINE> if not os.path.exists(directory): <NEW_LINE> <INDENT> os.makedirs(directory) <NEW_LINE> <DEDENT> filename = "{}/{}.svg".format(directory, str(Displayer.file_count).zfill(5)) <NEW_LINE> Displayer.file_count += 1 <NEW_LINE> size, svg_strings = compute_displays(things) <NEW_LINE> display = Displayer(size) <NEW_LINE> svg_file = display.open_svg(filename) <NEW_LINE> for string in svg_strings: <NEW_LINE> <INDENT> svg_file.write(string) <NEW_LINE> <DEDENT> display.close_svg(svg_file) <NEW_LINE> os.system("tycat {}".format(filename))
graphically displays all objects given. each argument will be displayed in a different color. requires : - the terminology terminal emulator - each object either implements * bounding_quadrant * svg_content or is an iterable on things implementing it.
625941b499cbb53fe67929c0
def get_nn_domain_from_constraints(nn_type, constraint_dict): <NEW_LINE> <INDENT> constraint_dict = copy(constraint_dict) <NEW_LINE> mandatory_keys = ['max_num_layers', 'max_mass'] <NEW_LINE> optional_key_vals = [('min_num_layers', 5), ('min_mass', 0), ('max_out_degree', np.inf), ('max_in_degree', np.inf), ('max_num_edges', np.inf), ('max_num_units_per_layer', 10001), ('min_num_units_per_layer', 5), ] <NEW_LINE> if nn_type.startswith('cnn'): <NEW_LINE> <INDENT> optional_key_vals += [('max_num_2strides', np.inf)] <NEW_LINE> <DEDENT> for mkey in mandatory_keys: <NEW_LINE> <INDENT> if mkey not in constraint_dict.keys(): <NEW_LINE> <INDENT> raise ValueError('Must specify keys %s in constraint_dict.'%( ', '.join(mandatory_keys))) <NEW_LINE> <DEDENT> <DEDENT> for okey, oval in optional_key_vals: <NEW_LINE> <INDENT> if okey not in constraint_dict.keys(): <NEW_LINE> <INDENT> constraint_dict[okey] = oval <NEW_LINE> <DEDENT> <DEDENT> if nn_type.startswith('cnn'): <NEW_LINE> <INDENT> cc_constructor = CNNConstraintChecker <NEW_LINE> <DEDENT> elif nn_type.startswith('mlp'): <NEW_LINE> <INDENT> cc_constructor = MLPConstraintChecker <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Unknown nn_type: %s.'%(nn_type)) <NEW_LINE> <DEDENT> cc_attributes = mandatory_keys + [okv[0] for okv in optional_key_vals] <NEW_LINE> constraint_dict_to_pass = {key: constraint_dict[key] for key in cc_attributes} <NEW_LINE> constraint_checker = cc_constructor(**constraint_dict_to_pass) <NEW_LINE> return NNDomain(nn_type, constraint_checker)
nn_type is the type of the network. See CNNConstraintChecker, MLPConstraintChecker, NNConstraintChecker constructors for args and kwargs.
625941b4b7558d58953c4cf5
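A sketch of a call, showing the two mandatory keys the function validates plus one optional override (the numbers are illustrative):

constraint_dict = {
    'max_num_layers': 20,   # mandatory
    'max_mass': 1e8,        # mandatory
    'min_num_layers': 5,    # optional; 5 is also the default
}
cnn_domain = get_nn_domain_from_constraints('cnn', constraint_dict)
mlp_domain = get_nn_domain_from_constraints('mlp', constraint_dict)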
def testAmenityIds(self): <NEW_LINE> <INDENT> self.assertIs(type(Place.amenity_ids), list) <NEW_LINE> self.assertIs(type(Place.amenity_ids[0]), str)
Tests the amenity_ids attribute
625941b4d58c6744b4257a39
def MakeEncoder(InputFunction,EncoderArchitecture): <NEW_LINE> <INDENT> inputEncoder=InputFunction <NEW_LINE> en=Dense(EncoderArchitecture[0])(inputEncoder) <NEW_LINE> en=Activation('relu')(en) <NEW_LINE> for j in range(len(EncoderArchitecture)-1): <NEW_LINE> <INDENT> en=Dense(EncoderArchitecture[j+1])(en) <NEW_LINE> en=Activation('relu')(en) <NEW_LINE> <DEDENT> en=Dense(latent_dim)(en) <NEW_LINE> output=Activation('relu')(en) <NEW_LINE> Encoder=Model(inputEncoder,output,name='Encoder') <NEW_LINE> return Encoder
Generates the encoder network using the functional API from Keras. It is intended as a wrapper function for TrainAutoencoder. InputFunction Input function from the keras functional API EncoderArchitecture A list with the number of dense units per layer; the length of the list is the number of layers in the network
625941b44f6381625f11481f
def report_version(self, data): <NEW_LINE> <INDENT> self.firmata_version.append(data[0]) <NEW_LINE> self.firmata_version.append(data[1])
This method processes the report version message, sent asynchronously by Firmata when it starts up or after refresh_report_version() is called Use the api method api_get_version to retrieve this information :param data: Message data from Firmata :return: No return value.
625941b4b830903b967e96f1
def DoDragDrop(self,data,allowedEffects): <NEW_LINE> <INDENT> pass
DoDragDrop(self: ToolStripItem,data: object,allowedEffects: DragDropEffects) -> DragDropEffects Begins a drag-and-drop operation. data: The object to be dragged. allowedEffects: The drag operations that can occur. Returns: One of the System.Windows.Forms.DragDropEffects values.
625941b431939e2706e4cc4a
def const_s(v): <NEW_LINE> <INDENT> result = Stream(v, lambda: result) <NEW_LINE> return result
The constant stream of Vs.
625941b46fb2d068a760ee7a
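The self-reference in const_s works because the thunk is only evaluated after result is bound; a minimal Stream class that makes the recursion observable (this class is an assumption, not the source's):

class Stream:
    def __init__(self, first, compute_rest):
        self.first = first
        self._compute_rest = compute_rest  # thunk: evaluated lazily

    @property
    def rest(self):
        return self._compute_rest()

def const_s(v):
    result = Stream(v, lambda: result)  # lambda closes over result
    return result

ones = const_s(1)
print(ones.first, ones.rest.first, ones.rest.rest.first)  # 1 1 1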
def test_sell_btc_jpy(): <NEW_LINE> <INDENT> time.sleep(1) <NEW_LINE> sell_value = 2 * market_info.get('bid') <NEW_LINE> o1 = order.Order(access_key=settings.access_key, secret_key=settings.secret_key) <NEW_LINE> ok_(o1.sell_btc_jpy(amount = 0.01, rate = sell_value).get('success'))
sell BTC
625941b41f037a2d8b945fd8
def to_indexer(self, cutoff=None, from_cutoff=True): <NEW_LINE> <INDENT> if from_cutoff: <NEW_LINE> <INDENT> return self.to_relative(cutoff).to_pandas() - 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> relative = self.to_relative(cutoff) <NEW_LINE> return relative - relative.to_pandas()[0]
Return zero-based indexer values for easy indexing into arrays. Parameters ---------- cutoff : pd.Period, pd.Timestamp, int, optional (default=None) Cutoff value required to convert a relative forecasting horizon to an absolute one and vice versa. from_cutoff : bool, optional (default=True) - If True, zero-based relative to cutoff. - If False, zero-based relative to first value in forecasting horizon. Returns ------- fh : pd.Index Indexer.
625941b4004d5f362079a111
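This appears to match sktime's ForecastingHorizon.to_indexer; a hedged usage sketch (import path and behavior assumed from recent sktime versions):

from sktime.forecasting.base import ForecastingHorizon

# A relative horizon needs no cutoff to become zero-based positions.
fh = ForecastingHorizon([1, 2, 3], is_relative=True)
print(fh.to_indexer())  # Index([0, 1, 2]) — positions into a prediction array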
def __init__(self, form, *children, use_csrf=True, standalone=True, **kwargs): <NEW_LINE> <INDENT> self.standalone = standalone <NEW_LINE> attributes = {"method": "POST", "autocomplete": "off"} <NEW_LINE> attributes.update(kwargs) <NEW_LINE> if ( attributes["method"].upper() == "POST" and use_csrf is not False and standalone is True ): <NEW_LINE> <INDENT> children = (CsrfToken(),) + children <NEW_LINE> <DEDENT> if self.standalone and "enctype" not in attributes: <NEW_LINE> <INDENT> attributes["enctype"] = "multipart/form-data" <NEW_LINE> <DEDENT> super().__init__( hg.WithContext( hg.If( form.non_field_errors(), hg.Iterator( form.non_field_errors(), "formerror", InlineNotification( _("Form error"), hg.C("formerror"), kind="error" ), ), ), hg.If( form.hidden_fields(), hg.Iterator( form.hidden_fields(), "hiddenfield", hg.Iterator( hg.C("hiddenfield").errors, "hiddenfield_error", InlineNotification( _("Hidden field error: "), hg.format( "{}: {}", hg.C("hiddenfield").name, hg.C("hiddenfield_error"), ), kind="error", ), ), ), ), *children, **{DEFAULT_FORM_CONTEXTNAME: form}, ), **attributes, )
form: lazy evaluated value which should resolve to the form object children: any child elements, can be formfields or other use_csrf: add a CSRF input, but only for POST submission and standalone forms standalone: if true, will add a CSRF token and will render enclosing FORM-element
625941b48c0ade5d55d3e798
def call_finished(location_uri, headers, timeout): <NEW_LINE> <INDENT> logger = logging.getLogger(__name__) <NEW_LINE> logger.debug(f"GET API call: {location_uri}, timeout {timeout} seconds and headers: {headers}") <NEW_LINE> response = requests.get(location_uri, headers=headers, proxies=get_proxies(location_uri), timeout=timeout) <NEW_LINE> if response is None: <NEW_LINE> <INDENT> raise Exception("API call failed") <NEW_LINE> <DEDENT> response.raise_for_status() <NEW_LINE> if response.status_code == 202: <NEW_LINE> <INDENT> return False, response <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True, response
:param location_uri: URI to check the status of the API call :param headers: HTTP headers :param timeout: connect timeout :return: (finished, response) tuple; finished is False while the call is still in progress (HTTP 202)
625941b48a349b6b435e7f4e
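A sketch of the polling loop this helper is presumably meant for; the URI and header values are hypothetical:

import time

location_uri = 'https://api.example.com/jobs/42'  # hypothetical status URI
headers = {'Authorization': 'Bearer <token>'}      # hypothetical headers

finished, response = call_finished(location_uri, headers, timeout=30)
while not finished:
    time.sleep(5)  # back off between status checks
    finished, response = call_finished(location_uri, headers, timeout=30)
print(response.status_code)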
def get_servers_input(self): <NEW_LINE> <INDENT> if not self.so: return <NEW_LINE> sockdata= str() <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> sockdata,addr= self.so.recvfrom(data_size) <NEW_LINE> sockdata = sockdata.decode('utf-8') <NEW_LINE> <DEDENT> except socket.error as emsg: <NEW_LINE> <INDENT> print('.', end=' ') <NEW_LINE> <DEDENT> if '***identified***' in sockdata: <NEW_LINE> <INDENT> print("Client connected on %d.............." % self.port) <NEW_LINE> continue <NEW_LINE> <DEDENT> elif '***shutdown***' in sockdata: <NEW_LINE> <INDENT> print((("Server has stopped the race on %d. "+ "You were in %d place.") % (self.port,self.S.d['racePos']))) <NEW_LINE> self.shutdown() <NEW_LINE> return <NEW_LINE> <DEDENT> elif '***restart***' in sockdata: <NEW_LINE> <INDENT> print("Server has restarted the race on %d." % self.port) <NEW_LINE> self.shutdown() <NEW_LINE> return <NEW_LINE> <DEDENT> elif not sockdata: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> a=self.S.parse_server_str(sockdata) <NEW_LINE> if self.debug: <NEW_LINE> <INDENT> sys.stderr.write("\x1b[2J\x1b[H") <NEW_LINE> print(self.S) <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> return a
Server's input is stored in a ServerState object
625941b450812a4eaa59c0ff
@click.command() <NEW_LINE> @decorators.catch_all <NEW_LINE> @decorators.json_output <NEW_LINE> @decorators.capture_usage <NEW_LINE> def inbox(ctx): <NEW_LINE> <INDENT> return _inbox(ctx.obj['config'], ctx.obj['client'])
View notifications from 21.co.
625941b44a966d76dd550de4
def main(args=sys.argv[1:]): <NEW_LINE> <INDENT> opt = docopt(main.__doc__.strip(), args, options_first=True) <NEW_LINE> config_logging(opt['--verbose']) <NEW_LINE> if opt['check']: <NEW_LINE> <INDENT> check_backends(opt['--title']) <NEW_LINE> <DEDENT> elif opt['extract']: <NEW_LINE> <INDENT> handler = fulltext.get <NEW_LINE> if opt['--file']: <NEW_LINE> <INDENT> handler = _handle_open <NEW_LINE> <DEDENT> for path in opt['<path>']: <NEW_LINE> <INDENT> print(handler(path)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("don't know how to handle cmd")
Extract text from a file. Commands: extract - extract text from path check - make sure all deps are installed Usage: fulltext extract [-v] [-f] <path>... fulltext check [-t] Options: -f, --file Open file first. -t, --title Check deps for title. -v, --verbose More verbose output.
625941b4baa26c4b54cb0efd
def total_description_type_support(self, string): <NEW_LINE> <INDENT> support = self.local_description_type_support(string) <NEW_LINE> return round((support + self.activation) / 2.0)
Return the total description type support with string. A function of the local description type support and the node's activation.
625941b4283ffb24f3c556e7
def append_df_to_excel(filename, df, sheet_name='Sheet1', startrow=None, truncate_sheet=False, **to_excel_kwargs): <NEW_LINE> <INDENT> from openpyxl import load_workbook <NEW_LINE> import pandas as pd <NEW_LINE> if 'engine' in to_excel_kwargs: <NEW_LINE> <INDENT> to_excel_kwargs.pop('engine') <NEW_LINE> <DEDENT> writer = pd.ExcelWriter(filename, engine='openpyxl') <NEW_LINE> try: <NEW_LINE> <INDENT> writer.book = load_workbook(filename) <NEW_LINE> if startrow is None and sheet_name in writer.book.sheetnames: <NEW_LINE> <INDENT> startrow = writer.book[sheet_name].max_row <NEW_LINE> <DEDENT> if truncate_sheet and sheet_name in writer.book.sheetnames: <NEW_LINE> <INDENT> idx = writer.book.sheetnames.index(sheet_name) <NEW_LINE> writer.book.remove(writer.book.worksheets[idx]) <NEW_LINE> writer.book.create_sheet(sheet_name, idx) <NEW_LINE> <DEDENT> writer.sheets = {ws.title:ws for ws in writer.book.worksheets} <NEW_LINE> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if startrow is None: <NEW_LINE> <INDENT> startrow = 0 <NEW_LINE> <DEDENT> df.to_excel(writer, sheet_name, startrow=startrow, **to_excel_kwargs) <NEW_LINE> writer.save()
Append a DataFrame [df] to existing Excel file [filename] into [sheet_name] Sheet. If [filename] doesn't exist, then this function will create it. Parameters: filename : File path or existing ExcelWriter (Example: '/path/to/file.xlsx') df : dataframe to save to workbook sheet_name : Name of sheet which will contain DataFrame. (default: 'Sheet1') startrow : upper left cell row to dump data frame. Per default (startrow=None) calculate the last row in the existing DF and write to the next row... truncate_sheet : truncate (remove and recreate) [sheet_name] before writing DataFrame to Excel file to_excel_kwargs : arguments which will be passed to `DataFrame.to_excel()` [can be dictionary] Returns: None
625941b4d7e4931a7ee9dcf5
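A short usage sketch (the file path and frames are illustrative; index/header are forwarded to DataFrame.to_excel):

import pandas as pd

df1 = pd.DataFrame({'a': [1, 2], 'b': [3, 4]})
df2 = pd.DataFrame({'a': [5], 'b': [6]})

# First call creates report.xlsx and writes df1 at row 0.
append_df_to_excel('report.xlsx', df1, sheet_name='data', index=False)
# Second call appends df2 below the rows written by the first call.
append_df_to_excel('report.xlsx', df2, sheet_name='data', index=False, header=False)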
def bindAnim(self, animName, partName = None, lodName = None, allowAsyncBind = False): <NEW_LINE> <INDENT> self.getAnimControls(animName = animName, partName = partName, lodName = lodName, allowAsyncBind = allowAsyncBind)
Binds the named animation to the named part and/or lod. If allowAsyncBind is False, this guarantees that the animation is bound immediately--the animation is never bound in a sub-thread; it will be loaded and bound in the main thread, so it will be available by the time this method returns. The parameters are the same as that for getAnimControls(). In fact, this method is a thin wrapper around that other method. Use this method if you need to ensure that an animation is available before you start to play it, and you don't mind holding up the render for a frame or two until the animation is available.
625941b44e696a04525c922e
def Qmix_mixer_checkers(agent_qs, state_env, state, goals_all, state_dim, goal_dim, n_agents, f1=4, k1=[3,5]): <NEW_LINE> <INDENT> conv = convnet_1(state_env, f1=f1, k1=k1, s1=[1,1], scope='conv') <NEW_LINE> embed_dim = 128 <NEW_LINE> state_goals_dim = state_dim + n_agents*goal_dim + conv.get_shape().as_list()[1] <NEW_LINE> agent_qs_reshaped = tf.reshape(agent_qs, [-1, 1, n_agents]) <NEW_LINE> state_goals = tf.concat([conv, state, goals_all], axis=1) <NEW_LINE> hyper_w_1 = get_variable('hyper_w_1', [state_goals_dim, embed_dim*n_agents]) <NEW_LINE> hyper_w_final = get_variable('hyper_w_final', [state_goals_dim, embed_dim]) <NEW_LINE> hyper_b_1 = tf.get_variable('hyper_b_1', [state_goals_dim, embed_dim]) <NEW_LINE> hyper_b_final_l1 = tf.layers.dense(inputs=state_goals, units=embed_dim, activation=tf.nn.relu, use_bias=False, name='hyper_b_final_l1') <NEW_LINE> hyper_b_final = tf.layers.dense(inputs=hyper_b_final_l1, units=1, activation=None, use_bias=False, name='hyper_b_final') <NEW_LINE> w1 = tf.abs(tf.matmul(state_goals, hyper_w_1)) <NEW_LINE> b1 = tf.matmul(state_goals, hyper_b_1) <NEW_LINE> w1_reshaped = tf.reshape(w1, [-1, n_agents, embed_dim]) <NEW_LINE> b1_reshaped = tf.reshape(b1, [-1, 1, embed_dim]) <NEW_LINE> hidden = tf.nn.elu(tf.matmul(agent_qs_reshaped, w1_reshaped) + b1_reshaped) <NEW_LINE> w_final = tf.abs(tf.matmul(state_goals, hyper_w_final)) <NEW_LINE> w_final_reshaped = tf.reshape(w_final, [-1, embed_dim, 1]) <NEW_LINE> b_final_reshaped = tf.reshape(hyper_b_final, [-1, 1, 1]) <NEW_LINE> y = tf.matmul(hidden, w_final_reshaped) + b_final_reshaped <NEW_LINE> q_tot = tf.reshape(y, [-1, 1]) <NEW_LINE> return q_tot
Args: agent_qs: shape [batch, n_agents] state_env: shape [batch, rows, cols, channels] state: shape [batch, state_dim] goals_all: shape [batch, n_agents*goal_dim]
625941b430bbd722463cbb9c
@pypub.command() <NEW_LINE> def changelog(): <NEW_LINE> <INDENT> shell(f"{Publisher.CMD_CHANGELOG} -u | less")
Preview the changelog.
625941b49f2886367277a671
def get_conn(self): <NEW_LINE> <INDENT> if self.conn_queue.qsize() == 0: <NEW_LINE> <INDENT> cn = self._build_conn() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> cn = self.conn_queue.get_nowait() <NEW_LINE> if not cn.stat().startswith('Uptime:'): <NEW_LINE> <INDENT> cn.close() <NEW_LINE> del cn <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> cn = self._build_conn() <NEW_LINE> <DEDENT> <DEDENT> return cn
Get a MySQL connection: reuse a live pooled connection if available, otherwise build a new one
625941b47b180e01f3dc45e1
def _set_status(self, status): <NEW_LINE> <INDENT> self._lock.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> self._status = status <NEW_LINE> if (status > 0) and self._context._verbose: <NEW_LINE> <INDENT> action = STATUSMAP[status] <NEW_LINE> print("Yoton: %s at %s:%s." % (action, self._hostname1, self._port1)) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> self._lock.release()
Used to change the status. Subclasses can reimplement this to get the desired behavior.
625941b426238365f5f0ec42
def get_group(self, user, user_partition, assign=True): <NEW_LINE> <INDENT> return user_partition.scheme.get_group_for_user( self._course_id, user, user_partition, assign=assign, )
Returns the group from the specified user partition to which the user is assigned. If the user has not yet been assigned, a group will be chosen for them based upon the partition's scheme.
625941b42eb69b55b151c683
def limit_to_chromosomes(df, chromosomes): <NEW_LINE> <INDENT> if chromosomes: <NEW_LINE> <INDENT> df = df[df['Chromosome'].isin(chromosomes)] <NEW_LINE> <DEDENT> return df
Returns only records that are in chromosomes (if "chromosomes" is not empty)
625941b445492302aab5e099
def deQueue(self) -> bool: <NEW_LINE> <INDENT> if self.isEmpty(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self.queue[self.front] = None <NEW_LINE> if self.front != self.rear: <NEW_LINE> <INDENT> self.front = (self.front + 1) % self.max <NEW_LINE> <DEDENT> return True
Delete an element from the circular queue. Return true if the operation is successful.
625941b4be383301e01b526f
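deQueue only makes sense against the rest of the class; a minimal consistent reconstruction of the surrounding state (my sketch, inferred from the fields deQueue touches — not the source's actual class):

class MyCircularQueue:
    def __init__(self, k):
        self.max = k
        self.queue = [None] * k  # None marks an empty slot
        self.front = 0
        self.rear = 0            # rear points at the last occupied slot

    def isEmpty(self):
        return self.queue[self.front] is None

    def isFull(self):
        return (not self.isEmpty()
                and (self.rear + 1) % self.max == self.front)

    def enQueue(self, value):
        if self.isFull():
            return False
        if not self.isEmpty():
            self.rear = (self.rear + 1) % self.max
        self.queue[self.rear] = value
        return True

    def deQueue(self):
        # Verbatim logic from the record above.
        if self.isEmpty():
            return False
        self.queue[self.front] = None
        if self.front != self.rear:
            self.front = (self.front + 1) % self.max
        return True

q = MyCircularQueue(3)
print(q.enQueue(1), q.enQueue(2), q.deQueue(), q.isEmpty())  # True True True False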
def in_place_qs(lis, lo=0, hi=None): <NEW_LINE> <INDENT> if hi is None: <NEW_LINE> <INDENT> hi = len(lis) - 1 <NEW_LINE> <DEDENT> if lo >= hi: <NEW_LINE> <INDENT> return lis <NEW_LINE> <DEDENT> pivot = lis[lo] <NEW_LINE> l, r = lo + 1, hi <NEW_LINE> while l <= r: <NEW_LINE> <INDENT> if lis[l] <= pivot: <NEW_LINE> <INDENT> l += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lis[r], lis[l] = lis[l], lis[r] <NEW_LINE> r -= 1 <NEW_LINE> <DEDENT> <DEDENT> lis[lo], lis[r] = lis[r], lis[lo] <NEW_LINE> in_place_qs(lis, lo, r - 1) <NEW_LINE> in_place_qs(lis, r + 1, hi) <NEW_LINE> return lis
In-place quicksort: partition lis[lo..hi] around lis[lo], then recursively sort both partitions. Returns the same, now sorted, list.
625941b485dfad0860c3ac32
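A quick check of the repaired sort, duplicates included:

data = [3, 6, 1, 5, 2, 4, 2]
in_place_qs(data)
print(data)  # [1, 2, 2, 3, 4, 5, 6]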
def test_never_mutate(self): <NEW_LINE> <INDENT> mutator = ConversionMutation(mutation_rate = 0.0) <NEW_LINE> self._never_mutate(mutator)
Make sure we do not mutate at unexpected times.
625941b48e71fb1e9831d588
def update(self, message): <NEW_LINE> <INDENT> self.name = str(message.name) <NEW_LINE> if message.HasField('inherit'): <NEW_LINE> <INDENT> self.inherit = bool(message.inherit) <NEW_LINE> <DEDENT> if message.HasField('inherited'): <NEW_LINE> <INDENT> self.inherited = bool(message.inherited) <NEW_LINE> <DEDENT> if message.HasField('inheritable'): <NEW_LINE> <INDENT> self.inheritable = bool(message.inheritable) <NEW_LINE> <DEDENT> if message.add: <NEW_LINE> <INDENT> for user in message.add: <NEW_LINE> <INDENT> self.add.append(int(user)) <NEW_LINE> <DEDENT> <DEDENT> if message.remove: <NEW_LINE> <INDENT> for user in message.remove: <NEW_LINE> <INDENT> self.remove.append(int(user)) <NEW_LINE> <DEDENT> <DEDENT> if message.inherited_members: <NEW_LINE> <INDENT> for user in message.inherited_members: <NEW_LINE> <INDENT> self.inherited_members.append(int(user))
Update ACL information based on the incoming message
625941b499cbb53fe67929c1
def epcr_parse(self): <NEW_LINE> <INDENT> logging.info('Parsing ePCR outputs') <NEW_LINE> for sample in self.metadata: <NEW_LINE> <INDENT> if sample.general.bestassemblyfile != 'NA': <NEW_LINE> <INDENT> toxin_set = set() <NEW_LINE> if os.path.isfile(sample[self.analysistype].resultsfile): <NEW_LINE> <INDENT> with open(sample[self.analysistype].resultsfile) as epcrresults: <NEW_LINE> <INDENT> for result in epcrresults: <NEW_LINE> <INDENT> if "#" not in result: <NEW_LINE> <INDENT> data = result.split('\t') <NEW_LINE> vttype = data[0].split('_')[0] <NEW_LINE> toxin_set.add(vttype) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> sample[self.analysistype].toxinprofile = ";".join(sorted(list(toxin_set))) if toxin_set else 'ND' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setattr(sample, self.analysistype, GenObject()) <NEW_LINE> sample[self.analysistype].toxinprofile = 'ND'
Parse the ePCR outputs
625941b430bbd722463cbb9d
def __scan_ports(self, ip, delay, message): <NEW_LINE> <INDENT> output = {} <NEW_LINE> thread = threading.Thread(target=self.__scan_ports_helper, args=(ip, delay, output, message)) <NEW_LINE> thread.start() <NEW_LINE> while len(output) < len(self.target_ports): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for port in self.target_ports: <NEW_LINE> <INDENT> if output[port] == 'OPEN': <NEW_LINE> <INDENT> print('{}: {}\n'.format(port, output[port])) <NEW_LINE> <DEDENT> <DEDENT> return output
Controller of the __scan_ports_helper() function :param ip: the ip address that is being scanned :type ip: str :param delay: the time in seconds that a TCP socket waits until timeout :type delay: int :param message: the message included in the scanning packets to flag their ethical intent; defaults to ''. :type message: str :return: a dict that stores results as {port: status} pairs, where status is 'OPEN' or 'CLOSE'.
625941b4596a8972360898a5
def align_labels(tiers_list, precision=None, regex=r'[^\s]+'): <NEW_LINE> <INDENT> if precision is not None: <NEW_LINE> <INDENT> precision_old = Time._precision <NEW_LINE> Time._precision = precision <NEW_LINE> <DEDENT> if len(tiers_list) < 2: <NEW_LINE> <INDENT> raise Exception('At least two tiers need to be provided') <NEW_LINE> <DEDENT> elif (not (all([isinstance(x, IntervalTier) for x in tiers_list]) or all([isinstance(x, PointTier) for x in tiers_list]))): <NEW_LINE> <INDENT> raise TypeError('Only objects of types IntervalTier or PointTier can be aligned.') <NEW_LINE> <DEDENT> elif len(set([len(x) for x in tiers_list])) > 1: <NEW_LINE> <INDENT> raise Exception('Input tiers differ in the number of objects.') <NEW_LINE> <DEDENT> labels_aligned = [] <NEW_LINE> for intervals in zip(*[x for x in tiers_list]): <NEW_LINE> <INDENT> start_times = [x.start_time for x in intervals] <NEW_LINE> end_times = [x.end_time for x in intervals] <NEW_LINE> labels = [x.text.strip() for x in intervals] <NEW_LINE> if any([not re.search(regex, x) for x in labels]): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> elif start_times.count(start_times[0]) != len(start_times): <NEW_LINE> <INDENT> raise Exception('Objects\' time stamps do not match: {0}'.format(start_times)) <NEW_LINE> <DEDENT> elif end_times.count(end_times[0]) != len(end_times): <NEW_LINE> <INDENT> raise Exception('Objects\' time stamps do not match: {0}'.format(end_times)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> labels_aligned.append(labels) <NEW_LINE> <DEDENT> <DEDENT> if precision is not None: <NEW_LINE> <INDENT> Time._precision = precision_old <NEW_LINE> <DEDENT> return labels_aligned
Create a list of lists for all time-aligned Interval or Point object in tiers_list, whose text matches regex. For example: [[label_1-tier_1, label_1-tier_2, label_1-tier_3], [label_2-tier_1, label_2-tier_2, label_2-tier_3], ... [label_n-tier_n, label_n-tier_n, label_n-tier_n]] The allowed mismatch between object's timestamps can be controlled via the precision parameter.
625941b466656f66f7cbbf84
def repo_contents(self, owner: str, repo: str, path: str, ref: str = 'master') -> dict: <NEW_LINE> <INDENT> return self.request('repos/{}/{}/contents/{}'.format(owner, repo, path), ref=ref)
Get repository's contents https://developer.github.com/v3/repos/contents/#get-contents
625941b4287bf620b61d384b
def highlight_text(raw_text_document, lexer): <NEW_LINE> <INDENT> highlighted_text = HighlightedText(raw_text_document, lexer) <NEW_LINE> document = TextDocumentModel() <NEW_LINE> text_block = TextBlock(raw_text_document.line_slice) <NEW_LINE> document.append(text_block) <NEW_LINE> for highlighted_fragment in highlighted_text: <NEW_LINE> <INDENT> text_fragment = TextFragment(raw_text_document.light_view(highlighted_fragment.slice), token_type=highlighted_fragment.token) <NEW_LINE> text_block.append(text_fragment) <NEW_LINE> <DEDENT> return document
Highlight a text. The parameter *raw_text_document* is a :class:`DiffViewer.RawTextDocument` instance and the parameter *lexer* is a Pygments lexer instance. Return an :class:`DiffViewer.TextDocumentModel` instance. The document has one text block that contains all the fragments. Text fragments use light views.
625941b497e22403b379cd72
def four_digits_num(n: int) -> int: <NEW_LINE> <INDENT> total = 0 <NEW_LINE> while n > 0: <NEW_LINE> <INDENT> total += n % 10 <NEW_LINE> n //= 10 <NEW_LINE> <DEDENT> return(total)
>>> four_digits_num(9000) 9 >>> four_digits_num(1234) 10 >>> four_digits_num(1925) 17
625941b4be8e80087fb20a29
def highlight(self, sleep_and_stop=2): <NEW_LINE> <INDENT> self.original_style = self.get_attribute('style') <NEW_LINE> self.set_attribute('style', '"border: 2px solid red;"') <NEW_LINE> if sleep_and_stop > 0: <NEW_LINE> <INDENT> sleep(sleep_and_stop) <NEW_LINE> self.set_attribute('style', self.original_style)
Turn the current element's border into solid red 2px, then restore the original style after 'sleep_and_stop' seconds of time.sleep. If 'sleep_and_stop' is 0, the border stays and there is no sleep. The original style is saved to self.original_style.
625941b48e7ae83300e4ada6
def _is_episode_terminated(self): <NEW_LINE> <INDENT> dist = self.distance_from_light() <NEW_LINE> done = self.current_energy <= self.min_energy or dist < self.min_light_distance or dist > self.max_light_distance <NEW_LINE> return bool(done)
Returns True if the episode is finished. :return: True if the termination criterion is met, False otherwise
625941b40c0af96317bb7fc3
def load_workspace(self, file_paths, merge=False, force_overwrite=False): <NEW_LINE> <INDENT> with show_busy(self._view): <NEW_LINE> <INDENT> ws_names = [os.path.splitext(os.path.basename(base))[0] for base in file_paths] <NEW_LINE> if merge: <NEW_LINE> <INDENT> if not self.file_types_match(file_paths): <NEW_LINE> <INDENT> self._view.error_merge_different_file_formats() <NEW_LINE> return <NEW_LINE> <DEDENT> ws_names = [ws_names[0] + '_merged'] <NEW_LINE> file_paths = ['+'.join(file_paths)] <NEW_LINE> <DEDENT> self._load_ws(file_paths, ws_names, force_overwrite)
Loads one or more workspaces. :param file_paths: list of paths to files to load :param merge: boolean - whether to combine files into a single workspace :param force_overwrite: int - 0: asks for overwrite, 1 always overwrite, -1 never overwrite
625941b494891a1f4081b882
def __init__(self,X,k=20): <NEW_LINE> <INDENT> self.X=np.array(X) <NEW_LINE> self.k=k <NEW_LINE> self.ave=np.mean(self.X[:,2]) <NEW_LINE> print("the input data size is ",self.X.shape ) <NEW_LINE> self.bi={} <NEW_LINE> self.bu={} <NEW_LINE> self.qi={} <NEW_LINE> self.pu={} <NEW_LINE> self.movie_user={} <NEW_LINE> self.user_movie={} <NEW_LINE> for i in range(self.X.shape[0]): <NEW_LINE> <INDENT> uid=self.X[i][0] <NEW_LINE> mid=self.X[i][1] <NEW_LINE> rat=self.X[i][2] <NEW_LINE> self.movie_user.setdefault(mid,{}) <NEW_LINE> self.user_movie.setdefault(uid,{}) <NEW_LINE> self.movie_user[mid][uid]=rat <NEW_LINE> self.user_movie[uid][mid]=rat <NEW_LINE> self.bi.setdefault(mid,0) <NEW_LINE> self.bu.setdefault(uid,0) <NEW_LINE> self.qi.setdefault(mid,random((self.k,1))/10*(np.sqrt(self.k))) <NEW_LINE> self.pu.setdefault(uid,random((self.k,1))/10*(np.sqrt(self.k)))
k is the length of the latent-factor vectors pu and qi
625941b4aad79263cf390815
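The __init__ above sets up a biased matrix-factorization model (global mean, user/item biases, k-dimensional latent vectors). A sketch of the standard prediction rule r_hat = mu + b_u + b_i + q_i^T p_u that such a model implies; the original class likely defines an equivalent method, so this one is illustrative:

import numpy as np

def predict(self, uid, mid):
    # Fall back to neutral parameters for unseen users/items.
    self.bi.setdefault(mid, 0)
    self.bu.setdefault(uid, 0)
    self.qi.setdefault(mid, np.zeros((self.k, 1)))
    self.pu.setdefault(uid, np.zeros((self.k, 1)))
    return self.ave + self.bi[mid] + self.bu[uid] + float(np.dot(self.qi[mid].T, self.pu[uid]))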
def __init__(self, k=2, n_estimators=50, max_features=5, min_features=2, scale_features=True, verbose=0, random_state=None, voting_rule="hard", **kwargs): <NEW_LINE> <INDENT> self.n_estimators = n_estimators <NEW_LINE> self.max_features = max_features <NEW_LINE> self.min_features = min_features <NEW_LINE> self.scale_features = scale_features <NEW_LINE> self.verbose = verbose <NEW_LINE> self._is_fitted = False <NEW_LINE> self.random_state = random_state <NEW_LINE> self.k = k <NEW_LINE> if voting_rule not in ("soft", "hard"): <NEW_LINE> <INDENT> raise ValueError("voting_rule must be 'soft' or 'hard'") <NEW_LINE> <DEDENT> self.voting_rule = voting_rule <NEW_LINE> if random_state: <NEW_LINE> <INDENT> self.np_random = np.random.RandomState(random_state) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.np_random = np.random.RandomState() <NEW_LINE> <DEDENT> kwargs["random_state"] = random_state <NEW_LINE> self._kmean_kwargs = kwargs
Parameters ---------- k : integer, optional (default=2) number of clusters in the k-means model n_estimators: integer, optional (default=50) number of k-means models to run max_features: integer, optional (default=5) maximum number of features to use in each model; capped if max_features is greater than the number of features in the data. min_features: integer, optional (default=2) minimum number of features to use in each model scale_features: boolean, optional (default=True) whether to scale each feature. If True, a StandardScaler is used to scale each feature based on the data in `X`. The same scaling is applied to `probes` and the test data voting_rule: string, optional (default="hard") available: "hard" and "soft" The voting rule for the ensemble prediction. Hard: prediction from each model is either 0 (False) or 1 (True) Soft: prediction from each model is a probability (between 0 and 1) The final prediction is the sum of the predictions from all models # to do: assert max > min
625941b4187af65679ca4efe
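The fit() method is not shown, but the min_features/max_features pair suggests each of the n_estimators sub-models clusters on a random feature subset. A plausible sketch of that sampling step, with a hypothetical helper name:

import numpy as np

def _sample_feature_subset(np_random, n_features, min_features, max_features):
    # Draw a subset size in [min_features, min(max_features, n_features)],
    # then pick that many distinct feature indices.
    upper = min(max_features, n_features)
    size = np_random.randint(min_features, upper + 1)
    return np_random.choice(n_features, size=size, replace=False)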
def is_permitted(body_request): <NEW_LINE> <INDENT> perm = whoami(body_request.get('authname')) <NEW_LINE> try: <NEW_LINE> <INDENT> login, permission, authed, tstamp, ip = perm[0], perm[1], perm[2], perm[3], perm[4] <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if login in permited_user and time() - float(tstamp) < 90: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
Tries to check the TTL, IP address and authname grabbed from the request body; access is then granted or forbidden
625941b44527f215b584c236
def health_check(cancelled_event): <NEW_LINE> <INDENT> health_url = '{0}_info/'.format(datalab_address) <NEW_LINE> healthy = False <NEW_LINE> print('Waiting for Datalab to be reachable at ' + datalab_address) <NEW_LINE> while not cancelled_event.is_set(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> health_resp = urllib2.urlopen(health_url) <NEW_LINE> if health_resp.getcode() == 200: <NEW_LINE> <INDENT> healthy = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> if healthy: <NEW_LINE> <INDENT> on_ready() <NEW_LINE> <DEDENT> return
Check if the Datalab instance is reachable via the connection. After the instance is reachable, the `on_ready` method is called. This method is meant to be suitable for running in a separate thread, and takes an event argument to indicate when that thread should exit. Args: cancelled_event: A threading.Event instance that indicates we should give up on the instance becoming reachable.
625941b41f5feb6acb0c492f
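Since health_check takes a cancellation event and loops until the instance is reachable, a natural usage pattern runs it on a background thread, as sketched here; datalab_address and on_ready are module-level names the snippet assumes:

import threading

cancelled = threading.Event()
probe = threading.Thread(target=health_check, args=(cancelled,))
probe.start()
try:
    probe.join(timeout=300)  # give the instance up to five minutes
finally:
    cancelled.set()          # stop the polling loop if we gave up first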
def logoff_and_openrecord(self, username, password, menu1, menu2, menu3, recordname): <NEW_LINE> <INDENT> self.goback() <NEW_LINE> self.switch_account(username, password) <NEW_LINE> self.close_message() <NEW_LINE> self.open_m(menu1, menu2, menu3) <NEW_LINE> self.openagain_record(recordname)
Log out of the current session, switch to another account, open the menu and reopen the record for the view
625941b423e79379d52ee343
def deleteAll(self, exchange_id, yes_I_mean_it=False): <NEW_LINE> <INDENT> if not yes_I_mean_it: <NEW_LINE> <INDENT> raise RuntimeError("Cowardly refusing to delete all an exch's history without the 'yes_I_mean_it' flag") <NEW_LINE> <DEDENT> sql = "DELETE FROM HistTrades WHERE exchange_id=?" <NEW_LINE> cur = self.db.cursor() <NEW_LINE> cur.execute(sql, (exchange_id,))
Delete ALL trades held for exchange with id `exchange_id`. This is useful when you're about to batch-load a new set.
625941b482261d6c526ab27d
def perform_pca(data, n_components=3, y=None): <NEW_LINE> <INDENT> features = data.copy(deep=True) <NEW_LINE> if (y is not None): <NEW_LINE> <INDENT> features = data.drop(y, axis=1) <NEW_LINE> <DEDENT> x = StandardScaler().fit_transform(features) <NEW_LINE> pca = PCA(n_components=n_components) <NEW_LINE> principalComponents = pca.fit_transform(x) <NEW_LINE> principal_comp = pd.DataFrame( data=principalComponents, columns=['Principal Component ' + str(i) for i in range(1, (n_components+1))]) <NEW_LINE> if (y is not None): <NEW_LINE> <INDENT> final_df = pd.concat([principal_comp, data[y]], axis=1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> final_df = principal_comp.copy(deep=True) <NEW_LINE> <DEDENT> print("% variance explained by each Principal Component: " + str(np.round(pca.explained_variance_ratio_ * 100, 2))) <NEW_LINE> print("% Total variance explained by all Principal Components: " + str(round(sum(pca.explained_variance_ratio_) * 100, 2)) + "%") <NEW_LINE> return (final_df)
data = DataFrame on which to perform PCA n_components = number of principal components to keep (default 3) y = optional name of a label column that is excluded from the PCA and re-attached to the result
625941b4004d5f362079a112
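A usage sketch for perform_pca on a labelled DataFrame; the iris data stands in for any numeric feature table:

import pandas as pd
from sklearn.datasets import load_iris

iris = load_iris(as_frame=True)
df = iris.frame.rename(columns={"target": "species"})

# Project the four measurements onto two components; the label column
# is dropped before scaling/PCA and re-attached to the result.
reduced = perform_pca(df, n_components=2, y="species")
print(reduced.head())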
@greet.command() <NEW_LINE> def list_available(): <NEW_LINE> <INDENT> indices.list_available_index_files()
List the indices available for download. A list is printed that shows which index locations have been implemented with their URLs for downloading.
625941b4be7bc26dc91cd3e1
def get_structuralresources_content_constraints(limit=25, offset=0, query='', orderby=''): <NEW_LINE> <INDENT> path = 'contentConstraints' <NEW_LINE> url = services.build_entrypoint_url( API, path, limit=limit, offset=offset, query=query, orderBy=orderby ) <NEW_LINE> return services.get_content(url)
Get content constraints This function returns the content from ``/v1.0/contentConstraints`` Args: limit (int): Results limit. By default ``limit = 25``. offset (int): Offset. Index of the first result to return. By default ``offset = 0``. query (string): Query to filter the results. orderby (string): Field by which to sort the results. Examples: >>> get_structuralresources_content_constraints() >>> get_structuralresources_content_constraints( ... query="ID EQ 2090", ... orderby="ID ASC" ... )
625941b45510c4643540f1d4
def lnprior(pars): <NEW_LINE> <INDENT> logprob = uniform_prior(pars[0], 0., np.inf) + uniform_prior(pars[1], -1, 5) <NEW_LINE> return logprob
Return probability of parameter values according to prior knowledge. Parameter limits should be enforced here through uniform prior distributions
625941b4a934411ee3751476
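lnprior relies on a uniform_prior helper that is not shown; a minimal sketch consistent with how it is summed into a log-probability (the real project may normalise differently):

import numpy as np

def uniform_prior(value, umin, umax):
    # Log of a flat prior: 0 inside [umin, umax], -inf outside,
    # so out-of-bounds parameters are rejected by the sampler.
    return 0.0 if umin <= value <= umax else -np.inf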
def GeneralFastOCR(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> params = request._serialize() <NEW_LINE> body = self.call("GeneralFastOCR", params) <NEW_LINE> response = json.loads(body) <NEW_LINE> if "Error" not in response["Response"]: <NEW_LINE> <INDENT> model = models.GeneralFastOCRResponse() <NEW_LINE> model._deserialize(response["Response"]) <NEW_LINE> return model <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> code = response["Response"]["Error"]["Code"] <NEW_LINE> message = response["Response"]["Error"]["Message"] <NEW_LINE> reqid = response["Response"]["RequestId"] <NEW_LINE> raise TencentCloudSDKException(code, message, reqid) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> if isinstance(e, TencentCloudSDKException): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TencentCloudSDKException(e.message, e.message)
This API detects and recognizes all text in an image, returning the positions of the text boxes and the recognized text. Compared with the general printed-text OCR API, it is faster. :param request: Request instance for GeneralFastOCR. :type request: :class:`tencentcloud.ocr.v20181119.models.GeneralFastOCRRequest` :rtype: :class:`tencentcloud.ocr.v20181119.models.GeneralFastOCRResponse`
625941b4e5267d203edcda7c
def _get_standard_pressure_levels2(self): <NEW_LINE> <INDENT> d = dict() <NEW_LINE> for sec, standard_pressure_level in self.data.standard_pressure_levels.items(): <NEW_LINE> <INDENT> d[standard_pressure_level.press] = standard_pressure_level <NEW_LINE> <DEDENT> return d
Return the same dict, but where keys are pressure levels instead of secs. This way we reduce the number of loops per pressure reading in the wind_data
625941b41b99ca400220a88b
def create(self): <NEW_LINE> <INDENT> self.elem = etree.makeEtreeElement(str(self.qname), SOAPHeaderBase.DEFAULT_ELEMENT_NS_PREFIX, SOAPHeaderBase.DEFAULT_ELEMENT_NS)
Create header ElementTree element
625941b45e10d32532c5ed0a
def _prepare_invoice(self): <NEW_LINE> <INDENT> vals = super()._prepare_invoice() <NEW_LINE> self._get_payment_mode_vals(vals) <NEW_LINE> return vals
Copy bank partner from sale order to invoice
625941b4ad47b63b2c509d66
def takepicture(self): <NEW_LINE> <INDENT> camera = cv2.VideoCapture(0) <NEW_LINE> while True: <NEW_LINE> <INDENT> check, image = camera.read() <NEW_LINE> cv2.imshow('image', image) <NEW_LINE> if cv2.waitKey(1) & 0xFF == ord('s'): <NEW_LINE> <INDENT> cv2.imwrite(self.textname.get("1.0", END).strip()+self.textsurname.get("1.0", END).strip()+'.jpg', image) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> camera.release() <NEW_LINE> cv2.destroyAllWindows()
takes a picture of a guest: shows the webcam feed and, when 's' is pressed, saves the frame as <name><surname>.jpg
625941b4ec188e330fd5a582
def get_olx_hash_for_definition_key(def_key): <NEW_LINE> <INDENT> if def_key.bundle_version: <NEW_LINE> <INDENT> files_list = get_bundle_version_files_cached(def_key.bundle_uuid, def_key.bundle_version) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> files_list = get_bundle_draft_files_cached(def_key.bundle_uuid, def_key.draft_name) <NEW_LINE> <DEDENT> for entry in files_list: <NEW_LINE> <INDENT> if entry.path == def_key.olx_path: <NEW_LINE> <INDENT> return entry.hash_digest <NEW_LINE> <DEDENT> <DEDENT> raise NoSuchDefinition(f"Could not load OLX file for key {def_key}")
Given a BundleDefinitionLocator, which identifies a specific version of an OLX file, return the hash of the OLX file as given by the Blockstore API.
625941b44428ac0f6e5ba5d4
@pytest.fixture() <NEW_LINE> def cleandir() -> str: <NEW_LINE> <INDENT> with tempfile.TemporaryDirectory() as tmpdirname: <NEW_LINE> <INDENT> old_dir = os.getcwd() <NEW_LINE> os.chdir(tmpdirname) <NEW_LINE> yield tmpdirname <NEW_LINE> os.chdir(old_dir)
This fixture will use the stdlib `tempfile` module to move the current working directory to a tmp-dir for the duration of the test. Afterwards, the session returns to its previous working directory, and the temporary directory and its contents are removed. Yields ------ str The name of the temporary directory.
625941b4d58c6744b4257a3b
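A usage sketch showing how a test consumes the cleandir fixture; the test body is illustrative:

import os

def test_writes_into_tmpdir(cleandir):
    # The fixture has already chdir'd into a fresh temporary directory.
    with open("scratch.txt", "w") as fh:
        fh.write("hello")
    assert os.path.samefile(os.getcwd(), cleandir)
    assert os.path.exists("scratch.txt")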
def test_coordinates(self): <NEW_LINE> <INDENT> pass
Ensure the POINTS respect the lat/long values from the returned data
625941b4ac7a0e7691ed3eb5
def __init__(self, feat_appear_limit=20): <NEW_LINE> <INDENT> self._dic = None <NEW_LINE> self._count = None <NEW_LINE> self._feat_appear_limit = feat_appear_limit
@feat_appear_limit: int
625941b4c4546d3d9de72813
def read_topology_from_file(file_name): <NEW_LINE> <INDENT> import json <NEW_LINE> with open(file_name, 'r') as f: <NEW_LINE> <INDENT> return json.loads(f.read())
Read saved topology. :param string file_name: Path to saved topology file. :return: dictionary in which agents names are the keys and lists of neighbours agents names are values. :rtype: dictionary
625941b430dc7b7665901746
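A round-trip sketch for read_topology_from_file, writing a small neighbour map to a temporary JSON file first; the file name and agent names are illustrative:

import json, os, tempfile

topology = {"agent_a": ["agent_b"], "agent_b": ["agent_a", "agent_c"], "agent_c": ["agent_b"]}
path = os.path.join(tempfile.gettempdir(), "topology.json")
with open(path, "w") as f:
    json.dump(topology, f)

neighbours = read_topology_from_file(path)
assert neighbours["agent_b"] == ["agent_a", "agent_c"]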
def compute_vanderijt_edge_balance( dgraph: nx.DiGraph) -> Dict[tuple, Dict[str, int]]: <NEW_LINE> <INDENT> edge_sign = {} <NEW_LINE> for edge in dgraph.edges(): <NEW_LINE> <INDENT> nodes3 = 0 <NEW_LINE> balanced_node3 = 0 <NEW_LINE> nodes = list(set(dgraph.nodes()) - set(edge)) <NEW_LINE> xij = dgraph.get_edge_data(edge[0], edge[1])['weight'] <NEW_LINE> for node in nodes: <NEW_LINE> <INDENT> if (dgraph.has_edge(edge[0], node) and dgraph.has_edge(node, edge[1])): <NEW_LINE> <INDENT> xik = dgraph.get_edge_data(edge[0], node)['weight'] <NEW_LINE> xkj = dgraph.get_edge_data(node, edge[1])['weight'] <NEW_LINE> nodes3 += 1 <NEW_LINE> if np.sign(xij * xik * xkj) > 0: <NEW_LINE> <INDENT> balanced_node3 += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if nodes3: <NEW_LINE> <INDENT> edge_sign[edge] = { '#balanced_node3': balanced_node3, '#nodes3': nodes3} <NEW_LINE> <DEDENT> <DEDENT> return edge_sign
Computes edge balance based on Van De Rijt (2011). Args: dgraph: Directed weighted graph to apply edge balance. Returns: Dictionary of edges mapped to the number of balanced and total number of triads the edge is involved in. Raises: None
625941b46fb2d068a760ee7c
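A worked example for compute_vanderijt_edge_balance: a directed triangle with two positive ties and one negative, so the single triad the edge ('a', 'b') participates in is unbalanced (the sign product is negative); the other two edges close no directed triad and are omitted:

import networkx as nx

g = nx.DiGraph()
g.add_edge("a", "b", weight=1)
g.add_edge("a", "c", weight=1)
g.add_edge("c", "b", weight=-1)

print(compute_vanderijt_edge_balance(g))
# -> {('a', 'b'): {'#balanced_node3': 0, '#nodes3': 1}}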
def fit(self, dataset, n_episodes_per_epoch=1000, nb_epochs=1, n_pos=1, n_neg=9, log_every_n_samples=10, **kwargs): <NEW_LINE> <INDENT> time_start = time.time() <NEW_LINE> n_tasks = len(dataset.get_task_names()) <NEW_LINE> n_test = self.test_batch_size <NEW_LINE> feed_total, run_total = 0, 0 <NEW_LINE> for epoch in range(nb_epochs): <NEW_LINE> <INDENT> episode_generator = EpisodeGenerator(dataset, n_pos, n_neg, n_test, n_episodes_per_epoch) <NEW_LINE> recent_losses = [] <NEW_LINE> for ind, (task, support, test) in enumerate(episode_generator): <NEW_LINE> <INDENT> if ind % log_every_n_samples == 0: <NEW_LINE> <INDENT> print("Epoch %d, Sample %d from task %s" % (epoch, ind, str(task))) <NEW_LINE> <DEDENT> feed_start = time.time() <NEW_LINE> feed_dict = self.construct_feed_dict(test, support) <NEW_LINE> feed_end = time.time() <NEW_LINE> feed_total += (feed_end - feed_start) <NEW_LINE> run_start = time.time() <NEW_LINE> _, loss = self.sess.run( [self.train_op, self.loss_op], feed_dict=feed_dict) <NEW_LINE> run_end = time.time() <NEW_LINE> run_total += (run_end - run_start) <NEW_LINE> if ind % log_every_n_samples == 0: <NEW_LINE> <INDENT> mean_loss = np.mean(np.array(recent_losses)) <NEW_LINE> print("\tmean loss is %s" % str(mean_loss)) <NEW_LINE> recent_losses = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> recent_losses.append(loss) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> time_end = time.time() <NEW_LINE> print("fit took %s seconds" % str(time_end - time_start)) <NEW_LINE> print("feed_total: %s" % str(feed_total)) <NEW_LINE> print("run_total: %s" % str(run_total))
Fits model on dataset using cached supports. For each epoch, sample n_episodes_per_epoch (support, test) pairs and do gradient descent on each. Parameters ---------- dataset: dc.data.Dataset Dataset to fit model on. nb_epochs: int, optional number of epochs of training. n_episodes_per_epoch: int, optional Number of (support, test) pairs to sample and train on per epoch. n_pos: int, optional Number of positive examples per support. n_neg: int, optional Number of negative examples per support. log_every_n_samples: int, optional Display info every this many samples
625941b4b830903b967e96f3
def updateTopologyViews(self, **kwargs): <NEW_LINE> <INDENT> allParams = ['applicationUuid', 'pageUuid', 'topologyViewDtoList'] <NEW_LINE> params = locals() <NEW_LINE> for (key, val) in list(params['kwargs'].items()): <NEW_LINE> <INDENT> if key not in allParams: <NEW_LINE> <INDENT> raise TypeError("Got an unexpected keyword argument '%s' to method updateTopologyViews" % key) <NEW_LINE> <DEDENT> params[key] = val <NEW_LINE> <DEDENT> del params['kwargs'] <NEW_LINE> resourcePath = '/topology/application/{applicationUuid}/page/{pageUuid}/view' <NEW_LINE> resourcePath = resourcePath.replace('{format}', 'json') <NEW_LINE> method = 'PUT' <NEW_LINE> queryParams = {} <NEW_LINE> headerParams = {} <NEW_LINE> formParams = {} <NEW_LINE> files = {} <NEW_LINE> bodyParam = None <NEW_LINE> headerParams['Accept'] = 'application/json' <NEW_LINE> headerParams['Content-Type'] = 'application/json' <NEW_LINE> if ('applicationUuid' in params): <NEW_LINE> <INDENT> replacement = str(self.apiClient.toPathValue(params['applicationUuid'])) <NEW_LINE> replacement = urllib.parse.quote(replacement) <NEW_LINE> resourcePath = resourcePath.replace('{' + 'applicationUuid' + '}', replacement) <NEW_LINE> <DEDENT> if ('pageUuid' in params): <NEW_LINE> <INDENT> replacement = str(self.apiClient.toPathValue(params['pageUuid'])) <NEW_LINE> replacement = urllib.parse.quote(replacement) <NEW_LINE> resourcePath = resourcePath.replace('{' + 'pageUuid' + '}', replacement) <NEW_LINE> <DEDENT> if ('topologyViewDtoList' in params): <NEW_LINE> <INDENT> bodyParam = params['topologyViewDtoList'] <NEW_LINE> <DEDENT> postData = (formParams if formParams else bodyParam) <NEW_LINE> response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files) <NEW_LINE> if not response: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> responseObject = self.apiClient.deserialize(response, 'TaskIdResult') <NEW_LINE> return responseObject
updateTopologyViews Args: applicationUuid, str: Topology Application Uuid (required) pageUuid, str: Topology Application Page Uuid (required) topologyViewDtoList, list[TopologyViewDto]: page (required) Returns: TaskIdResult
625941b42ae34c7f2600cf0c
def projectRerunRequest(self, project, item, clearError): <NEW_LINE> <INDENT> cmdstring="project-rerun" <NEW_LINE> fields = [] <NEW_LINE> fields.append(Input('cmd', cmdstring)) <NEW_LINE> fields.append(Input('version', "1")) <NEW_LINE> fields.append(Input('item', item)) <NEW_LINE> if clearError: <NEW_LINE> <INDENT> fields.append(Input('clear-error', 1)) <NEW_LINE> fields.append(Input('recursive', 1)) <NEW_LINE> <DEDENT> response= self.postRequest(ServerRequest.prepareRequest(fields, [])) <NEW_LINE> return response
Force a rerun and optionally clear an error.
625941b41f037a2d8b945fda
def delta_0f(beta): <NEW_LINE> <INDENT> beta = ZZ(round(beta)) <NEW_LINE> return BKZ._delta_0f(beta)
Compute root-Hermite factor `δ_0` from block size `β`.
625941b4462c4b4f79d1d4ab
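BKZ._delta_0f is internal to the estimator; a hedged stand-in using the widely cited asymptotic estimate delta_0 ~ ((beta/(2*pi*e)) * (pi*beta)^(1/beta))^(1/(2*(beta-1))) (the estimator itself may interpolate a lookup table for small beta instead):

from math import e, pi

def delta_0_estimate(beta):
    # Asymptotic root-Hermite factor for BKZ with block size beta.
    return ((beta / (2 * pi * e)) * (pi * beta) ** (1.0 / beta)) ** (1.0 / (2 * (beta - 1)))

print(delta_0_estimate(100))  # ~1.0093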
def parse_parameter(self, testblock_name, metric_name, params): <NEW_LINE> <INDENT> metric_type = "time" <NEW_LINE> unit = "s" <NEW_LINE> split_name = metric_name.split("::") <NEW_LINE> if len(split_name) != 2: <NEW_LINE> <INDENT> raise ATFConfigurationError("no valid metric name for metric '%s' in testblock '%s'" %(metric_name, testblock_name)) <NEW_LINE> <DEDENT> if split_name[0] != metric_type: <NEW_LINE> <INDENT> raise ATFConfigurationError("called invalid metric handle for metric '%s' in testblock '%s'." %(metric_name, testblock_name)) <NEW_LINE> <DEDENT> if type(params) is not dict: <NEW_LINE> <INDENT> rospy.logerr("metric config not a dictionary") <NEW_LINE> raise ATFConfigurationError("no valid metric configuration for metric '%s' in testblock '%s': %s" %(metric_name, testblock_name, str(params))) <NEW_LINE> <DEDENT> groundtruth = Groundtruth() <NEW_LINE> try: <NEW_LINE> <INDENT> groundtruth.data = params["groundtruth"]["data"] <NEW_LINE> groundtruth.epsilon = params["groundtruth"]["epsilon"] <NEW_LINE> groundtruth.available = True <NEW_LINE> <DEDENT> except (TypeError, KeyError): <NEW_LINE> <INDENT> groundtruth.data = 0 <NEW_LINE> groundtruth.epsilon = 0 <NEW_LINE> groundtruth.available = False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> mode = params["mode"] <NEW_LINE> <DEDENT> except (TypeError, KeyError): <NEW_LINE> <INDENT> mode = MetricResult.SNAP <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> series_mode = params["series_mode"] <NEW_LINE> <DEDENT> except (TypeError, KeyError): <NEW_LINE> <INDENT> series_mode = None <NEW_LINE> <DEDENT> return CalculateTime(metric_name, testblock_name, groundtruth, mode, series_mode, unit)
Method that returns the configured metric handle. :param testblock_name: name of the testblock the metric belongs to :param metric_name: metric handle name, expected in the form 'time::<name>' :param params: metric configuration dictionary (groundtruth, mode, series_mode) :return: a CalculateTime instance
625941b47c178a314d6ef232
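A hypothetical metric configuration shaped like the keys parse_parameter reads; `handler` stands in for an instance of the containing metric-handle class:

params = {
    "groundtruth": {"data": 5.0, "epsilon": 0.5},
    # "mode" and "series_mode" are optional; absent keys fall back to
    # MetricResult.SNAP and None respectively.
}
metric = handler.parse_parameter("testblock_small", "time::duration", params)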