Columns:
  code        string, lengths 4 to 4.48k
  docstring   string, lengths 1 to 6.45k
  _id         string, length 24 (fixed)

Each record below is a (code, docstring, _id) triple.
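Each `code` value is a single flattened line in which `<NEW_LINE>`, `<INDENT>`, and `<DEDENT>` tokens stand in for newlines and indentation changes. A minimal decoder sketch for turning a flattened record back into Python source (the function name and tab width are my own choices, not part of the dataset; string literals that happened to contain the control tokens would confuse it):

```python
import re

_TOKENS = re.compile(r"\s*(<NEW_LINE>|<INDENT>|<DEDENT>)\s*")

def unflatten(flat: str, tab: str = "    ") -> str:
    """Rebuild multi-line source from the one-line token encoding."""
    depth = 0
    lines = []
    pending = ""
    for part in _TOKENS.split(flat):
        if part == "<NEW_LINE>":
            lines.append(tab * depth + pending)  # flush the finished line
            pending = ""
        elif part == "<INDENT>":
            depth += 1
        elif part == "<DEDENT>":
            depth -= 1
        elif part:
            pending = part
    if pending:  # the last line usually has no trailing <NEW_LINE>
        lines.append(tab * depth + pending)
    return "\n".join(lines)
```

Applied to the first record below, this reproduces the two-line body shown in the next example, e.g. `print(unflatten(row["code"]))` for a hypothetical `row`.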
def compute_loss(y, tx, w): <NEW_LINE> <INDENT> error = y - np.dot(tx, w) <NEW_LINE> return 0.5 * np.mean(error ** 2)
Calculate the loss. You can calculate the loss using MSE or MAE.
625941b3627d3e7fe0d68c0d
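De-flattened, the first record is the standard half-MSE loss. A runnable sketch (the `numpy` import is implied by the record rather than stored in it):

```python
import numpy as np

def compute_loss(y, tx, w):
    """Half mean-squared error between targets y and predictions tx @ w."""
    error = y - np.dot(tx, w)
    return 0.5 * np.mean(error ** 2)

# Tiny check: perfect weights give zero loss.
tx = np.array([[1.0, 2.0], [1.0, 3.0]])
w = np.array([0.5, 1.0])
y = tx @ w
assert compute_loss(y, tx, w) == 0.0
```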
def int_to_list(n: int, base: int=10) -> List[int]: <NEW_LINE> <INDENT> digit_list = [] <NEW_LINE> while n: <NEW_LINE> <INDENT> digit_list += [n % base] <NEW_LINE> n //= base <NEW_LINE> <DEDENT> return list(reversed(digit_list))
Returns a list of the digits of N.
625941b38a43f66fc4b53e29
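The digit-extraction record above, rebuilt verbatim; note that `n = 0` yields an empty list because the `while` loop never runs:

```python
from typing import List

def int_to_list(n: int, base: int = 10) -> List[int]:
    """Return the digits of n in the given base, most significant first."""
    digit_list = []
    while n:
        digit_list += [n % base]  # least significant digit first
        n //= base
    return list(reversed(digit_list))

assert int_to_list(1234) == [1, 2, 3, 4]
assert int_to_list(5, base=2) == [1, 0, 1]
assert int_to_list(0) == []  # the while loop never runs for n = 0
```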
def open_temp_file(prefix): <NEW_LINE> <INDENT> (fd, filename) = mkstemp( dir=CFG_TMPSHAREDDIR, prefix='prefix_' + time.strftime("%Y%m%d_%H%M%S_", time.localtime()) ) <NEW_LINE> file_out = os.fdopen(fd, "w") <NEW_LINE> logger.debug("Created temporary file %s" % filename) <NEW_LINE> return (file_out, filename)
Create a temporary file to write MARC XML in
625941b3baa26c4b54cb0ee3
def teardown_method(self, method): <NEW_LINE> <INDENT> self.client.close()
Teardown method.
625941b34e4d5625662d419d
def should_inv_be_displayed(self): <NEW_LINE> <INDENT> if self.collected: <NEW_LINE> <INDENT> self.display_inventory()
Check if an inventory tile should be displayed
625941b34428ac0f6e5ba5b8
def add_attributes(item, item_source): <NEW_LINE> <INDENT> for name, value in item_source.attrs.iteritems(): <NEW_LINE> <INDENT> item.attrs.modify(name, value)
Add all the attrs from item_source as attributes in item, where item_source can be a group or a dataset.
625941b3925a0f43d2549c32
def __repr__(self): <NEW_LINE> <INDENT> return self.__urepr__().encode("utf-8")
__repr__ *must* return a str, not a unicode.
625941b3fff4ab517eb2f1f8
def add(self, est): <NEW_LINE> <INDENT> if isinstance(est, EstimatorConfig): <NEW_LINE> <INDENT> self.est_configs.append(est.get_est_args()) <NEW_LINE> <DEDENT> elif isinstance(est, dict): <NEW_LINE> <INDENT> self.est_configs.append(est) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Unknown estimator information {}".format(est))
Add an estimator to the auto-growing cascade layer. :param est: :return:
625941b3cdde0d52a9e52df3
def subtract(numbers): <NEW_LINE> <INDENT> return numbers[0] - numbers[1]
Subtracts the 1..Nth numbers from the 0th one
625941b34e4d5625662d419e
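The docstring above promises to subtract the 1..Nth numbers, but the stored code only ever uses the first two elements. Rebuilt as stored, plus a generalization that actually matches the docstring (the generalization is mine, not the record's):

```python
def subtract(numbers):
    """As stored in the record: only numbers[1] is subtracted."""
    return numbers[0] - numbers[1]

# A variant that matches the docstring ("subtract the 1..Nth numbers
# from the 0th one") -- my generalization, not the record's code:
def subtract_all(numbers):
    result = numbers[0]
    for n in numbers[1:]:
        result -= n
    return result

assert subtract([10, 3]) == 7
assert subtract_all([10, 3, 2]) == 5
```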
def __init__(self, links: List['ProfileLink']) -> None: <NEW_LINE> <INDENT> self.links = links
Initialize a ProfileLinkList object. :param List[ProfileLink] links: List of links to a trusted profile.
625941b324f1403a92600931
def __bootstrap__(): <NEW_LINE> <INDENT> import sys <NEW_LINE> import core <NEW_LINE> import os <NEW_LINE> in_test = 'unittest' in sys.modules <NEW_LINE> try: <NEW_LINE> <INDENT> num_threads = int(os.getenv('OMP_NUM_THREADS', '1')) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> num_threads = 1 <NEW_LINE> <DEDENT> if num_threads > 1: <NEW_LINE> <INDENT> print( 'WARNING: OMP_NUM_THREADS set to {0}, not 1. The computation ' 'speed will not be optimized if you use data parallel. It will ' 'fail if this PaddlePaddle binary is compiled with OpenBlas since' ' OpenBlas does not support multi-threads.'.format(num_threads), file=sys.stderr) <NEW_LINE> print('PLEASE USE OMP_NUM_THREADS WISELY.', file=sys.stderr) <NEW_LINE> <DEDENT> os.environ['OMP_NUM_THREADS'] = str(num_threads) <NEW_LINE> read_env_flags = [ 'use_pinned_memory', 'check_nan_inf', 'benchmark', 'warpctc_dir', 'eager_delete_scope', 'use_mkldnn', 'initial_cpu_memory_in_mb', 'init_allocated_mem' ] <NEW_LINE> if core.is_compiled_with_cuda(): <NEW_LINE> <INDENT> read_env_flags += [ 'fraction_of_gpu_memory_to_use', 'cudnn_deterministic' ] <NEW_LINE> <DEDENT> core.init_gflags([sys.argv[0]] + ["--tryfromenv=" + ",".join(read_env_flags)]) <NEW_LINE> core.init_glog(sys.argv[0]) <NEW_LINE> core.init_devices(not in_test)
Enable reading gflags from environment variables. Returns: None
625941b3099cdd3c635f0a1c
def main(): <NEW_LINE> <INDENT> def convert_file(in_file_name): <NEW_LINE> <INDENT> def get_out_file_name(in_file_name): <NEW_LINE> <INDENT> return Config.out_file_infix.join(os.path.splitext(in_file_name)) <NEW_LINE> <DEDENT> def convert_data_line(line): <NEW_LINE> <INDENT> line[0] = convert_utc_to_local_time_zone(line[0]) <NEW_LINE> return line <NEW_LINE> <DEDENT> csv_filter( in_file_name , get_out_file_name(in_file_name) , lambda line : line , convert_data_line ) <NEW_LINE> <DEDENT> iterate_argv(convert_file)
Top-level entry function.
625941b315fb5d323cde08c7
def CHI(self) -> str: <NEW_LINE> <INDENT> return f'{self.firstName} {self.lastName}, "{self.title}," {self.publication}, last modified {self.date}, {self.url}.'
Returns a Chicago-style citation.
625941b371ff763f4b54944d
def __init__(self, tmx_tileset): <NEW_LINE> <INDENT> self.name = tmx_tileset.name <NEW_LINE> self.tile_width, self.tile_height = tmx_tileset.tile_size <NEW_LINE> image_file = tmx_tileset.image.source <NEW_LINE> self.tile_gfx = assets.getImageList(image_file, tmx_tileset.column_count, tmx_tileset.row_count, False) <NEW_LINE> self.tiles = [] <NEW_LINE> for t in tmx_tileset: <NEW_LINE> <INDENT> self.tiles.append(Tile(t.number, t.properties, self.tile_gfx[t.number]))
A collection of tiles.
625941b315baa723493c3d31
def __init__(self, id=None, name=None, organization=None, date_created=None, created_by=None, workflow_step_net_schemes=None, rates=None): <NEW_LINE> <INDENT> self._id = None <NEW_LINE> self._name = None <NEW_LINE> self._organization = None <NEW_LINE> self._date_created = None <NEW_LINE> self._created_by = None <NEW_LINE> self._workflow_step_net_schemes = None <NEW_LINE> self._rates = None <NEW_LINE> self.discriminator = None <NEW_LINE> if id is not None: <NEW_LINE> <INDENT> self.id = id <NEW_LINE> <DEDENT> if name is not None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> if organization is not None: <NEW_LINE> <INDENT> self.organization = organization <NEW_LINE> <DEDENT> if date_created is not None: <NEW_LINE> <INDENT> self.date_created = date_created <NEW_LINE> <DEDENT> if created_by is not None: <NEW_LINE> <INDENT> self.created_by = created_by <NEW_LINE> <DEDENT> if workflow_step_net_schemes is not None: <NEW_LINE> <INDENT> self.workflow_step_net_schemes = workflow_step_net_schemes <NEW_LINE> <DEDENT> if rates is not None: <NEW_LINE> <INDENT> self.rates = rates
NetRateScheme - a model defined in Swagger
625941b3e8904600ed9f1ce9
def wait_for_button(pin): <NEW_LINE> <INDENT> global received_signal <NEW_LINE> global roof_opened <NEW_LINE> global roof_closed <NEW_LINE> global roof_opening <NEW_LINE> global roof_closing <NEW_LINE> global next_possible_green_button_action <NEW_LINE> time.sleep(1) <NEW_LINE> while True: <NEW_LINE> <INDENT> while wiringpi.digitalRead(pin) == True and received_signal == 0: <NEW_LINE> <INDENT> time.sleep(0.01) <NEW_LINE> <DEDENT> if received_signal != 0: <NEW_LINE> <INDENT> print("Signal {}, opened {}, closed {}, opening {}, closing {}".format( received_signal, roof_opened, roof_closed, roof_opening, roof_closing)) <NEW_LINE> received_signal = 0 <NEW_LINE> return True <NEW_LINE> <DEDENT> pressed_time = 0 <NEW_LINE> while wiringpi.digitalRead(pin) == False: <NEW_LINE> <INDENT> time.sleep(0.01) <NEW_LINE> pressed_time += 10 <NEW_LINE> <DEDENT> if pressed_time >= 100: <NEW_LINE> <INDENT> print("Pressed for", pressed_time, "ms") <NEW_LINE> if next_possible_green_button_action == GREEN_BUTTON_CLOSING: <NEW_LINE> <INDENT> roof_closing = True <NEW_LINE> <DEDENT> elif next_possible_green_button_action == GREEN_BUTTON_OPENING: <NEW_LINE> <INDENT> roof_opening = True <NEW_LINE> <DEDENT> elif next_possible_green_button_action == GREEN_BUTTON_STOP: <NEW_LINE> <INDENT> roof_closing = False <NEW_LINE> roof_opening = False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Too brief press:", pressed_time, "ms")
Wait for a green-button press or an external signal.
625941b34e696a04525c9214
def topKFrequent(self, nums, k): <NEW_LINE> <INDENT> dic = dict() <NEW_LINE> for num in nums: <NEW_LINE> <INDENT> if num in dic: <NEW_LINE> <INDENT> dic[num] += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dic[num] = 1 <NEW_LINE> <DEDENT> <DEDENT> dic_sorted = sorted(dic.items(), key=lambda item: item[1], reverse=True) <NEW_LINE> result = [item[0] for item in dic_sorted[:k]] <NEW_LINE> return result
:type nums: List[int] :type k: int :rtype: List[int]
625941b3b5575c28eb68ddbc
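The record above builds the frequency table and sorts it by hand; `collections.Counter.most_common` from the standard library gives the same result (up to tie ordering). A sketch of that substitution, not the record's own code:

```python
from collections import Counter

def top_k_frequent(nums, k):
    """Return the k most common values in nums (ties in arbitrary order)."""
    return [num for num, _ in Counter(nums).most_common(k)]

assert top_k_frequent([1, 1, 1, 2, 2, 3], 2) == [1, 2]
```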
def var(): <NEW_LINE> <INDENT> return Parse.next_token().if_type(['VAR']).expect('var')
parser for a single variable. Accepts a single token that is a variable.
625941b3507cdc57c6306a91
def auth_delete(method): <NEW_LINE> <INDENT> def wrapper(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.current_user: <NEW_LINE> <INDENT> if is_prived(self.userinfo.role, ROLE_CFG['delete']): <NEW_LINE> <INDENT> return method(self, *args, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> kwd = { 'info': 'No role', } <NEW_LINE> self.render('misc/html/404.html', kwd=kwd, userinfo=self.userinfo) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> kwd = { 'info': 'No role', } <NEW_LINE> self.render('misc/html/404.html', kwd=kwd, userinfo=self.userinfo) <NEW_LINE> <DEDENT> <DEDENT> return wrapper
Permission decorator: requires the delete role.
625941b34d74a7450ccd3f83
def test_not_authenticated(self): <NEW_LINE> <INDENT> response = self.app.get(self.url) <NEW_LINE> self.assertRedirects(response, "/?next=/contributor/evaluation/%s/edit" % TESTING_EVALUATION_ID)
Asserts that an unauthorized user gets redirected to the login page.
625941b323849d37ff7b2e52
def read_cpy(f, scale=1.): <NEW_LINE> <INDENT> surfs = OrderedDict() <NEW_LINE> for line in f: <NEW_LINE> <INDENT> line = [field.strip() for field in line.split(",")] <NEW_LINE> if not line: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> cmd = line.pop(0) <NEW_LINE> if cmd == "#": <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif cmd == "WP": <NEW_LINE> <INDENT> point_name = line.pop(0) <NEW_LINE> if point_name.startswith("TrapCenter"): <NEW_LINE> <INDENT> origin = map(float, line) <NEW_LINE> <DEDENT> elif point_name.startswith("Center Point"): <NEW_LINE> <INDENT> origin = map(float, line) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("ignoring line", cmd, point_name, line) <NEW_LINE> <DEDENT> <DEDENT> elif cmd == "S": <NEW_LINE> <INDENT> points, panels = [], [] <NEW_LINE> name = line.pop(0) <NEW_LINE> if name.startswith("TRAPELECTRODE_"): <NEW_LINE> <INDENT> name = name[len("TRAPELECTRODE_"):] <NEW_LINE> <DEDENT> <DEDENT> elif cmd == "V": <NEW_LINE> <INDENT> points.append(map(float, line)) <NEW_LINE> <DEDENT> elif cmd == "T": <NEW_LINE> <INDENT> panels.append(map(int, line)) <NEW_LINE> <DEDENT> elif cmd == "SEND": <NEW_LINE> <INDENT> points = np.array(points, dtype=np.double) <NEW_LINE> panels = np.array(panels, dtype=np.intc) <NEW_LINE> points /= scale <NEW_LINE> panels -= 1 <NEW_LINE> surfs.setdefault(name, []).append((points, panels)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("ignoring line", cmd, line) <NEW_LINE> <DEDENT> <DEDENT> return surfs
Reads a cpy text file and returns a dict {name: [(points, triangles), ...]}: for each name, a list of surfaces consisting of a points array and an (n,3) triangles array with indices into points. * only triangles are supported * origin (TrapCenter or Center Point) is ignored * the TRAPELECTRODE_ prefix in name is stripped
625941b31d351010ab8558e5
def test_set_fields(self): <NEW_LINE> <INDENT> base = self.base.copy() <NEW_LINE> mbase = base.view(mrecarray) <NEW_LINE> mbase = mbase.copy() <NEW_LINE> mbase.fill_value = (999999,1e20,'N/A') <NEW_LINE> mbase.a._data[:] = 5 <NEW_LINE> assert_equal(mbase['a']._data, [5,5,5,5,5]) <NEW_LINE> assert_equal(mbase['a']._mask, [0,1,0,0,1]) <NEW_LINE> mbase.a = 1 <NEW_LINE> assert_equal(mbase['a']._data, [1]*5) <NEW_LINE> assert_equal(ma.getmaskarray(mbase['a']), [0]*5) <NEW_LINE> assert_equal(mbase._mask, [False]*5) <NEW_LINE> assert_equal(mbase._fieldmask.tolist(), np.array([(0,0,0),(0,1,1),(0,0,0),(0,0,0),(0,1,1)], dtype=bool)) <NEW_LINE> mbase.c = masked <NEW_LINE> assert_equal(mbase.c.mask, [1]*5) <NEW_LINE> assert_equal(ma.getmaskarray(mbase['c']), [1]*5) <NEW_LINE> assert_equal(ma.getdata(mbase['c']), ['N/A']*5) <NEW_LINE> assert_equal(mbase._fieldmask.tolist(), np.array([(0,0,1),(0,1,1),(0,0,1),(0,0,1),(0,1,1)], dtype=bool)) <NEW_LINE> mbase = base.view(mrecarray).copy() <NEW_LINE> mbase.a[3:] = 5 <NEW_LINE> assert_equal(mbase.a, [1,2,3,5,5]) <NEW_LINE> assert_equal(mbase.a._mask, [0,1,0,0,0]) <NEW_LINE> mbase.b[3:] = masked <NEW_LINE> assert_equal(mbase.b, base['b']) <NEW_LINE> assert_equal(mbase.b._mask, [0,1,0,1,1]) <NEW_LINE> ndtype = [('alpha','|S1'),('num',int)] <NEW_LINE> data = ma.array([('a',1),('b',2),('c',3)], dtype=ndtype) <NEW_LINE> rdata = data.view(MaskedRecords) <NEW_LINE> val = ma.array([10,20,30], mask=[1,0,0]) <NEW_LINE> import warnings <NEW_LINE> warnings.simplefilter("ignore") <NEW_LINE> rdata['num'] = val <NEW_LINE> assert_equal(rdata.num, val) <NEW_LINE> assert_equal(rdata.num.mask, [1,0,0])
Tests setting fields.
625941b360cbc95b062c6309
def start(self): <NEW_LINE> <INDENT> with self.userLock: <NEW_LINE> <INDENT> self.say(self.sentences["startup"]) <NEW_LINE> <DEDENT> self.detector = decoder.HotwordDetector(root + "/" + self.model, sensitivity=0.4) <NEW_LINE> self.detector.start(detected_callback=self.hotword_has_been_detected, interrupt_check=self.interrupt_callback, sleep_time=0.03)
Wrapper around snowboy's detector() :return:
625941b32eb69b55b151c669
def switch_off(self): <NEW_LINE> <INDENT> if self._visible: <NEW_LINE> <INDENT> self._visible = False <NEW_LINE> turtle.up() <NEW_LINE> turtle.setpos(self._x, self._y - self._r) <NEW_LINE> turtle.down() <NEW_LINE> c = turtle.pencolor() <NEW_LINE> turtle.pencolor(turtle.bgcolor()) <NEW_LINE> turtle.circle(self._r) <NEW_LINE> turtle.pencolor(c)
Makes the circle invisible on the screen.
625941b3167d2b6e3121895e
def start(self, xmlSubPanel, boardConfiguration): <NEW_LINE> <INDENT> self.xmlSubPanel = xmlSubPanel <NEW_LINE> self.boardConfiguration = boardConfiguration <NEW_LINE> if self.comm.isConnected() == True: <NEW_LINE> <INDENT> telemetry = self.xml.find(xmlSubPanel + "/Telemetry").text <NEW_LINE> if telemetry != None: <NEW_LINE> <INDENT> self.comm.write(telemetry) <NEW_LINE> <DEDENT> self.timer = QtCore.QTimer() <NEW_LINE> self.timer.timeout.connect(self.readContinuousData) <NEW_LINE> self.timer.start(10)
This method starts a timer used for any long-running loops in a subpanel.
625941b363f4b57ef0000eea
def add(self, vec2): <NEW_LINE> <INDENT> self.x += vec2.x <NEW_LINE> self.y += vec2.y <NEW_LINE> return Vec2(self.x, self.y)
This method is destructive! <returns tears in non-functional approach>
625941b38e71fb1e9831d575
def _end_selection(self, accel_group, acceleratable, keyval, modifier): <NEW_LINE> <INDENT> self.ui.action_selection.set_active(False)
End of the selection mode
625941b3b545ff76a8913be1
def test_delete_server_wait_for_deleted(self): <NEW_LINE> <INDENT> server = fakes.make_fake_server('9999', 'wily', 'ACTIVE') <NEW_LINE> self.register_uris([ self.get_nova_discovery_mock_dict(), dict(method='GET', uri=self.get_mock_url( 'compute', 'public', append=['servers', 'detail']), json={'servers': [server]}), dict(method='DELETE', uri=self.get_mock_url( 'compute', 'public', append=['servers', '9999'])), dict(method='GET', uri=self.get_mock_url( 'compute', 'public', append=['servers', 'detail']), json={'servers': [server]}), dict(method='GET', uri=self.get_mock_url( 'compute', 'public', append=['servers', 'detail']), json={'servers': []}), ]) <NEW_LINE> self.assertTrue(self.cloud.delete_server('wily', wait=True)) <NEW_LINE> self.assert_calls()
Test that delete_server waits for the server to be gone
625941b3baa26c4b54cb0ee4
def wait_for_repetition(m): <NEW_LINE> <INDENT> prevnumber = m + 1 <NEW_LINE> while True: <NEW_LINE> <INDENT> number = random.randrange(m) <NEW_LINE> print(number, end=' ') <NEW_LINE> if number == prevnumber: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> prevnumber = number <NEW_LINE> <DEDENT> print() <NEW_LINE> return number
Repeatedly generates random integers in the range [0, m). Stops generating random integers when one is generated that is the same as the previous one. Returns that last generated integer. For example, if the random integers generated are (in order): 37 23 13 50 32 32 then 32 is returned. Precondition: m is a positive integer.
625941b376d4e153a657e8f0
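Rebuilt and runnable; the sentinel `m + 1` can never be drawn by `randrange(m)`, which guarantees at least two draws before a repeat can stop the loop:

```python
import random

def wait_for_repetition(m):
    """Draw from range(m) until a value repeats its predecessor; return it."""
    prevnumber = m + 1          # sentinel: randrange(m) can never produce it
    while True:
        number = random.randrange(m)
        print(number, end=' ')
        if number == prevnumber:
            break
        prevnumber = number
    print()
    return number

# Example run (output is random), e.g. prints "3 0 2 2" and returns 2.
print(wait_for_repetition(5))
```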
def summarise(self, field, maxlen=600, hl=('<b>', '</b>'), query=None): <NEW_LINE> <INDENT> highlighter = highlight.Highlighter(language_code=self._get_language(field)) <NEW_LINE> field = self.data[field] <NEW_LINE> results = [] <NEW_LINE> text = '\n'.join(field) <NEW_LINE> if query is None: <NEW_LINE> <INDENT> query = self._query <NEW_LINE> <DEDENT> return highlighter.makeSample(text, query, maxlen, hl)
Return a summarised version of the field specified. This will return a summary of the contents of the field stored in the search result, with words which match the query highlighted. The maximum length of the summary (in characters) may be set using the maxlen parameter. The return value will be a string holding the summary, with highlighting applied. If there are multiple instances of the field in the document, the instances will be joined with a newline character. To turn off highlighting, set hl to None. Each highlight will consist of the first entry in the `hl` list being placed before the word, and the second entry in the `hl` list being placed after the word. Any XML or HTML style markup tags in the field will be stripped before the summarisation algorithm is applied. If `query` is supplied, it should contain a Query object, as returned from SearchConnection.query_parse() or related methods, which will be used as the basis of the summarisation and highlighting rather than the query which was used for the search. Raises KeyError if the field is not known.
625941b36fece00bbac2d4fb
def h_setlevel(self): <NEW_LINE> <INDENT> def is_all_mutex(layer): <NEW_LINE> <INDENT> for subgoalA, subgoalB in combinations(self.goal, 2): <NEW_LINE> <INDENT> if subgoalA in layer and subgoalB in layer: <NEW_LINE> <INDENT> if layer.is_mutex(subgoalA, subgoalB): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> self.fill() <NEW_LINE> for i, layer in enumerate(self.literal_layers): <NEW_LINE> <INDENT> goal_reached = True <NEW_LINE> for g in self.goal: <NEW_LINE> <INDENT> if g not in layer: <NEW_LINE> <INDENT> goal_reached = False <NEW_LINE> <DEDENT> <DEDENT> if goal_reached and is_all_mutex(layer): <NEW_LINE> <INDENT> return i <NEW_LINE> <DEDENT> <DEDENT> return -1
Calculate the set level heuristic for the planning graph The set level of a planning graph is the first level where all goals appear such that no pair of goal literals are mutex in the last layer of the planning graph. Hints ----- - See the pseudocode folder for help on a simple implementation - You can implement this function more efficiently if you expand the graph one level at a time until you find the set level rather than filling the whole graph at the start. See Also -------- Russell-Norvig 10.3.1 (3rd Edition) Notes ----- WARNING: you should expect long runtimes using this heuristic on complex problems
625941b30c0af96317bb7fa9
def get_num_seeds_peers(self): <NEW_LINE> <INDENT> if not self.lt_status or self.get_status() not in [DLSTATUS_DOWNLOADING, DLSTATUS_SEEDING]: <NEW_LINE> <INDENT> return 0, 0 <NEW_LINE> <DEDENT> total = self.lt_status.list_peers <NEW_LINE> seeds = self.lt_status.list_seeds <NEW_LINE> return seeds, total - seeds
Returns the sum of the number of seeds and peers. @return A tuple (num seeds, num peers)
625941b3d99f1b3c44c67360
def on_aprs_status(self, origframe, source, payload, via=None): <NEW_LINE> <INDENT> pass
APRS status packet (data type: >)
625941b315fb5d323cde08c8
def __init__( self, workers: Optional[List[Dict]] = list(), managers: Optional[Dict] = None ): <NEW_LINE> <INDENT> self._workerspecs = {doc['name']: doc for doc in workers} <NEW_LINE> self._workers = dict() <NEW_LINE> self.managers = managers if managers is not None else dict()
Initialize the specifications for the workers that are managed by this worker pool and the optional list of task managers for individual workflow steps. Parameters ---------- workers: list, default=list List of worker specifications. managers: dict, default=None Mapping from workflow step identifier to worker identifier that defines the worker that is responsible for the execution of the respective workflow step.
625941b3d18da76e23532290
@commands.command(r'clearcache') <NEW_LINE> def clearcache(): <NEW_LINE> <INDENT> g.pafs = {} <NEW_LINE> g.streams = {} <NEW_LINE> dbg("%scache cleared%s", c.p, c.w) <NEW_LINE> g.message = "cache cleared"
Clear cached items - for debugging use.
625941b3b7558d58953c4cdc
def buckets(resource): <NEW_LINE> <INDENT> all_buckets = [] <NEW_LINE> for bucket in resource.buckets.all(): <NEW_LINE> <INDENT> all_buckets.append(bucket) <NEW_LINE> <DEDENT> return all_buckets
Return all available bucket objects.
625941b3aad79263cf3907fb
def make_lookup(self, results): <NEW_LINE> <INDENT> return {r['category'].lower(): r for r in results}
Convert Trading API category list into a lookup table. Parameters ---------- results : list of dicts a `parse` result Returns ------- Category lookup table : dict Examples -------- >>> trading = Trading(sandbox=True) >>> response = trading.get_categories() >>> results = trading.parse(response.CategoryArray.Category) >>> set(trading.make_lookup(results)) == { ... 'toys & hobbies', 'health & beauty', 'music', ... 'musical instruments & gear', 'clothing, shoes & accessories', ... 'real estate', 'art', 'antiques', 'home & garden', ... 'dolls & bears', 'computers/tablets & networking', ... 'business & industrial', 'video games & consoles', ... 'consumer electronics', 'tickets & experiences', ... 'sports mem, cards & fan shop', 'jewelry & watches', ... 'gift cards & coupons', 'entertainment memorabilia', ... 'specialty services', 'stamps', 'cameras & photo', ... 'pottery & glass', 'coins & paper money', 'everything else', ... 'dvds & movies', 'crafts', 'travel', 'pet supplies', 'baby', ... 'collectibles', 'books', 'sporting goods', ... 'cell phones & accessories'} True
625941b355399d3f05588474
def choose_new_tile(key, board): <NEW_LINE> <INDENT> if NEW_TILE_STRATEGY == 'random': <NEW_LINE> <INDENT> return random_tile(board) <NEW_LINE> <DEDENT> elif NEW_TILE_STRATEGY == 'always2': <NEW_LINE> <INDENT> return always2_tile(board) <NEW_LINE> <DEDENT> elif NEW_TILE_STRATEGY == 'minmax_worst': <NEW_LINE> <INDENT> return minmax_worst_tile(board) <NEW_LINE> <DEDENT> elif NEW_TILE_STRATEGY == 'minmax_best': <NEW_LINE> <INDENT> return minmax_best_tile(board) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('No such strategy', NEW_TILE_STRATEGY)
Selects the strategy to use for placing new tiles.
625941b330dc7b766590172b
def _init_table_dict_list(self): <NEW_LINE> <INDENT> if not self._information_schema_columns: <NEW_LINE> <INDENT> self._init_information_schema_columns() <NEW_LINE> <DEDENT> sql = "SELECT TABLE_NAME FROM information_schema.`TABLES` WHERE TABLE_SCHEMA='%s'" % ( self._database) <NEW_LINE> table_tuple = self.execute_query(sql) <NEW_LINE> self._table_dict = {t[0]: {} for t in table_tuple} <NEW_LINE> for table in table_tuple: <NEW_LINE> <INDENT> self._init_table_dict(table[0])
Initialize the table dictionary object.
625941b3091ae35668666d27
def __str__(self): <NEW_LINE> <INDENT> raise NotImplementedError()
Debug string
625941b363d6d428bbe442b7
def stringMerge(rawInputs,gtfFile,genoFile): <NEW_LINE> <INDENT> print ("\n#### Generating list of lib-specific assemblies to merge") <NEW_LINE> assemblyFile = 'mergelist.txt' <NEW_LINE> assemblyOut = open(assemblyFile,'w') <NEW_LINE> for aninput in rawInputs: <NEW_LINE> <INDENT> lib,ext,nthreads = aninput <NEW_LINE> assemblyPath = './%s.%s\n' % (lib,ext) <NEW_LINE> print("adding lib to list for merging:%s" % (assemblyPath)) <NEW_LINE> assemblyOut.write(assemblyPath) <NEW_LINE> <DEDENT> assemblyOut.close() <NEW_LINE> print ("\n#### Merging lib-specific assemblies from StringTie") <NEW_LINE> mergedAssembly = "stringtie_merged.gtf" <NEW_LINE> nproc2 = str(nproc) <NEW_LINE> if referenceGTF == 'T': <NEW_LINE> <INDENT> print("CMD:", stringtie, "--merge", "-p", nproc2, "-G", gtfFile, "-o", mergedAssembly, assemblyFile) <NEW_LINE> retcode = subprocess.call([stringtie, "--merge", "-p", nproc2, "-G", gtfFile, "-o", mergedAssembly, assemblyFile]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("CMD:", stringtie, "--merge", "-p", nproc2, "-o", mergedAssembly, assemblyFile) <NEW_LINE> retcode = subprocess.call([stringtie, "--merge", "-p", nproc2, "-o", mergedAssembly, assemblyFile]) <NEW_LINE> <DEDENT> if retcode == 0: <NEW_LINE> <INDENT> print("Lib-speciifc assemblies merged successfully\n\n") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Problem merging lib-specific assemblies - System wil exit now\n") <NEW_LINE> sys.exit() <NEW_LINE> <DEDENT> return None
Merge all lib-specific StringTie assemblies: first generate a merge list from the pheno file, then supply it to StringTie for merging. https://github.com/griffithlab/rnaseq_tutorial/wiki/Transcript-Assembly-Merge
625941b323e79379d52ee32a
@operation <NEW_LINE> def group(group, present=True, system=False, gid=None, state=None, host=None): <NEW_LINE> <INDENT> groups = host.get_fact(Groups) <NEW_LINE> is_present = group in groups <NEW_LINE> if not present and is_present: <NEW_LINE> <INDENT> yield 'groupdel {0}'.format(group) <NEW_LINE> groups.remove(group) <NEW_LINE> <DEDENT> elif present and not is_present: <NEW_LINE> <INDENT> args = [] <NEW_LINE> if system and 'BSD' not in host.get_fact(Os): <NEW_LINE> <INDENT> args.append('-r') <NEW_LINE> <DEDENT> args.append(group) <NEW_LINE> if gid: <NEW_LINE> <INDENT> args.append('--gid {0}'.format(gid)) <NEW_LINE> <DEDENT> yield "grep '^{0}:' /etc/group || groupadd {1}".format( group, ' '.join(args), ) <NEW_LINE> groups.append(group)
Add/remove system groups. + group: name of the group to ensure + present: whether the group should be present or not + system: whether to create a system group System users: System users don't exist on BSD, so the argument is ignored for BSD targets. Examples: .. code:: python server.group( name='Create docker group', group='docker', ) # multiple groups for group in ['wheel', 'lusers']: server.group( name=f'Create the group {group}', group=group, )
625941b3c432627299f04a04
def total_meet_medals(details=False): <NEW_LINE> <INDENT> medalsa = Division.objects.exclude(event_award_count__lte=3). aggregate( num_divisions_indiv_other_places=Count('name'), indiv_other_place_medals=(Sum(F('event_award_count') * 6) - (Count('name') * 3 * 6)), ) <NEW_LINE> medalsb = Division.objects.exclude(all_around_award_count__lte=3). aggregate( num_divisions_aa_other_places=Count('name'), aa_other_place_medals=(Sum('all_around_award_count') - (Count('name') * 3)) ) <NEW_LINE> medalsc = Division.objects.all(). aggregate( num_divisions_total=Count('name'), indiv_top_3_total=Count('name') * 6 * 3, aa_top_3_total=Count('name') * 3, total_awards=Sum('all_around_award_count') + Sum(F('event_award_count') * 6) ) <NEW_LINE> medals = {} <NEW_LINE> medals.update(medalsa) <NEW_LINE> medals.update(medalsb) <NEW_LINE> medals.update(medalsc) <NEW_LINE> try: <NEW_LINE> <INDENT> medals['total_awards'] = medalsa['indiv_other_place_medals'] + medalsb['aa_other_place_medals'] + medalsc['indiv_top_3_total'] + medalsc['aa_top_3_total'] <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> medals['total_awards'] = 0 <NEW_LINE> <DEDENT> if details: <NEW_LINE> <INDENT> return medals <NEW_LINE> <DEDENT> if medals['total_awards'] < 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return medals['total_awards']
For individual medals, count the number of awards we are giving in each division and multiply by the number of events For all around medals, count the number of awards we are giving in each division and subtract 3 for each division, as 1st-3rd place get trophies
625941b391f36d47f21ac2b5
def test_account_info(self): <NEW_LINE> <INDENT> from invenio_oauthclient.client import oauth <NEW_LINE> self.client.get(url_for("oauthclient.login", remote_app='orcid')) <NEW_LINE> self.assertEqual( account_info(oauth.remote_apps['orcid'], self.example_data), dict(external_id="0000-0002-1825-0097", external_method="orcid", nickname="0000-0002-1825-0097") ) <NEW_LINE> self.assertEqual( account_info(oauth.remote_apps['orcid'], {}), dict(external_id=None, external_method="orcid", nickname=None) )
Test account info extraction.
625941b3d164cc6175782b0e
def run_game(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> self._check_events() <NEW_LINE> self.mario.update() <NEW_LINE> self._update_bullets() <NEW_LINE> self._update_enemies() <NEW_LINE> self._update_screen()
Main loop
625941b3c432627299f04a05
def acceptedPercent(self, lower, upper=None, msg=None): <NEW_LINE> <INDENT> return AcceptedPercent(lower, upper, msg)
acceptedPercent(tolerance, /, msg=None) acceptedPercent(lower, upper, msg=None) Wrapper for :meth:`accepted.percent`.
625941b3498bea3a759b9872
def train(train_data, test_data, net, loss, trainer, ctx, num_epochs, print_batches=None): <NEW_LINE> <INDENT> print("Start training on ", ctx) <NEW_LINE> if isinstance(ctx, mx.Context): <NEW_LINE> <INDENT> ctx = [ctx] <NEW_LINE> <DEDENT> for epoch in range(num_epochs): <NEW_LINE> <INDENT> train_loss, train_acc, n, m = 0.0, 0.0, 0.0, 0.0 <NEW_LINE> if isinstance(train_data, mx.io.MXDataIter) or isinstance(train_data,mx.image.ImageIter): <NEW_LINE> <INDENT> train_data.reset() <NEW_LINE> <DEDENT> start = time() <NEW_LINE> for i, batch in enumerate(train_data): <NEW_LINE> <INDENT> data, label, batch_size = _get_batch(batch, ctx) <NEW_LINE> losses = [] <NEW_LINE> with autograd.record(): <NEW_LINE> <INDENT> outputs = [net(X) for X in data] <NEW_LINE> losses = [loss(yhat, y) for yhat, y in zip(outputs, label)] <NEW_LINE> <DEDENT> for l in losses: <NEW_LINE> <INDENT> l.backward() <NEW_LINE> <DEDENT> train_acc += sum([(yhat.argmax(axis=1)==y).sum().asscalar() for yhat, y in zip(outputs, label)]) <NEW_LINE> train_loss += sum([l.sum().asscalar() for l in losses]) <NEW_LINE> trainer.step(batch_size) <NEW_LINE> n += batch_size <NEW_LINE> m += sum([y.size for y in label]) <NEW_LINE> if print_batches and (i+1) % print_batches == 0: <NEW_LINE> <INDENT> print("Batch %d. Loss: %f, Train acc %f" % ( n, train_loss/n, train_acc/m )) <NEW_LINE> <DEDENT> <DEDENT> test_acc = evaluate_accuracy(test_data, net, ctx) <NEW_LINE> print("Epoch %d. Loss: %.3f, Train acc %.2f, Test acc %.2f, Time %.1f sec" % ( epoch, train_loss/n, train_acc/m, test_acc, time() - start )) <NEW_LINE> print("done")
Train a network
625941b3d53ae8145f87a03b
def word_count(filename): <NEW_LINE> <INDENT> word_count_dict = {} <NEW_LINE> with open(filename, 'r') as f: <NEW_LINE> <INDENT> for line in f: <NEW_LINE> <INDENT> words = line.split() <NEW_LINE> for word in words: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return word_count_dict
A function that returns a dictionary with tokens as keys and counts of how many times each token appeared as values in the file with the given filename. Inputs: filename - the name of a plaintext file Outputs: A dictionary mapping tokens to counts.
625941b34d74a7450ccd3f84
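The inner loop body of this record is just `pass`, so as stored the function always returns an empty dict. A completed version consistent with the docstring (the counting line is my addition):

```python
def word_count(filename):
    """Map each whitespace-separated token in the file to its count."""
    word_count_dict = {}
    with open(filename, 'r') as f:
        for line in f:
            words = line.split()
            for word in words:
                # The record leaves this body as `pass`; counting is the
                # completion implied by the docstring.
                word_count_dict[word] = word_count_dict.get(word, 0) + 1
    return word_count_dict
```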
def get_drmaa_imformation(): <NEW_LINE> <INDENT> with drmaa.Session() as s: <NEW_LINE> <INDENT> print('A DRMAA object was created') <NEW_LINE> print('Supported contact strings: %s' % s.contact) <NEW_LINE> print('Supported DRM systems: %s' % s.drmsInfo) <NEW_LINE> print('Supported DRMAA implementations: %s' % s.drmaaImplementation) <NEW_LINE> print('Exiting')
Query the system.
625941b323849d37ff7b2e53
@app.route('/video_feed') <NEW_LINE> def video_feed(): <NEW_LINE> <INDENT> return Response(gen(cv2.VideoCapture(2)), mimetype='multipart/x-mixed-replace; boundary=frame')
Video streaming route. Put this in the src attribute of an img tag.
625941b3925a0f43d2549c33
def delete(self, room_name): <NEW_LINE> <INDENT> if room_name is None: <NEW_LINE> <INDENT> Room.drop_collection() <NEW_LINE> return 'no content', 204 <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> Room.objects.get(name=room_name).delete() <NEW_LINE> return 'no content', 204 <NEW_LINE> <DEDENT> except db.DoesNotExist: <NEW_LINE> <INDENT> abort(404)
Delete specified room.
625941b31b99ca400220a871
def genNewParticles(self, num_particles=100): <NEW_LINE> <INDENT> particles = np.random.rand(num_particles,2) <NEW_LINE> particles[:,0] *= self.img.shape[0] <NEW_LINE> particles[:,1] *= self.img.shape[1] <NEW_LINE> return particles.astype(int).tolist()
Generate a new set of particles. :return (list): List of new [x, y] particle positions.
625941b316aa5153ce362238
def get_cdrom_attach_config_spec(client_factory, datastore, file_path, cdrom_unit_number): <NEW_LINE> <INDENT> config_spec = client_factory.create('ns0:VirtualMachineConfigSpec') <NEW_LINE> device_config_spec = [] <NEW_LINE> controller_key = 200 <NEW_LINE> virtual_device_config_spec = create_virtual_cdrom_spec(client_factory, datastore, controller_key, file_path, cdrom_unit_number) <NEW_LINE> device_config_spec.append(virtual_device_config_spec) <NEW_LINE> config_spec.deviceChange = device_config_spec <NEW_LINE> return config_spec
Builds and returns the cdrom attach config spec.
625941b332920d7e50b27f8e
def queryDataRows(self, **options): <NEW_LINE> <INDENT> from .datarow import DataRow <NEW_LINE> return self._api.newApiCursor(DataRow, self.getBaseApiPath() + "/rows", options)
Queries data rows associated with this contact (in any data table). Arguments: - time_created (UNIX timestamp) * Filter data rows by the time they were created * Allowed modifiers: time_created[ne], time_created[min], time_created[max] - vars (dict) * Filter data rows by value of a custom variable (e.g. vars[q1], vars[foo], etc.) * Allowed modifiers: vars[foo][exists], vars[foo][ne], vars[foo][prefix], vars[foo][not_prefix], vars[foo][gte], vars[foo][gt], vars[foo][lt], vars[foo][lte], vars[foo][min], vars[foo][max] - sort * Sort the results based on a field * Allowed values: default * Default: default - sort_dir * Sort the results in ascending or descending order * Allowed values: asc, desc * Default: asc - page_size (int) * Number of results returned per page (max 200) * Default: 50 - offset (int) * Number of items to skip from beginning of result set * Default: 0 Returns: APICursor (of DataRow)
625941b3e64d504609d74601
def solve(n=50): <NEW_LINE> <INDENT> return triangle_count(n)
For this problem, there appear to be 2 fundamental cases. All triangles that are not fundamental cases can be found from fundamental cases by rotation. A fundamental case will either: 1. have its 90 degree angle on the origin. In this case, there are 3 possible rotations (including not rotating at all). 2. have its 90 degree angle NOT on an axis. If it is not on the axis it will be located somewhere in the grid. We will say that for this type of triangle to be fundamental, one leg of the 90 degrees will go towards the origin, and the other towards the x-axis. In this case, there are 2 possible triangles (1 fundamental, 1 mirror). Also it can be observed that the triangles possible on an n * n grid will be the triangles possible on a (n - 1) * (n - 1) grid + all the new triangles that have a corner on the last column or highest row. So this problem can be solved with a recursive function. It is easy to test and observe that for the first type of fundamental triangle, there are n**2 possible. Since we are making a recursive function, we are only concerned with the possible triangles of type 1 that have a corner on the highest row or last column. Of these, there are 3 * (2*n - 1): 2 * n for each row and column, subtracting 1 because there is only 1 that goes from the top of the first column to the outside of the first row, then multiplying by 3 for all 3 rotations. Triangles of type 2 have 2 subtypes. The first is with a corner on the farthest column. The second is with a corner on the highest row. Those with a corner on the farthest column come first, and the right angle may be anywhere. This means all points must be tested to find some that satisfy this. For those with the right angle on the highest row, the 3rd corner may or may not be on the farthest right column. Also count 2 for each fundamental triangle found, for their mirrors.
625941b3956e5f7376d70c3d
def fasta_parser(self, target_file): <NEW_LINE> <INDENT> target_handle = open(target_file, 'r') <NEW_LINE> verified_ids = set() <NEW_LINE> for lines in target_handle: <NEW_LINE> <INDENT> if lines.startswith(">"): <NEW_LINE> <INDENT> seqid = re.match("([^\s]+)", lines).group(0)[1:] <NEW_LINE> verified_ids.add(seqid) <NEW_LINE> <DEDENT> <DEDENT> target_handle.close() <NEW_LINE> return verified_ids
Parses a FASTA file and returns the set of accession numbers found.
625941b316aa5153ce362239
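Rebuilt as a standalone function (the record is a method taking `self`); a `with` block replaces the explicit open/close, otherwise the logic is unchanged:

```python
import re

def fasta_parser(target_file):
    """Collect accession IDs from FASTA header lines (those starting '>')."""
    verified_ids = set()
    with open(target_file) as target_handle:
        for line in target_handle:
            if line.startswith(">"):
                # First whitespace-free token, minus the leading '>'
                seqid = re.match(r"([^\s]+)", line).group(0)[1:]
                verified_ids.add(seqid)
    return verified_ids
```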
def create_vm(self, vcpu, ram, disk, name, network_uuid, os_image_uuid, cloud_config): <NEW_LINE> <INDENT> data = { "name": name, "memory_mb": ram * 1024, "num_vcpus": vcpu, "description": "", "num_cores_per_vcpu": 1, "vm_disks": [ { "is_cdrom": True, "is_empty": True, "disk_address": { "device_bus": "ide" } }, { "is_cdrom": False, "disk_address": { "device_bus": "scsi" }, "vm_disk_clone": { "disk_address": { "vmdisk_uuid": os_image_uuid }, "minimum_size": disk * 1024 ** 3 } } ], "vm_nics": [ { "network_uuid": network_uuid } ], "hypervisor_type": "ACROPOLIS", "affinity": None, "vm_customization_config": { "userdata": cloud_config, "files_to_inject_list": [] } } <NEW_LINE> self.wait_for_task( self.api.vms_create(data) )
Create Virtual Machine with specified configuration. This method call asynchronous operation and wait for it to report success or failure. :param int vcpu: Number of vCPUs for Virtual Machine. :param int ram: Size of RAM (GB) for Virtual Machine. :param int disk: Size of Disk (GB) for Virtual Machine. :param str name: Name of Virtual Machine. :param str network_uuid: Uuid of Nutanix network used for Virtual Machine. :param str os_image_uuid: Uuid of OS Image used for Virtual Machine. :param str cloud_config: Cloud config for customization of Virtual Machine. :return: None :raises HTTPError: If API call was not successful. :raises TaskFailed: If creation task failed
625941b3f548e778e58cd33c
def get_items_with_key_prefix(items, prefix, strip_prefix=True, processors=()): <NEW_LINE> <INDENT> include = lambda k, v: k.startswith(prefix) <NEW_LINE> if strip_prefix: <NEW_LINE> <INDENT> prefix_len = len(prefix) <NEW_LINE> processors = (lambda k, v: (k[prefix_len:], v),) + processors <NEW_LINE> <DEDENT> filtered = filter_items(items, include=include, processors=processors) <NEW_LINE> return items.__class__(filtered)
Filter ``items`` to those with a key that starts with ``prefix``. ``items`` is typically a dict but can also be a sequence. See :func:`filter_items` for more on that.
625941b396565a6dacc8f496
def delete(self, guid): <NEW_LINE> <INDENT> return super().delete(id=guid)
A method to delete a CloudAccounts object. :param guid: A string representing the object GUID. :return response json
625941b32ae34c7f2600cef3
def test_from_theta(self): <NEW_LINE> <INDENT> riskfree = .01 <NEW_LINE> mean_v = .5 <NEW_LINE> kappa = 1.5 <NEW_LINE> eta = .1 <NEW_LINE> lmbd = .01 <NEW_LINE> lmbd_v = .5 <NEW_LINE> rho = -.5 <NEW_LINE> theta = [riskfree, mean_v, kappa, eta, rho, lmbd, lmbd_v] <NEW_LINE> param = HestonParam.from_theta(theta, measure='P') <NEW_LINE> self.assertEqual(param.measure, 'P') <NEW_LINE> self.assertEqual(param.riskfree, riskfree) <NEW_LINE> self.assertEqual(param.lmbd, lmbd) <NEW_LINE> self.assertEqual(param.lmbd_v, lmbd_v) <NEW_LINE> self.assertEqual(param.mean_v, mean_v) <NEW_LINE> self.assertEqual(param.kappa, kappa) <NEW_LINE> self.assertEqual(param.eta, eta) <NEW_LINE> self.assertEqual(param.rho, rho) <NEW_LINE> self.assertTrue(param.is_valid())
Test from theta.
625941b30c0af96317bb7faa
def test_decode_nibbles_fixed_partial() -> None: <NEW_LINE> <INDENT> spec = copy.deepcopy(iso8583.specs.default) <NEW_LINE> spec["t"]["data_enc"] = "ascii" <NEW_LINE> spec["p"]["data_enc"] = "ascii" <NEW_LINE> spec["2"]["data_enc"] = "ascii" <NEW_LINE> spec["2"]["len_enc"] = "ascii" <NEW_LINE> spec["2"]["len_type"] = 0 <NEW_LINE> spec["2"]["max_len"] = 4 <NEW_LINE> spec["2"]["len_count"] = "nibbles" <NEW_LINE> s = b"020040000000000000001" <NEW_LINE> with pytest.raises( iso8583.DecodeError, match="Field data is 2 nibbles, expecting 4: field 2 pos 20", ): <NEW_LINE> <INDENT> iso8583.decode(s, spec=spec)
Fixed field is provided partially
625941b37b180e01f3dc45c9
def extract_func_typedata(typedata, table): <NEW_LINE> <INDENT> func_typedata_split = typedata.split("&&&") <NEW_LINE> param_segment = func_typedata_split[0] <NEW_LINE> parameters = param_segment.split("---")[1:] <NEW_LINE> default_values = [] <NEW_LINE> for seg in func_typedata_split[1:]: <NEW_LINE> <INDENT> default_value, _, _, _, _ = table.get_by_id(int(seg)) <NEW_LINE> default_values.append(default_value) <NEW_LINE> <DEDENT> return parameters, default_values
Extract typedata of function Params ====== typedata (string) = Typedata of function in format "function---param1---param2---...&&&default_val1&&&... table (SymbolTable) = Symbol table Returns ======= parameters (list) = Parameter names default_values (list) = Default values
625941b3ab23a570cc24ff48
def dls(td): <NEW_LINE> <INDENT> td2 = [] <NEW_LINE> dd = [] <NEW_LINE> for i in range(0, len(td)): <NEW_LINE> <INDENT> td2.append((0.25/td[i])) <NEW_LINE> dd.append(0.5*np.exp(-1.0*td2[i])) <NEW_LINE> <DEDENT> sd = 0.5*sp.special.expn(1,td2) <NEW_LINE> return sd, dd
THS_DLS - Dimensionless drawdown of the Theis model Syntax: sd,dd = hp.ths.dls(td) Description: Calculates the dimensionless drawdown sd and the dimensionless derivative dd for a given dimensionless reduced time td/rd^2 See also: ths_lap
625941b3baa26c4b54cb0ee5
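The record loops over a Python list element by element; since `numpy` and `scipy.special.expn` are both vectorized, the same computation can be written without the loops. A sketch of that equivalent restructuring:

```python
import numpy as np
from scipy import special

def dls(td):
    """Dimensionless drawdown sd and derivative dd of the Theis model."""
    td = np.asarray(td, dtype=float)
    td2 = 0.25 / td                   # vectorized form of the record's loop
    dd = 0.5 * np.exp(-td2)
    sd = 0.5 * special.expn(1, td2)   # exponential integral E1
    return sd, dd

sd, dd = dls([0.1, 1.0, 10.0])
```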
def get_vthr(self): <NEW_LINE> <INDENT> if self.vthr > 0: <NEW_LINE> <INDENT> thr = self.vthr <NEW_LINE> self.vthr = [] <NEW_LINE> for roc in self.dut.rocs(): <NEW_LINE> <INDENT> self.vthr.append(thr) <NEW_LINE> <DEDENT> self.logger.info('Using min VthrComp %s from config' %self.vthr) <NEW_LINE> return <NEW_LINE> <DEDENT> self.tb.set_dac('Vcal', self.vcal) <NEW_LINE> self.dut_VthrComp_map = self.tb.get_threshold(self.n_triggers, 'VthrComp', self.xtalk, self.cals, self.reverse) <NEW_LINE> self.vthr = [] <NEW_LINE> for i, roc in enumerate(self.dut.rocs()): <NEW_LINE> <INDENT> mean = numpy.mean(self.dut_VthrComp_map[roc.number]) <NEW_LINE> std_dev = numpy.std(self.dut_VthrComp_map[roc.number])/2. <NEW_LINE> minimum = numpy.amin(numpy.ma.masked_less_equal(self.dut_VthrComp_map[roc.number],0)) <NEW_LINE> noise_min = -1 <NEW_LINE> dut_vthr_min = int(minimum) <NEW_LINE> self.logger.debug('VthrComp %s mean: %.2f sigma: %.2f min: %s noise_min %s set: %s' %(roc, mean, std_dev, minimum, noise_min, dut_vthr_min)) <NEW_LINE> self.vthr.append(dut_vthr_min) <NEW_LINE> self.tb.set_dac_roc(roc,'Vcal', roc.dac('Vcal').stored_value)
Find minimal VthrComp threshold for each ROC with Vcal = self.vcal
625941b3fbf16365ca6f5f84
def gfaks89(): <NEW_LINE> <INDENT> return _loadnan('gfaks89.dat')
Return Surface elevation measured at Gullfaks C 24.12.1989 Data summary ------------ Size : 39000 X 2 Sampling Rate : 2.5 Hz Device : EMI laser Source : STATOIL Format : ascii, c1: time c2: surface elevation Description ------------ The wave data was measured 24th December 1989 at the Gullfaks C platform in the North Sea from 17.00 to 21.20. The period from 20.00 to 20.20 is missing and contains NaNs. The water depth of 218 m is regarded as deep water for the most important wave components. There are two EMI laser sensors named 219 and 220. This data set is obtained from sensor 219, which is located in the Northwest corner approximately two platform leg diameters away from the closest leg. Thus the wave elevation is not expected to be significantly affected by diffraction effects for incoming waves in the western sector. The wind direction for this period is from the south. Some difficulties in calibration of the instruments have been reported resulting in several consecutive measured values being equal or almost equal in the observed data set. This dataset is for non-commercial use only. Hm0 = 6.8m, Tm02 = 8s, Tp = 10.5 Example ------- >>> import pylab >>> import wafo >>> x = wafo.data.gfaks89() >>> h = pylab.plot(x[:,0],x[:,1]) Acknowledgement: --------------- This dataset were prepared and made available by Dr. S. Haver, STATOIL, Norway See also -------- gfaksr89, northsea
625941b356ac1b37e6263fa3
def __init__(self, type=None, name=None): <NEW_LINE> <INDENT> self._type = None <NEW_LINE> self._name = None <NEW_LINE> self.discriminator = 'type' <NEW_LINE> self.type = type <NEW_LINE> self.name = name
QACheckDtoV2 - a model defined in Swagger
625941b391af0d3eaac9b7d4
def consecutiveNumbersSum_v2(self, N): <NEW_LINE> <INDENT> count = 0 <NEW_LINE> for d in range(1, N + 1): <NEW_LINE> <INDENT> dsum = d * (d - 1) / 2 <NEW_LINE> nd = N - dsum <NEW_LINE> if nd <= 0: break <NEW_LINE> if nd % d == 0: count += 1 <NEW_LINE> <DEDENT> return count
:type N: int :rtype: int
625941b3925a0f43d2549c34
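The record counts representations of N as a sum of d consecutive positive integers starting at a, using N = d*a + d*(d-1)/2, so a = (N - d*(d-1)/2)/d must be a positive integer. A rebuilt version with checks:

```python
def consecutive_numbers_sum(N):
    """Count ways to write N as a sum of consecutive positive integers."""
    count = 0
    for d in range(1, N + 1):
        # N = a + (a+1) + ... + (a+d-1) = d*a + d*(d-1)/2 for some a >= 1
        nd = N - d * (d - 1) // 2
        if nd <= 0:
            break
        if nd % d == 0:  # a = nd / d is a positive integer
            count += 1
    return count

assert consecutive_numbers_sum(9) == 3   # 9, 4+5, 2+3+4
assert consecutive_numbers_sum(15) == 4  # 15, 7+8, 4+5+6, 1+2+3+4+5
```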
def onBrowseFolder(self, event): <NEW_LINE> <INDENT> widget = event.GetEventObject() <NEW_LINE> name = widget.GetName() <NEW_LINE> if name == "InputFiles": <NEW_LINE> <INDENT> infomessage = "Choose a folder containing the data:" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> infomessage = "Choose an output folder for the maps:" <NEW_LINE> <DEDENT> dlg = wx.DirDialog( self, message=infomessage, style=wx.DD_DEFAULT_STYLE ) <NEW_LINE> if dlg.ShowModal() == wx.ID_OK: <NEW_LINE> <INDENT> path = dlg.GetPath() <NEW_LINE> if name == "InputFiles": <NEW_LINE> <INDENT> self.InputFold.SetValue(path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.outputPath.SetValue(path) <NEW_LINE> <DEDENT> if "self.currentPath" in locals(): <NEW_LINE> <INDENT> self.currentPath = os.path.dirname(path) <NEW_LINE> <DEDENT> <DEDENT> dlg.Destroy()
Browse for folders
625941b33539df3088e2e10c
def applyEdits(self, addFeatures=[], updateFeatures=[], deleteFeatures=None, gdbVersion=None, rollbackOnFailure=True): <NEW_LINE> <INDENT> editURL = self._url + "/applyEdits" <NEW_LINE> params = {"f": "json", 'rollbackOnFailure' : rollbackOnFailure } <NEW_LINE> if not gdbVersion is None: <NEW_LINE> <INDENT> params['gdbVersion'] = gdbVersion <NEW_LINE> <DEDENT> if len(addFeatures) > 0 and isinstance(addFeatures[0], Feature): <NEW_LINE> <INDENT> params['adds'] = json.dumps([f.asDictionary for f in addFeatures], default=_date_handler) <NEW_LINE> <DEDENT> elif isinstance(addFeatures, FeatureSet): <NEW_LINE> <INDENT> params['adds'] = json.dumps([f.asDictionary for f in addFeatures], default=_date_handler) <NEW_LINE> <DEDENT> if len(updateFeatures) > 0 and isinstance(updateFeatures[0], Feature): <NEW_LINE> <INDENT> params['updates'] = json.dumps([f.asDictionary for f in updateFeatures], default=_date_handler) <NEW_LINE> <DEDENT> if deleteFeatures is not None and isinstance(deleteFeatures, str): <NEW_LINE> <INDENT> params['deletes'] = deleteFeatures <NEW_LINE> <DEDENT> return self._post(url=editURL, param_dict=params, securityHandler=self._securityHandler, proxy_port=self._proxy_port, proxy_url=self._proxy_url)
This operation adds, updates, and deletes features in the associated feature layer or table in a single call. Inputs: addFeatures - The array of features to be added. These features should be common.general.Feature objects, or they should be a common.general.FeatureSet object. updateFeatures - The array of features to be updated. These features should be common.Feature objects. deleteFeatures - string of OIDs to remove from the service. gdbVersion - Geodatabase version to apply the edits. rollbackOnFailure - Optional parameter to specify if the edits should be applied only if all submitted edits succeed. If false, the server will apply the edits that succeed even if some of the submitted edits fail. If true, the server will apply the edits only if all edits succeed. The default value is true. Output: dictionary of messages
625941b37c178a314d6ef219
def is_point_blocked(self, p): <NEW_LINE> <INDENT> for obs in self.obstacles: <NEW_LINE> <INDENT> if obs.point_inside(p): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False
Determines if the point is blocked or not. p: The point to check. returns: True if the point is blocked by an obstacle, False otherwise.
625941b3dc8b845886cb52f5
def actor(self): <NEW_LINE> <INDENT> return utils.lib.zproc_actor(self._p)
Return the internal actor; useful for polling whether the process has died.
625941b326068e7796caea99
def retrieve_positions(position_file): <NEW_LINE> <INDENT> position_lines = open(position_file, 'r').readlines() <NEW_LINE> positions = [line.split() for line in position_lines] <NEW_LINE> return positions
This function returns a list of strings in the right format representing the positions that will be read out. [spatialfrequency,xi,xf,y]. Args: position_file (str): The path of the position file. Returns: positions (list,str): List representing the positions that will be read.
625941b3cc0a2c11143dcc5a
def SetMaximumNumberOfIterations(self, *args): <NEW_LINE> <INDENT> return _ITKOptimizersPython.itkSPSAOptimizer_SetMaximumNumberOfIterations(self, *args)
SetMaximumNumberOfIterations(self, unsigned long _arg)
625941b3d8ef3951e32432ff
def testCheckForAutoconnect(self): <NEW_LINE> <INDENT> args = mock.MagicMock() <NEW_LINE> args.autoconnect = True <NEW_LINE> args.no_prompt = False <NEW_LINE> self.Patch(utils, "InteractWithQuestion", return_value="Y") <NEW_LINE> self.Patch(utils, "FindExecutable", return_value=None) <NEW_LINE> self.Patch(os.environ, "get", return_value=None) <NEW_LINE> create._CheckForAutoconnect(args) <NEW_LINE> self.assertEqual(args.autoconnect, False) <NEW_LINE> args.autoconnect = True <NEW_LINE> self.Patch(subprocess, "check_call", return_value=True) <NEW_LINE> self.Patch(os.environ, "get", return_value="/fake_dir2") <NEW_LINE> create._CheckForAutoconnect(args) <NEW_LINE> self.assertEqual(args.autoconnect, True) <NEW_LINE> self.Patch(utils, "InteractWithQuestion", return_value="N") <NEW_LINE> create._CheckForAutoconnect(args) <NEW_LINE> self.assertEqual(args.autoconnect, False)
Test CheckForAutoconnect.
625941b3a05bb46b383ec5ef
def save_image(filename, image, metadata): <NEW_LINE> <INDENT> path = os.path.dirname(filename) <NEW_LINE> assert path == "" or os.path.exists(path), ("Invalid directory name") <NEW_LINE> assert isinstance(image, np.ndarray), ("image must be a numpy.ndarray") <NEW_LINE> assert len(image.shape) == 3, ("image must be an numpy.ndarray with shape (H,W,D)") <NEW_LINE> rows = image.shape[0] <NEW_LINE> cols = image.shape[1] <NEW_LINE> n_bands = image.shape[2] <NEW_LINE> assert isinstance(metadata, Metadata) <NEW_LINE> geotransform = metadata.geotransform <NEW_LINE> assert len(geotransform) == 6, ("Geotransform must be 6 elements") <NEW_LINE> projection = metadata.projection <NEW_LINE> spatial_reference = osr.SpatialReference() <NEW_LINE> spatial_reference.ImportFromWkt(projection) <NEW_LINE> isprojected = spatial_reference.IsProjected() <NEW_LINE> assert isprojected, ("WKT projection not parsed by OGR") <NEW_LINE> x, y, datatype = metadata.create() <NEW_LINE> assert y == rows <NEW_LINE> assert x == cols <NEW_LINE> assert datatype in Metadata.GDAL_DATATYPES, ( "datatype is not recognized as a valid GDAL datatype for GeoTiff.") <NEW_LINE> ndv = metadata.ndv <NEW_LINE> format = "GTiff" <NEW_LINE> driver = gdal.GetDriverByName(format) <NEW_LINE> dataset = driver.Create(filename, x, y, n_bands, datatype) <NEW_LINE> dataset.SetGeoTransform(geotransform) <NEW_LINE> dataset.SetProjection(projection) <NEW_LINE> depth_axis_len = image.shape[2] <NEW_LINE> for depth in range(depth_axis_len): <NEW_LINE> <INDENT> band = depth + 1 <NEW_LINE> dataset.GetRasterBand(band).WriteArray(image[:,:, depth]) <NEW_LINE> if band == 1 and ndv is not None: <NEW_LINE> <INDENT> dataset.GetRasterBand(1).SetNoDataValue(ndv) <NEW_LINE> <DEDENT> <DEDENT> dataset = None
Save an image Saves an image as a GeoTiff. Args: image: a numpy `ndarray` with array shape (H,W,D) metadata: object of class `Metadata` filename: `string` a valid system path Returns: None Raises: AssertionError
625941b3097d151d1a222c25
def testProxyGetProductRatePlanCharge(self): <NEW_LINE> <INDENT> pass
Test ProxyGetProductRatePlanCharge
625941b30383005118ecf3a6
def psirt_query(token): <NEW_LINE> <INDENT> url = 'https://api.cisco.com/security/advisories/cvrf/latest/10' <NEW_LINE> headers = { 'Accept': 'application/json', 'Authorization': 'Bearer ' + token, } <NEW_LINE> last_10_vulns = requests.get(url, headers=headers) <NEW_LINE> logger.info('query response code = ' + str(last_10_vulns.status_code)) <NEW_LINE> logger.debug(last_10_vulns)
Send required information to PSIRT API and return true if vulnerable? {"access_token":"blablablablabla","token_type":"Bearer","expires_in":3599} TODO: Add exception handling :return: bool
625941b34a966d76dd550dcd
def registerPlayer(name): <NEW_LINE> <INDENT> DB = connect() <NEW_LINE> cur = DB.cursor() <NEW_LINE> safe_name = bleach.clean(name) <NEW_LINE> cur.execute("""insert into players(player_name) values(%s)""",(safe_name,)) <NEW_LINE> DB.commit() <NEW_LINE> DB.close()
Adds a player to the tournament database. The database assigns a unique serial id number for the player. (This should be handled by your SQL database schema, not in your Python code.) Args: name: the player's full name (need not be unique).
625941b3377c676e91271f70
def __init__(self): <NEW_LINE> <INDENT> rospy.init_node('red_depth_node') <NEW_LINE> self.image_pub = rospy.Publisher('red_marked_image', Image, queue_size=10) <NEW_LINE> self.marker_pub = rospy.Publisher('red_marker', Marker, queue_size=10) <NEW_LINE> self.cv_bridge = CvBridge() <NEW_LINE> img_sub = message_filters.Subscriber('/camera/rgb/image_color', Image) <NEW_LINE> cloud_sub = message_filters.Subscriber( '/camera/depth_registered/points', PointCloud2) <NEW_LINE> self.kinect_synch = ApproximateTimeSynchronizer([img_sub, cloud_sub], queue_size=10, slop=.02) <NEW_LINE> self.kinect_synch.registerCallback(self.image_points_callback) <NEW_LINE> rospy.spin()
Construct the red-pixel finder node.
625941b3507cdc57c6306a93
def _import_record(self, record): <NEW_LINE> <INDENT> raise NotImplementedError
Import a record directly or delay the import of the record
625941b38e05c05ec3eea132
def train(hps, server): <NEW_LINE> <INDENT> images, labels = input_fn(True, FLAGS.train_data_path, FLAGS.batch_size, FLAGS.num_epochs) <NEW_LINE> model = resnet_model.ResNet(hps, images, labels, FLAGS.mode) <NEW_LINE> model.build_graph() <NEW_LINE> truth = tf.argmax(model.labels, axis=1) <NEW_LINE> predictions = tf.argmax(model.predictions, axis=1) <NEW_LINE> precision = tf.reduce_mean(tf.to_float(tf.equal(predictions, truth))) <NEW_LINE> summary_hook = tf.train.SummarySaverHook( save_steps=100, output_dir=FLAGS.train_dir, summary_op=tf.summary.merge([model.summaries, tf.summary.scalar('Precision', precision)])) <NEW_LINE> logging_hook = tf.train.LoggingTensorHook( tensors={'step': model.global_step, 'loss': model.cost, 'precision': precision}, every_n_iter=40) <NEW_LINE> class _LearningRateSetterHook(tf.train.SessionRunHook): <NEW_LINE> <INDENT> def begin(self): <NEW_LINE> <INDENT> self._lrn_rate = 0.4 <NEW_LINE> <DEDENT> def before_run(self, run_context): <NEW_LINE> <INDENT> return tf.train.SessionRunArgs( model.global_step, feed_dict={model.lrn_rate: self._lrn_rate}) <NEW_LINE> <DEDENT> def after_run(self, run_context, run_values): <NEW_LINE> <INDENT> train_step = run_values.results <NEW_LINE> if train_step < 6240: <NEW_LINE> <INDENT> self._lrn_rate = 0.1 + 0.3*train_step/6240.0 <NEW_LINE> <DEDENT> elif train_step < 37440: <NEW_LINE> <INDENT> self._lrn_rate = 0.4 <NEW_LINE> <DEDENT> elif train_step < 74880: <NEW_LINE> <INDENT> self._lrn_rate = 0.1 * 0.4 <NEW_LINE> <DEDENT> elif train_step < 99840: <NEW_LINE> <INDENT> self._lrn_rate = 0.01 * 0.4 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._lrn_rate = 0.001 * 0.4 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> is_chief = (FLAGS.task_index == 0) <NEW_LINE> with tf.train.MonitoredTrainingSession( master=server.target, is_chief=is_chief, checkpoint_dir=FLAGS.log_root, hooks=[tf.train.StopAtStepHook(last_step=FLAGS.train_steps), logging_hook, _LearningRateSetterHook()], chief_only_hooks=[model.replicas_hook, summary_hook], save_summaries_steps=0, stop_grace_period_secs=120, config=tf.ConfigProto(allow_soft_placement=True)) as mon_sess: <NEW_LINE> <INDENT> while not mon_sess.should_stop(): <NEW_LINE> <INDENT> mon_sess.run(model.train_op)
Training loop.
625941b34d74a7450ccd3f85
def rec_replace(in_str, old, new): <NEW_LINE> <INDENT> if old == new: <NEW_LINE> <INDENT> return in_str <NEW_LINE> <DEDENT> if old not in in_str: <NEW_LINE> <INDENT> return in_str <NEW_LINE> <DEDENT> return rec_replace(in_str.replace(old, new), old, new)
Recursively replace a substring in a string until no occurrences remain
625941b3a17c0f6771cbde16
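A quick check of the collapse behaviour; one caveat worth noting (not handled by the source): a new string that still contains old, such as replacing "a" with "aa", would recurse without terminating.

def rec_replace(in_str, old, new):
    if old == new or old not in in_str:
        return in_str
    return rec_replace(in_str.replace(old, new), old, new)

print(rec_replace("aaab", "aa", "a"))  # -> "ab": collapses repeatedly until no "aa" remains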
def show_possible_nodes(self): <NEW_LINE> <INDENT> possible_core_nodes = np.logical_and( self.steep_nodes, self.aspect_close_nodes) <NEW_LINE> figure(1) <NEW_LINE> gridshow.imshow_grid_at_node(self.grid, self.elevs) <NEW_LINE> figure(2) <NEW_LINE> gridshow.imshow_grid_at_node(self.grid, self.slopes) <NEW_LINE> figure(3) <NEW_LINE> gridshow.imshow_grid_at_node(self.grid, self.aspect) <NEW_LINE> figure(4) <NEW_LINE> gridshow.imshow_grid_at_node(self.grid, possible_core_nodes) <NEW_LINE> show()
Once the subsets by aspect and slope have been set, call this function to see both the whole elevation map, and the subset of nodes that will be searched.
625941b34527f215b584c21e
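The snippet relies on module-level imports that the entry omits; a plausible set, assuming the landlab plotting API (the exact import path may differ by version):

import numpy as np
from matplotlib.pyplot import figure, show
from landlab.plot import imshow as gridshow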
def TransitiveSecondaryParents(self, interface, propagate_event_target): <NEW_LINE> <INDENT> def walk(parents): <NEW_LINE> <INDENT> for parent in parents: <NEW_LINE> <INDENT> parent_name = parent.type.id <NEW_LINE> if IsDartCollectionType(parent_name): <NEW_LINE> <INDENT> result.append(parent_name) <NEW_LINE> continue <NEW_LINE> <DEDENT> if self.HasInterface(parent_name): <NEW_LINE> <INDENT> parent_interface = self.GetInterface(parent_name) <NEW_LINE> result.append(parent_interface) <NEW_LINE> walk(parent_interface.parents) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> result = [] <NEW_LINE> if interface.parents: <NEW_LINE> <INDENT> parent = interface.parents[0] <NEW_LINE> if (IsPureInterface(parent.type.id) or (propagate_event_target and parent.type.id == 'EventTarget')): <NEW_LINE> <INDENT> walk(interface.parents) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> walk(interface.parents[1:]) <NEW_LINE> <DEDENT> <DEDENT> return result
Returns a list of all non-primary parents. The list contains the interface objects for interfaces defined in the database, and the name for undefined interfaces.
625941b3009cb60464c63180
def lit_pix(self): <NEW_LINE> <INDENT> lit = 0 <NEW_LINE> for i in range(len(self.screen)): <NEW_LINE> <INDENT> for j in range(len(self.screen[0])): <NEW_LINE> <INDENT> if self.screen[i][j] == '#': <NEW_LINE> <INDENT> lit += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return lit
Count the number of lit pixels
625941b331939e2706e4cc33
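A standalone version of the same count (see the self.screen fix above); a sum over row.count is the idiomatic form:

def count_lit(screen):
    # screen is a list of string rows; '#' marks a lit pixel.
    return sum(row.count('#') for row in screen)

print(count_lit(["#.#", "...", "###"]))  # -> 5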
def strip_tags(self): <NEW_LINE> <INDENT> return lib.strip_tags(self.content)
Return the content field with all HTML tags stripped
625941b34527f215b584c21f
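lib.strip_tags is an external helper not shown here; a rough regex-based equivalent, offered only as a sketch (a real HTML parser is safer for untrusted input):

import re

def strip_tags(content):
    # Remove anything that looks like an HTML/XML tag.
    return re.sub(r'<[^>]+>', '', content)

print(strip_tags('<p>Hello <b>world</b></p>'))  # -> 'Hello world'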
def fetchMany(critic, user_ids=None, names=None): <NEW_LINE> <INDENT> import api.impl <NEW_LINE> assert isinstance(critic, api.critic.Critic) <NEW_LINE> assert (user_ids is None) != (names is None) <NEW_LINE> users = api.impl.user.fetchMany(critic, user_ids, names) <NEW_LINE> return users
Fetch many User objects with given user ids or names Exactly one of the 'user_ids' and 'names' arguments can be used. If the value of the provided 'user_ids' or 'names' argument is a set, the return value is also a set of User objects, otherwise it is a list of User objects, in the same order as the argument sequence. Exceptions: InvalidUserIds: if 'user_ids' is used and any element in it is not a valid user id. InvalidUserNames: if 'names' is used and any element in it is not a valid user name.
625941b36aa9bd52df036b64
def make_dict(cir_def,element): <NEW_LINE> <INDENT> e = element <NEW_LINE> volt_dict = {} <NEW_LINE> volt_names = [one_port_element(line).tokens[0] for line in cir_def if one_port_element(line).tokens[0][0].lower()== e] <NEW_LINE> for ind,name in enumerate(volt_names): <NEW_LINE> <INDENT> volt_dict[name] = ind <NEW_LINE> <DEDENT> return volt_dict
Makes a dictionary mapping each component of the given element type to its index
625941b3167d2b6e31218960
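Assuming one_port_element(line).tokens[0] yields component names like 'V1' or 'R2', the function maps each name of one element type to its position; a simplified sketch over plain name strings:

def make_index(names, element):
    # Keep names whose first letter matches the element type,
    # then map each to its position in that filtered order.
    filtered = [n for n in names if n[0].lower() == element]
    return {name: ind for ind, name in enumerate(filtered)}

print(make_index(["V1", "R1", "V2"], "v"))  # -> {'V1': 0, 'V2': 1}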
def __save_ro_album_artwork(self, data, album): <NEW_LINE> <INDENT> filename = self.get_album_cache_name(album) + ".jpg" <NEW_LINE> store_path = self._STORE_PATH + "/" + filename <NEW_LINE> self._save_pixbuf_from_data(store_path, data) <NEW_LINE> self.clean_album_cache(album) <NEW_LINE> GLib.idle_add(self.album_artwork_update, album.id)
Save artwork for a read only album @param data as bytes @param album as Album
625941b38e71fb1e9831d577
def __init__(self, directory, channels=None, defaultMode=None, systemRotateLength=1000000): <NEW_LINE> <INDENT> self._directory = directory <NEW_LINE> self._system_logger = logfile.LogFile( 'system.logs', directory, systemRotateLength, defaultMode) <NEW_LINE> self._channel_loggers = {} <NEW_LINE> for channel_name in (channels or []): <NEW_LINE> <INDENT> self._channel_loggers[channel_name] = DailyFileLogger( channel_name, directory, defaultMode)
Creates one L{DailyFileLogger} logger for each channel in the list, and one L{twisted.python.logfile.LogFile} (which rotates based on the length of the file) for system messages. @param directory: path where all the log files should go @type directory: C{str} @param channels: a list of channel names @type channels: C{list} @param defaultMode: mode used to create the files. @type defaultMode: C{int} @param systemRotateLength: size at which the system log file rotates. Defaults to 1M. @type systemRotateLength: C{int}
625941b3aad79263cf3907fc
def get_words_in_creator_names(df, creator_column): <NEW_LINE> <INDENT> creators = list(set(df[creator_column].tolist())) <NEW_LINE> words_in_creator_names = [str(creator).split(' ') for creator in creators] <NEW_LINE> words_in_creator_names = [item for sublist in words_in_creator_names for item in sublist] <NEW_LINE> words_in_creator_names = set(words_in_creator_names) <NEW_LINE> return words_in_creator_names
Builds the set of words appearing in creator names so they can be filtered out of title words --> see filter_title_words()
625941b321a7993f00bc7aaa
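A small runnable check of the word-collection step, using a hypothetical two-row frame:

import pandas as pd

df = pd.DataFrame({"creator": ["Jane Doe", "John Smith"]})
creators = list(set(df["creator"].tolist()))
words = {w for name in creators for w in str(name).split(" ")}
print(sorted(words))  # -> ['Doe', 'Jane', 'John', 'Smith']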
def main(): <NEW_LINE> <INDENT> logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s') <NEW_LINE> if len(sys.argv) != 2: <NEW_LINE> <INDENT> logger.critical("Pass the script one directory") <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> basedir = sys.argv[1] <NEW_LINE> for filepath in datastore_filepaths(basedir): <NEW_LINE> <INDENT> fix_file(filepath)
Main method, called when you run the script.
625941b394891a1f4081b869
def display(self, new_file = False, done_message = None): <NEW_LINE> <INDENT> if new_file: <NEW_LINE> <INDENT> self.output_labels() <NEW_LINE> self._stdout.write(self.ANSI_save_cursor_pos) <NEW_LINE> self._stdout.flush() <NEW_LINE> return <NEW_LINE> <DEDENT> if not (new_file or done_message) and not self._display_needed(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> timedelta = self.time_current - self.time_start <NEW_LINE> sec_elapsed = timedelta.days * 86400 + timedelta.seconds + float(timedelta.microseconds)/1000000.0 <NEW_LINE> if (sec_elapsed > 0): <NEW_LINE> <INDENT> print_speed = Utils.formatSize((self.current_position - self.initial_position) / sec_elapsed, True, True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print_speed = (0, "") <NEW_LINE> <DEDENT> self._stdout.write(self.ANSI_restore_cursor_pos) <NEW_LINE> self._stdout.write(self.ANSI_erase_to_eol) <NEW_LINE> self._stdout.write("%(current)s of %(total)s %(percent)3d%% in %(elapsed)ds %(speed).2f %(speed_coeff)sB/s" % { "current" : str(self.current_position).rjust(len(str(self.total_size))), "total" : self.total_size, "percent" : self.total_size and (self.current_position * 100 / self.total_size) or 0, "elapsed" : sec_elapsed, "speed" : print_speed[0], "speed_coeff" : print_speed[1] }) <NEW_LINE> if done_message: <NEW_LINE> <INDENT> self._stdout.write(" %s\n" % done_message) <NEW_LINE> <DEDENT> self._stdout.flush()
display(new_file = False[/True], done_message = None)
625941b3d164cc6175782b0f
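The method redraws one terminal line in place via ANSI save/restore cursor codes; a tiny standalone sketch of the same effect using a plain carriage return instead:

import sys
import time

for i in range(5):
    sys.stdout.write("\r" + " " * 40 + "\r")  # erase the line, return to column 0
    sys.stdout.write("%d of 4 (%d%%)" % (i, i * 100 // 4))
    sys.stdout.flush()
    time.sleep(0.1)
print()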
def on_train_begin(self, logs=None, **kwargs): <NEW_LINE> <INDENT> logs = logs or {} <NEW_LINE> for logger in self.loggers: <NEW_LINE> <INDENT> logger.on_train_begin(logs, **kwargs)
At the start of training Args: logs: dictionary of logs
625941b36fece00bbac2d4fd
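The callback fans the event out to every wrapped logger; a minimal sketch of that dispatch pattern with a stand-in PrintLogger:

class MultiLogger:
    def __init__(self, loggers):
        self.loggers = loggers

    def on_train_begin(self, logs=None, **kwargs):
        logs = logs or {}
        for logger in self.loggers:
            logger.on_train_begin(logs, **kwargs)

class PrintLogger:
    def on_train_begin(self, logs, **kwargs):
        print("training started with", logs)

MultiLogger([PrintLogger()]).on_train_begin({"epochs": 3})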
def test_api_challenge_get_flags_non_admin(): <NEW_LINE> <INDENT> app = create_kmactf() <NEW_LINE> with app.app_context(): <NEW_LINE> <INDENT> gen_challenge(app.db) <NEW_LINE> with app.test_client() as client: <NEW_LINE> <INDENT> r = client.get("/api/v1/challenges/1/flags", json="") <NEW_LINE> assert r.status_code == 403 <NEW_LINE> <DEDENT> <DEDENT> destroy_kmactf(app)
Can a user get /api/v1/challenges/<challenge_id>/flags if not admin
625941b35510c4643540f1bc
def contentor(generator): <NEW_LINE> <INDENT> for page in generator.pages: <NEW_LINE> <INDENT> if page.summary == page.content: <NEW_LINE> <INDENT> page.get_summary = lambda disable: '' <NEW_LINE> <DEDENT> if hasattr(page, 'image') and hasattr(page, 'type') and page.type.lower() == 'team': <NEW_LINE> <INDENT> image_dest_path = os.path.join(os.path.dirname(page.url), page.image.split('}')[-1]) <NEW_LINE> image_source_pth = os.path.join(os.path.dirname(page.source_path), page.image) <NEW_LINE> image_dest_path = image_source_pth.split("content")[0] + "output/" + image_dest_path <NEW_LINE> if not os.path.exists(os.path.dirname(image_dest_path)): <NEW_LINE> <INDENT> os.makedirs(os.path.dirname(image_dest_path)) <NEW_LINE> <DEDENT> shutil.copy(image_source_pth, image_dest_path) <NEW_LINE> page.image = os.path.join(os.path.dirname(page.url), page.image)
Suppress the page summary when it is identical to the page content.
625941b32eb69b55b151c66c
def is_version_newer(semver1, semver2): <NEW_LINE> <INDENT> semver1 = tuple(map(int, (re.sub(r"[^0-9.]", "", semver1).split(".")))) <NEW_LINE> semver2 = tuple(map(int, (re.sub(r"[^0-9.]", "", semver2).split(".")))) <NEW_LINE> return semver1 >= semver2
Compares version strings and checks whether semver1 is newer than or equal to semver2. :returns: True if semver1 is newer than or matches semver2, False otherwise.
625941b385dfad0860c3ac1b
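Usage, with one caveat: tuple comparison treats '1.2' and '1.2.0' as tuples of different lengths ((1, 2) < (1, 2, 0)), so equal-length inputs are assumed here:

import re

def is_version_newer(semver1, semver2):
    v1 = tuple(map(int, re.sub(r"[^0-9.]", "", semver1).split(".")))
    v2 = tuple(map(int, re.sub(r"[^0-9.]", "", semver2).split(".")))
    return v1 >= v2

print(is_version_newer("v1.10.0", "1.9.3"))  # -> True: non-digits stripped, then tuple-compared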
def api_update(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = self._api_frontend.send(endpoint='Frontend/GetStatus', opts={'timeout': 1}) <NEW_LINE> if list(result.keys())[0] in ['Abort', 'Warning']: <NEW_LINE> <INDENT> self._volume['control'] = False <NEW_LINE> if self._ping_host(): <NEW_LINE> <INDENT> self._state = STATE_UNKNOWN <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._state = STATE_OFF <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> self._frontend = result['FrontendStatus']['State'] <NEW_LINE> if self._frontend['state'] == 'idle': <NEW_LINE> <INDENT> self._state = STATE_IDLE <NEW_LINE> <DEDENT> elif self._frontend['state'].startswith('Watching'): <NEW_LINE> <INDENT> if self._frontend['playspeed'] == '0': <NEW_LINE> <INDENT> self._state = STATE_PAUSED <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._state = STATE_PLAYING <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._state = STATE_ON <NEW_LINE> <DEDENT> if 'volume' in self._frontend: <NEW_LINE> <INDENT> self._volume['control'] = True <NEW_LINE> self._volume['level'] = int(self._frontend['volume']) <NEW_LINE> <DEDENT> if 'mute' in self._frontend: <NEW_LINE> <INDENT> self._volume['muted'] = (self._frontend['mute'] != '0') <NEW_LINE> <DEDENT> if self._state not in [STATE_PLAYING, STATE_PAUSED]: <NEW_LINE> <INDENT> self._media_image_url = None <NEW_LINE> <DEDENT> elif self._show_artwork and self._has_playing_media_changed(): <NEW_LINE> <INDENT> self._media_image_url = self._get_artwork() <NEW_LINE> <DEDENT> <DEDENT> except Exception as error: <NEW_LINE> <INDENT> self._state = STATE_OFF <NEW_LINE> _LOGGER.warning("Error with '%s' at %s:%d - %s", self._name, self._host_frontend, self._port_frontend, error) <NEW_LINE> _LOGGER.warning(self._frontend) <NEW_LINE> return False <NEW_LINE> <DEDENT> return True
Use the API to get the latest status.
625941b3e64d504609d74602
def test_uninstall_raise(self): <NEW_LINE> <INDENT> mock_hana_inst = MagicMock() <NEW_LINE> mock_hana_inst.uninstall.side_effect = hanamod.hana.HanaError( 'hana error' ) <NEW_LINE> mock_hana = MagicMock(return_value=mock_hana_inst) <NEW_LINE> with patch.object(hanamod, '_init', mock_hana): <NEW_LINE> <INDENT> with pytest.raises(exceptions.CommandExecutionError) as err: <NEW_LINE> <INDENT> hanamod.uninstall('root', 'pass', None, 'prd', '00', 'pass') <NEW_LINE> <DEDENT> mock_hana.assert_called_once_with('prd', '00', 'pass') <NEW_LINE> mock_hana_inst.uninstall.assert_called_once_with('root', 'pass') <NEW_LINE> assert 'hana error' in str(err)
Test uninstall method when the underlying call raises an error
625941b3507cdc57c6306a94
def get_next_state(self, a, i): <NEW_LINE> <INDENT> cs = self.states[i] <NEW_LINE> ac = self.dcoords(a) <NEW_LINE> ns = cs + ac <NEW_LINE> ns = tuple(ns) <NEW_LINE> if self.is_state(ns): <NEW_LINE> <INDENT> return self.state_index(ns) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return i
Fast next state computation for deterministic models.
625941b39b70327d1c4e0b96
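A sketch of the surrounding model this method assumes: states as coordinate tuples, dcoords mapping an action index to a coordinate delta, and off-grid moves leaving the state unchanged. The GridModel wrapper is inferred, not from the source:

import numpy as np

class GridModel:
    def __init__(self, width, height):
        self.states = [(x, y) for x in range(width) for y in range(height)]

    def dcoords(self, a):
        # Four deterministic actions: right, left, up, down.
        return np.array([(1, 0), (-1, 0), (0, 1), (0, -1)][a])

    def is_state(self, s):
        return s in self.states

    def state_index(self, s):
        return self.states.index(s)

    def get_next_state(self, a, i):
        ns = tuple(np.array(self.states[i]) + self.dcoords(a))
        # Invalid moves keep the agent in place.
        return self.state_index(ns) if self.is_state(ns) else i

m = GridModel(2, 2)
print(m.get_next_state(0, 0))  # -> 2, the index of (1, 0)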