text — string, lengths 78 to 104k · score — float64, range 0 to 0.18
async def update(self):
    """Fetch the latest data from IP Webcam."""
    status_data = await self._request('/status.json?show_avail=1')
    if status_data:
        self.status_data = status_data

    sensor_data = await self._request('/sensors.json')
    if sensor_data:
        self.sensor_data = sensor_data
0.005666
def get_stp_mst_detail_output_cist_cist_bridge_id(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    get_stp_mst_detail = ET.Element("get_stp_mst_detail")
    config = get_stp_mst_detail
    output = ET.SubElement(get_stp_mst_detail, "output")
    cist = ET.SubElement(output, "cist")
    cist_bridge_id = ET.SubElement(cist, "cist-bridge-id")
    cist_bridge_id.text = kwargs.pop('cist_bridge_id')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
0.003546
def _copyItemToClipboard(self):
    """Callback for item menu."""
    if self._current_item is None:
        return
    dp = getattr(self._current_item, '_dp', None)
    if dp and dp.archived:
        path = dp.fullpath.replace(" ", "\\ ")
        QApplication.clipboard().setText(path, QClipboard.Clipboard)
        QApplication.clipboard().setText(path, QClipboard.Selection)
0.00489
def _iter_categorized_partners(self, state):
    '''
    Iterator over the partners giving as extra param partners of the
    same category.
    '''
    # categorize partners into the structure
    # partner_class -> list of its instances
    categorized = dict()
    for partner in state.descriptor.partners:
        category, index = categorized.get(
            partner.__class__, (list(), len(categorized)))
        category.append(partner)
        categorized[partner.__class__] = tuple([category, index])

    for category, (brothers, index) in sorted(
            categorized.items(), key=lambda x: x[1][1]):
        for partner in brothers:
            yield partner, brothers
0.002469
def create_vm(client, name, compute_resource, datastore, disksize, nics,
              memory, num_cpus, guest_id, host=None):
    """Create a virtual machine using the specified values.

    :param name: The name of the VM to create.
    :type name: str
    :param compute_resource: The name of a ComputeResource in which to
        create the VM.
    :type compute_resource: str
    :param datastore: The name of the datastore on which to create the VM.
    :type datastore: str
    :param disksize: The size of the disk, specified in KB, MB or GB.
        e.g. 20971520KB, 20480MB, 20GB.
    :type disksize: str
    :param nics: The NICs to create, specified in a list of dicts which
        contain a "network_name" and "type" key. e.g.
        {"network_name": "VM Network", "type": "VirtualE1000"}
    :type nics: list of dicts
    :param memory: The amount of memory for the VM. Specified in KB, MB or
        GB. e.g. 2097152KB, 2048MB, 2GB.
    :type memory: str
    :param num_cpus: The number of CPUs the VM will have.
    :type num_cpus: int
    :param guest_id: The vSphere string of the VM guest you are creating.
        The list of VMs can be found at
        http://pubs.vmware.com/vsphere-50/index.jsp?topic=/com.vmware.wssdk.apiref.doc_50/right-pane.html
    :type guest_id: str
    :param host: The name of the host (default: None), if you want to
        provision the VM on a specific host.
    :type host: str
    """
    print("Creating VM %s" % name)
    # If the host is not set, use the ComputeResource as the target
    if host is None:
        target = client.find_entity_view("ComputeResource",
                                         filter={"name": compute_resource})
        resource_pool = target.resourcePool
    else:
        target = client.find_entity_view("HostSystem",
                                         filter={"name": host})
        resource_pool = target.parent.resourcePool

    disksize_pattern = re.compile(r"^\d+[KMG]B")
    if disksize_pattern.match(disksize) is None:
        print("Disk size %s is invalid. Try \"12G\" or similar" % disksize)
        sys.exit(1)

    if disksize.endswith("GB"):
        disksize_kb = int(disksize[:-2]) * 1024 * 1024
    elif disksize.endswith("MB"):
        disksize_kb = int(disksize[:-2]) * 1024
    elif disksize.endswith("KB"):
        disksize_kb = int(disksize[:-2])
    else:
        print("Disk size %s is invalid. Try \"12G\" or similar" % disksize)

    memory_pattern = re.compile(r"^\d+[KMG]B")
    if memory_pattern.match(memory) is None:
        print("Memory size %s is invalid. Try \"12G\" or similar" % memory)
        sys.exit(1)

    if memory.endswith("GB"):
        memory_mb = int(memory[:-2]) * 1024
    elif memory.endswith("MB"):
        memory_mb = int(memory[:-2])
    elif memory.endswith("KB"):
        memory_mb = int(memory[:-2]) / 1024
    else:
        print("Memory size %s is invalid. Try \"12G\" or similar" % memory)

    # A list of devices to be assigned to the VM
    vm_devices = []

    # Create a disk controller
    controller = create_controller(client, "VirtualLsiLogicController")
    vm_devices.append(controller)

    ds_to_use = None
    for ds in target.datastore:
        if ds.name == datastore:
            ds_to_use = ds
            break

    if ds_to_use is None:
        print("Could not find datastore on %s with name %s" %
              (target.name, datastore))
        sys.exit(1)

    # Ensure the datastore is accessible and has enough space
    if ds_to_use.summary.accessible is not True:
        print("Datastore (%s) exists, but is not accessible" %
              ds_to_use.summary.name)
        sys.exit(1)
    if ds_to_use.summary.freeSpace < disksize_kb * 1024:
        print("Datastore (%s) exists, but does not have sufficient"
              " free space." % ds_to_use.summary.name)
        sys.exit(1)

    disk = create_disk(client, datastore=ds_to_use, disksize_kb=disksize_kb)
    vm_devices.append(disk)

    for nic in nics:
        nic_spec = create_nic(client, target, nic)
        if nic_spec is None:
            print("Could not create spec for NIC")
            sys.exit(1)
        # Append the nic spec to the vm_devices list
        vm_devices.append(nic_spec)

    vmfi = client.create("VirtualMachineFileInfo")
    vmfi.vmPathName = "[%s]" % ds_to_use.summary.name

    vm_config_spec = client.create("VirtualMachineConfigSpec")
    vm_config_spec.name = name
    vm_config_spec.memoryMB = memory_mb
    vm_config_spec.files = vmfi
    vm_config_spec.annotation = "Auto-provisioned by psphere"
    vm_config_spec.numCPUs = num_cpus
    vm_config_spec.guestId = guest_id
    vm_config_spec.deviceChange = vm_devices

    # Find the datacenter of the target
    if target.__class__.__name__ == "HostSystem":
        datacenter = target.parent.parent.parent
    else:
        datacenter = target.parent.parent

    try:
        task = datacenter.vmFolder.CreateVM_Task(config=vm_config_spec,
                                                 pool=resource_pool)
    except VimFault as e:
        print("Failed to create %s: " % e)
        sys.exit()

    while task.info.state in ["queued", "running"]:
        time.sleep(5)
        task.update()
        print("Waiting 5 more seconds for VM creation")

    if task.info.state == "success":
        elapsed_time = task.info.completeTime - task.info.startTime
        print("Successfully created new VM %s. Server took %s seconds." %
              (name, elapsed_time.seconds))
    elif task.info.state == "error":
        print("ERROR: The task for creating the VM has finished with"
              " an error. If an error was reported it will follow.")
        try:
            print("ERROR: %s" % task.info.error.localizedMessage)
        except AttributeError:
            print("ERROR: There is no error message available.")
    else:
        print("UNKNOWN: The task reports an unknown state %s" %
              task.info.state)
0.001006
def nested_shape(array_or_tuple):
    """Figures out the shape of tensors possibly embedded in tuples
    i.e. [0,0] returns (2)
    ([0,0], [0,0]) returns (2,2)
    (([0,0], [0,0]), [0,0]) returns ((2,2),2)
    """
    if hasattr(array_or_tuple, 'size'):
        # pytorch tensors use V.size() to get size of tensor
        return list(array_or_tuple.size())
    elif hasattr(array_or_tuple, 'get_shape'):
        # tensorflow uses V.get_shape() to get size of tensor
        return array_or_tuple.get_shape().as_list()
    elif hasattr(array_or_tuple, 'shape'):
        return array_or_tuple.shape

    try:
        # treat object as iterable
        return [nested_shape(item) for item in list(array_or_tuple)]
    except TypeError:
        # object is not actually iterable
        # LB: Maybe we should throw an error?
        return []
0.002356
def samples(self, nsamples, rstate=None):
    """
    Draw `nsamples` samples randomly distributed within the unit cube.

    Returns
    -------
    x : `~numpy.ndarray` with shape (nsamples, ndim)
        A collection of coordinates within the unit cube.

    """
    if rstate is None:
        rstate = np.random

    xs = np.array([self.sample(rstate=rstate) for i in range(nsamples)])
    return xs
0.004454
def minor_tick_mark(self):
    """
    Read/write :ref:`XlTickMark` value specifying the type of minor tick
    mark for this axis.
    """
    minorTickMark = self._element.minorTickMark
    if minorTickMark is None:
        return XL_TICK_MARK.CROSS
    return minorTickMark.val
0.00641
def _load_nested_libraries(self, library_path, target_dict):
    """Recursively load libraries within path

    Adds all libraries specified in a given path and stores them into the
    provided library dictionary. The library entries in the dictionary
    consist only of the path to the library in the file system.

    :param library_path: the path to add all libraries from
    :param target_dict: the target dictionary to store all loaded libraries to
    """
    for library_name in os.listdir(library_path):
        library_folder_path, library_name = self.check_clean_path_of_library(
            library_path, library_name)
        full_library_path = os.path.join(library_path, library_name)
        if os.path.isdir(full_library_path) and library_name[0] != '.':
            if os.path.exists(os.path.join(full_library_path, storage.STATEMACHINE_FILE)) \
                    or os.path.exists(os.path.join(full_library_path, storage.STATEMACHINE_FILE_OLD)):
                target_dict[library_name] = full_library_path
            else:
                target_dict[library_name] = {}
                self._load_nested_libraries(full_library_path,
                                            target_dict[library_name])
                target_dict[library_name] = OrderedDict(
                    sorted(target_dict[library_name].items()))
0.007491
def _create_list(value, allow_filename=False):
    """Create a list from the input value.

    If the input is a list already, return it.
    If the input is a comma-separated string, split it.
    """
    if isinstance(value, list):
        return value
    elif isinstance(value, string_type):
        if allow_filename and os.path.isfile(value):
            with codecs.open(value, 'r', encoding="utf-8") as handle:
                return handle.read().splitlines()
        return value.split(',')
    else:
        raise ValueError("Can't create list for input {}".format(value))
0.001712
def merge_arena(self, mujoco_arena):
    """Adds arena model to the MJCF model."""
    self.arena = mujoco_arena
    self.table_top_offset = mujoco_arena.table_top_abs
    self.table_size = mujoco_arena.table_full_size
    self.merge(mujoco_arena)
0.007491
def addOntology(self, ontology):
    """
    Add an ontology map to this data repository.
    """
    self._ontologyNameMap[ontology.getName()] = ontology
    self._ontologyIdMap[ontology.getId()] = ontology
    self._ontologyIds.append(ontology.getId())
0.007194
def get_services(self):
    """
    Retrieves the list of system services that are currently running in
    this process.

    @see: L{System.get_services}

    @rtype:  list( L{win32.ServiceStatusProcessEntry} )
    @return: List of service status descriptors.
    """
    self.__load_System_class()
    pid = self.get_pid()
    return [d for d in System.get_active_services() if d.ProcessId == pid]
0.004545
def is_hash_in_index(self, filehash):
    """
    Check if there is a document using this file hash
    """
    filehash = (u"%X" % filehash)
    results = self.__searcher.search(
        whoosh.query.Term('docfilehash', filehash))
    return bool(results)
0.007042
def define_params(self):
    '''
    Define parameters.
    '''
    input_dim = self.input_dim
    hidden_dim = self.hidden_dim
    prefix = self.name
    self.w_matrix = tf.Variable(
        tf.random_normal([input_dim, 3 * hidden_dim], stddev=0.1),
        name='/'.join([prefix, 'W']))
    self.U = tf.Variable(
        tf.random_normal([hidden_dim, 3 * hidden_dim], stddev=0.1),
        name='/'.join([prefix, 'U']))
    self.bias = tf.Variable(
        tf.random_normal([1, 3 * hidden_dim], stddev=0.1),
        name='/'.join([prefix, 'b']))
    return self
0.007716
def delete(vpc_id=None, name=None, vpc_name=None, tags=None,
           region=None, key=None, keyid=None, profile=None):
    '''
    Given a VPC ID or VPC name, delete the VPC.

    Returns {deleted: true} if the VPC was deleted and returns
    {deleted: false} if the VPC was not deleted.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_vpc.delete vpc_id='vpc-6b1fe402'
        salt myminion boto_vpc.delete name='myvpc'

    '''
    if name:
        log.warning('boto_vpc.delete: name parameter is deprecated '
                    'use vpc_name instead.')
        vpc_name = name

    if not _exactly_one((vpc_name, vpc_id)):
        raise SaltInvocationError('One (but not both) of vpc_name or vpc_id must be '
                                  'provided.')
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not vpc_id:
            vpc_id = _get_id(vpc_name=vpc_name, tags=tags, region=region,
                             key=key, keyid=keyid, profile=profile)
            if not vpc_id:
                return {'deleted': False,
                        'error': {'message': 'VPC {0} not found'.format(vpc_name)}}

        if conn.delete_vpc(vpc_id):
            log.info('VPC %s was deleted.', vpc_id)
            if vpc_name:
                _cache_id(vpc_name, resource_id=vpc_id, invalidate=True,
                          region=region, key=key, keyid=keyid, profile=profile)
            return {'deleted': True}
        else:
            log.warning('VPC %s was not deleted.', vpc_id)
            return {'deleted': False}
    except BotoServerError as e:
        return {'deleted': False, 'error': __utils__['boto.get_error'](e)}
0.001673
def _convert_entity_to_json(source):
    ''' Converts an entity object to json to send.
    The entity format is:
    {
       "Address":"Mountain View",
       "Age":23,
       "AmountDue":200.23,
       "CustomerCode@odata.type":"Edm.Guid",
       "CustomerCode":"c9da6455-213d-42c9-9a79-3e9149a57833",
       "CustomerSince@odata.type":"Edm.DateTime",
       "CustomerSince":"2008-07-10T00:00:00",
       "IsActive":true,
       "NumberOfOrders@odata.type":"Edm.Int64",
       "NumberOfOrders":"255",
       "PartitionKey":"mypartitionkey",
       "RowKey":"myrowkey"
    }
    '''
    properties = {}

    # set properties type for types we know if value has no type info.
    # if value has type info, then set the type to value.type
    for name, value in source.items():
        mtype = ''
        if isinstance(value, EntityProperty):
            conv = _EDM_TO_ENTITY_CONVERSIONS.get(value.type)
            if conv is None:
                raise TypeError(
                    _ERROR_TYPE_NOT_SUPPORTED.format(value.type))
            mtype, value = conv(value.value)
        else:
            conv = _PYTHON_TO_ENTITY_CONVERSIONS.get(type(value))
            if conv is None and sys.version_info >= (3,) and value is None:
                conv = _to_entity_none
            if conv is None:
                raise TypeError(
                    _ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY.format(
                        type(value).__name__))
            mtype, value = conv(value)

        # form the property node
        properties[name] = value
        if mtype:
            properties[name + '@odata.type'] = mtype

    # generate the entity_body
    return dumps(properties)
0.000592
def create(self, req, driver):
    """Create a network

    Create a new network on special cloud with:

    :Param req
    :Type object Request
    """
    response = driver.create_network(req.params)
    data = {
        'action': "create",
        'controller': "network",
        'cloud': req.environ['calplus.cloud'],
        'response': response
    }
    return data
0.004608
def close(self):
    """
    Starts closing the HighFive master. The server will be closed and all
    queued job sets will be cancelled.
    """
    if self._closed:
        return
    self._closed = True
    self._server.close()
    self._manager.close()
    for worker in self._workers:
        worker.close()
0.005571
def compound_crossspec(a_data, tbin, Df=None, pointProcess=False):
    """
    Calculate cross spectra of compound signals.

    a_data is a list of datasets (a_data = [data1, data2, ...]).
    For each dataset in a_data, the compound signal is calculated
    and the cross spectra between these compound signals are computed.

    If pointProcess=True, power spectra are normalized by the length T of
    the time series.

    Parameters
    ----------
    a_data : list of numpy.ndarrays
        Array: 1st axis unit, 2nd axis time.
    tbin : float
        Binsize in ms.
    Df : float/None,
        Window width of sliding rectangular filter (smoothing),
        None -> no smoothing.
    pointProcess : bool
        If set to True, crossspectrum is normalized to signal length `T`.

    Returns
    -------
    freq : tuple
        numpy.ndarray of frequencies.
    CRO : tuple
        3 dim numpy.ndarray; 1st axis first compound signal, 2nd axis
        second compound signal, 3rd axis frequency.

    Examples
    --------
    >>> compound_crossspec([np.array([analog_sig1, analog_sig2]),
    ...                     np.array([analog_sig3, analog_sig4])],
    ...                    tbin, Df=Df)
    Out[1]: (freq, CRO)

    >>> CRO.shape
    Out[2]: (2, 2, len(analog_sig1))

    """
    a_mdata = []
    for data in a_data:
        a_mdata.append(np.sum(data, axis=0))  # calculate compound signals
    return crossspec(np.array(a_mdata), tbin, Df, units=False,
                     pointProcess=pointProcess)
0.003307
def sys_call(cmd):
    """Execute cmd and capture stdout and stderr

    :param cmd: command to be executed
    :return: (stdout, stderr)
    """
    p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, close_fds=True)
    return p.stdout.readlines(), p.stderr.readlines()
0.006557
def radintpix(data, dataerr, bcx, bcy, mask=None, pix=None,
              returnavgpix=False, phi0=0, dphi=0, returnmask=False,
              symmetric_sector=False, doslice=False, errorpropagation=2,
              autoqrange_linear=True):
    """Radial integration (averaging) on the detector plane

    Inputs:
        data: scattering pattern matrix (np.ndarray, dtype: np.double)
        dataerr: error matrix (np.ndarray, dtype: np.double; or None)
        bcx, bcy: beam position, counting from 1
        mask: mask matrix (np.ndarray, dtype: np.uint8)
        pix: pixel distance values (abscissa) from origin. If None,
            auto-determine.
        returnavgpix: if the averaged pixel values should be returned
        phi0: starting angle (radian) for sector integration. If doslice
            is True, this is the angle of the slice.
        dphi: angular width (radian) of the sector or width (pixels) of
            the slice. If negative or zero, full radial average is
            requested.
        returnmask: if the effective mask matrix is to be returned
        symmetric_sector: the sector defined by phi0+pi is also to be
            used for integration.
        doslice: if slicing is to be done instead of sector averaging.
        autoqrange_linear: if the automatically determined q-range is to
            be linspace-d. Otherwise log10 spacing will be applied.

    Outputs: pix, Intensity, [Error], Area, [mask]
        Error is only returned if dataerr is not None
        mask is only returned if returnmask is True

    Relies heavily (completely) on radint().
    """
    if isinstance(data, np.ndarray):
        data = data.astype(np.double)
    if isinstance(dataerr, np.ndarray):
        dataerr = dataerr.astype(np.double)
    if isinstance(mask, np.ndarray):
        mask = mask.astype(np.uint8)
    return radint(data, dataerr, -1, -1, -1, 1.0 * bcx, 1.0 * bcy, mask,
                  pix, returnavgpix, phi0, dphi, returnmask,
                  symmetric_sector, doslice, False, errorpropagation,
                  autoqrange_linear)
0.001473
def conceptscheme_from_uri(conceptscheme_uri, **kwargs):
    '''
    Read a SKOS Conceptscheme from a :term:`URI`

    :param string conceptscheme_uri: URI of the conceptscheme.
    :rtype: skosprovider.skos.ConceptScheme
    '''
    # get the conceptscheme
    # ensure it only ends in one slash
    conceptscheme_uri = conceptscheme_uri.strip('/') + '/'
    s = kwargs.get('session', requests.Session())
    graph = uri_to_graph('%s.rdf' % (conceptscheme_uri), session=s)

    notes = []
    labels = []
    if graph is not False:
        for s, p, o in graph.triples((URIRef(conceptscheme_uri), RDFS.label, None)):
            label = Label(o.toPython(), "prefLabel", 'en')
            labels.append(label)
    conceptscheme = ConceptScheme(
        conceptscheme_uri,
        labels=labels,
        notes=notes
    )
    return conceptscheme
0.002361
def validate(config):
    '''
    Validate the beacon configuration
    '''
    if not isinstance(config, list):
        return False, ('Configuration for network_settings '
                       'beacon must be a list.')
    else:
        _config = {}
        list(map(_config.update, config))

        interfaces = _config.get('interfaces', {})
        if isinstance(interfaces, list):
            # Old syntax
            return False, ('interfaces section for network_settings beacon'
                           ' must be a dictionary.')

        for item in interfaces:
            if not isinstance(_config['interfaces'][item], dict):
                return False, ('Interface attributes for network_settings beacon'
                               ' must be a dictionary.')
            if not all(j in ATTRS for j in _config['interfaces'][item]):
                return False, ('Invalid attributes in beacon configuration.')
    return True, 'Valid beacon configuration'
0.003074
def startElement(self, name, attrs):
    """
    Handle opening elements.

    :param name: Name of the element
    :type name: String
    :param attrs: Attributes of the element
    :type attrs: Dict
    """
    if name in self.ignore_start:
        return
    try:
        handler = getattr(self, '_handle_start_%s' % name)
    except AttributeError:
        raise KeyError("Unknown element start '%s'" % name)
    handler(attrs)
0.004107
async def request(self, method: base.String,
                  data: Optional[Dict] = None,
                  files: Optional[Dict] = None,
                  **kwargs) -> Union[List, Dict, base.Boolean]:
    """
    Make a request to Telegram Bot API

    https://core.telegram.org/bots/api#making-requests

    :param method: API method
    :type method: :obj:`str`
    :param data: request parameters
    :type data: :obj:`dict`
    :param files: files
    :type files: :obj:`dict`
    :return: result
    :rtype: Union[List, Dict]
    :raise: :obj:`aiogram.exceptions.TelegramApiError`
    """
    return await api.make_request(self.session, self.__token, method, data, files,
                                  proxy=self.proxy, proxy_auth=self.proxy_auth,
                                  timeout=self.timeout, **kwargs)
0.008314
def run_program(prog_list, debug, shell):
    """Run a program and check program return code

    Note that some commands don't work well with Popen. So if this function
    is specifically called with 'shell=True', then it will run the old
    'os.system'. In which case, there is no program output
    """
    try:
        if not shell:
            process = Popen(prog_list, stdout=PIPE, stderr=PIPE)
            stdout, stderr = process.communicate()
            retcode = process.returncode
            if debug >= 1:
                print("Program : ", " ".join(prog_list))
                print("Return Code: ", retcode)
                print("Stdout: ", stdout)
                print("Stderr: ", stderr)
            return bool(retcode)
        else:
            command = " ".join(prog_list)
            os.system(command)
            return True
    except:
        return False
0.00565
def insert(self, order_id, card_id, appid, card_ext):
    """
    Create an invoice card coupon and put it into the user's card package.

    For details, see
    https://mp.weixin.qq.com/wiki?id=mp1497082828_r1cI2

    :param order_id: Order ID, the unique identifier of a single invoicing
        request within the merchant
    :param card_id: ID of the invoice card template
    :param appid: Merchant AppID
    :param card_ext: Detailed invoice content
    :type card_ext: dict
    :return: A random anti-duplication string, and the user's Open ID
    """
    return self._post(
        'insert',
        data={
            'order_id': order_id,
            'card_id': card_id,
            'appid': appid,
            'card_ext': card_ext,
        },
    )
0.00321
def feed_amount(self, amount):
    '''Calling this function sets the form feed amount to the
    specified setting.

    Args:
        amount: the form feed setting you desire. Options are '1/8',
            '1/6', 'x/180', and 'x/60', with x being your own desired
            amount. x must be a minimum of 24 for 'x/180' and 8 for
            'x/60'.
    Returns:
        None
    Raises:
        None
    '''
    n = None
    if amount == '1/8':
        amount = '0'
    elif amount == '1/6':
        amount = '2'
    elif re.search('/180', amount):
        n = re.search(r"(\d+)/180", amount)
        n = n.group(1)
        amount = '3'
    elif re.search('/60', amount):
        n = re.search(r"(\d+)/60", amount)
        n = n.group(1)
        amount = 'A'
    if n:
        self.send(chr(27) + amount + n)
    else:
        self.send(chr(27) + amount)
0.008658
def fd(self):
    """:return: file descriptor used to create the underlying mapping.

    **Note:** it is not required to be valid anymore
    :raise ValueError: if the mapping was not created by a file descriptor"""
    if isinstance(self._rlist.path_or_fd(), string_types()):
        raise ValueError("File descriptor queried although mapping was generated from path")
    # END handle type
    return self._rlist.path_or_fd()
0.008772
def is_valid(self, qstr=None):
    """Return True if string is valid"""
    if qstr is None:
        qstr = self.currentText()
    return is_module_or_package(to_text_string(qstr))
0.01
def minimize_source(source):
    """Remove comments and docstrings from Python `source`, preserving line
    numbers and syntax of empty blocks.

    :param str source:
        The source to minimize.
    :returns str:
        The minimized source.
    """
    source = mitogen.core.to_text(source)
    tokens = tokenize.generate_tokens(StringIO(source).readline)
    tokens = strip_comments(tokens)
    tokens = strip_docstrings(tokens)
    tokens = reindent(tokens)
    return tokenize.untokenize(tokens)
0.001972
def prompt(youtube_list):
    '''
    Prompts for song number from list of songs
    '''
    option = int(input('\nEnter song number > '))
    try:
        song_url = list(youtube_list.values())[option - 1]
        song_title = list(youtube_list.keys())[option - 1]
    except IndexError:
        log.log_error('Invalid Input')
        exit()
    system('clear')
    print('Download Song: ')
    print(song_title)
    print('Y/n?')
    confirm = input('>')
    if confirm == '' or confirm.lower() == 'y':
        pass
    elif confirm.lower() == 'n':
        exit()
    else:
        log.log_error('Invalid Input')
        exit()
    return song_url, song_title
0.001506
def get_image_url(self, selector, by=By.CSS_SELECTOR,
                  timeout=settings.SMALL_TIMEOUT):
    """ Extracts the URL from an image element on the page. """
    if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT:
        timeout = self.__get_new_timeout(timeout)
    return self.get_attribute(selector,
                              attribute='src', by=by, timeout=timeout)
0.007092
def delete_record_set(self, record_set):
    """Append a record set to the 'deletions' for the change set.

    :type record_set:
        :class:`google.cloud.dns.resource_record_set.ResourceRecordSet`
    :param record_set: the record set to append.

    :raises: ``ValueError`` if ``record_set`` is not of the required type.
    """
    if not isinstance(record_set, ResourceRecordSet):
        raise ValueError("Pass a ResourceRecordSet")
    self._deletions += (record_set,)
0.003891
def get_sequence_rules(self):
    """Gets the ``SequenceRuleList`` resulting from a search.

    return: (osid.assessment.authoring.SequenceRuleList) - the sequence
        rule list
    raise:  IllegalState - list has already been retrieved
    *compliance: mandatory -- This method must be implemented.*
    """
    if self.retrieved:
        raise errors.IllegalState('List has already been retrieved.')
    self.retrieved = True
    return objects.SequenceRuleList(self._results, runtime=self._runtime)
0.00363
def get_cached(location, **kwargs):
    """
    Simple wrapper that adds Django caching support to 'geocoder.get()'.
    """
    result = cache.get(location)

    # Result is not cached or wrong
    if not result or not result.ok:
        result = geocoder.get(location, **kwargs)
        if result.ok:
            cache.set(location, result)

    return result
0.00277
def read_config(self, config_file):
    """
    Parses the specified configuration file and stores the values. Raises
    an InvalidConfigurationFile exception if the file is not well-formed.
    """
    cfg = ConfigParser.SafeConfigParser()
    try:
        cfg.read(config_file)
    except ConfigParser.MissingSectionHeaderError as e:
        # The file exists, but doesn't have the correct format.
        raise exc.InvalidConfigurationFile(e)

    def safe_get(section, option, default=None):
        try:
            return cfg.get(section, option)
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            return default

    # A common mistake is including credentials in the config file. If any
    # values are found, issue a warning so that the developer can correct
    # this problem.
    creds_found = False
    for section in cfg.sections():
        if section == "settings":
            section_name = "default"
            self._default_set = True
        else:
            section_name = section
        # Check for included credentials
        for key in ("username", "password", "api_key"):
            if creds_found:
                break
            if safe_get(section, key):
                creds_found = True
        dct = self._settings[section_name] = {}
        dct["region"] = safe_get(section, "region", default_region)
        ityp = safe_get(section, "identity_type")
        if ityp:
            dct["identity_type"] = _id_type(ityp)
            dct["identity_class"] = _import_identity(ityp)
        # Handle both the old and new names for this setting.
        debug = safe_get(section, "debug")
        if debug is None:
            debug = safe_get(section, "http_debug", "False")
        dct["http_debug"] = debug == "True"
        verify_ssl = safe_get(section, "verify_ssl", "True")
        dct["verify_ssl"] = verify_ssl == "True"
        dct["keyring_username"] = safe_get(section, "keyring_username")
        dct["encoding"] = safe_get(section, "encoding", default_encoding)
        dct["auth_endpoint"] = safe_get(section, "auth_endpoint")
        dct["tenant_name"] = safe_get(section, "tenant_name")
        dct["tenant_id"] = safe_get(section, "tenant_id")
        use_servicenet = safe_get(section, "use_servicenet", "False")
        dct["use_servicenet"] = use_servicenet == "True"
        app_agent = safe_get(section, "custom_user_agent")
        if app_agent:
            # Customize the user-agent string with the app name.
            dct["user_agent"] = "%s %s" % (app_agent, USER_AGENT)
        else:
            dct["user_agent"] = USER_AGENT
        # If this is the first section, make it the default
        if not self._default_set:
            self._settings["default"] = self._settings[section]
            self._default_set = True
    if creds_found:
        warnings.warn("Login credentials were detected in your .pyrax.cfg "
                      "file. These have been ignored, but you should remove "
                      "them and either place them in a credential file, or "
                      "consider using another means of authentication. More "
                      "information on the use of credential files can be found "
                      "in the 'docs/getting_started.md' document.")
0.001976
def meta(cls):
    """Return a dictionary containing meta-information about the given
    resource."""
    if getattr(cls, '__from_class__', None) is not None:
        cls = cls.__from_class__
    attribute_info = {}
    for name, value in cls.__table__.columns.items():
        attribute_info[name] = str(value.type).lower()
    return {cls.__name__: attribute_info}
0.005
def cutout_shape(self, shape_obj):
    """
    Cut out and return a portion of the data corresponding to `shape_obj`.
    A masked numpy array is returned, where the pixels not enclosed in
    the shape are masked out.
    """
    view, mask = self.get_shape_view(shape_obj)

    # cutout our enclosing (possibly shortened) bbox
    data = self._slice(view)

    # mask non-containing members
    mdata = np.ma.array(data, mask=np.logical_not(mask))
    return mdata
0.003914
def _apply_default_values(catalog, default_values):
    """Applies default values to the fields of a catalog. If a field is
    empty, the default is applied; if it already has a value, the existing
    value is kept.

    Only supports defaults for the following classes:
        catalog
        dataset
        distribution
        field

    Args:
        catalog (dict): A catalog.
        default_values (dict): Default values for some of the catalog's
            fields.
            {
                "dataset_issued": "2017-06-22",
                "distribution_issued": "2017-06-22"
            }
    """
    for field, default_value in iteritems(default_values):
        class_metadata = field.split("_")[0]
        field_json_path = field.split("_")[1:]

        # catalog-level defaults
        if class_metadata == "catalog":
            _set_default_value(catalog, field_json_path, default_value)

        # dataset-level defaults
        elif class_metadata == "dataset":
            for dataset in catalog["dataset"]:
                _set_default_value(dataset, field_json_path, default_value)

        # distribution-level defaults
        elif class_metadata == "distribution":
            for dataset in catalog["dataset"]:
                for distribution in dataset["distribution"]:
                    _set_default_value(
                        distribution, field_json_path, default_value)

        # field-level defaults
        elif class_metadata == "field":
            for dataset in catalog["dataset"]:
                for distribution in dataset["distribution"]:
                    # the "field" key is not mandatory in a "distribution"
                    if distribution.get("field"):
                        for field in distribution["field"]:
                            _set_default_value(
                                field, field_json_path, default_value)
0.000523
def current_window_handle(self):
    """
    Returns the handle of the current window.

    :Usage:
        ::

            driver.current_window_handle
    """
    if self.w3c:
        return self.execute(Command.W3C_GET_CURRENT_WINDOW_HANDLE)['value']
    else:
        return self.execute(Command.GET_CURRENT_WINDOW_HANDLE)['value']
0.005333
def create_albaran_automatic(pk, list_lines):
    """
    Automatically create the delivery note (albaran)
    """
    line_bd = SalesLineAlbaran.objects.filter(
        line_order__pk__in=list_lines).values_list('line_order__pk')
    if line_bd.count() == 0 or len(list_lines) != len(line_bd[0]):
        # only those order lines that are not already on a delivery note
        if line_bd.count() != 0:
            for x in line_bd[0]:
                list_lines.pop(list_lines.index(x))

        GenLineProduct.create_albaran_from_order(pk, list_lines)
0.005254
def status_message(self):
    """Return friendly response from API based on response code."""
    msg = None

    if self.last_ddns_response in response_messages.keys():
        return response_messages.get(self.last_ddns_response)

    if 'good' in self.last_ddns_response:
        ip = re.search(r'(\d{1,3}\.?){4}', self.last_ddns_response).group()
        msg = "SUCCESS: DNS hostname IP (%s) successfully updated." % ip
    elif 'nochg' in self.last_ddns_response:
        ip = re.search(r'(\d{1,3}\.?){4}', self.last_ddns_response).group()
        msg = "SUCCESS: IP address (%s) is up to date, nothing was changed. " \
              "Additional 'nochg' updates may be considered abusive." % ip
    else:
        msg = "ERROR: Ooops! Something went wrong !!!"

    return msg
0.003584
def resume(self, email, master_token, state=None, sync=True):
    """Authenticate to Google with the provided master token & sync.

    Args:
        email (str): The account to use.
        master_token (str): The master token.
        state (dict): Serialized state to load.

    Raises:
        LoginException: If there was a problem logging in.
    """
    auth = APIAuth(self.OAUTH_SCOPES)
    ret = auth.load(email, master_token, android_id=get_mac())
    if ret:
        self.load(auth, state, sync)
    return ret
0.003478
def __create_proj_mat(self, size):
    """Create a random projection matrix

    [1] D. Achlioptas. Database-friendly random projections:
        Johnson-Lindenstrauss with binary coins.
    [2] P. Li, et al. Very sparse random projections.

    http://scikit-learn.org/stable/modules/random_projection.html#sparse-random-projection
    """
    # [1]
    # return np.random.choice([-np.sqrt(3), 0, np.sqrt(3)], size=size,
    #                         p=[1 / 6, 2 / 3, 1 / 6])

    # [2]
    s = 1 / self.density
    return np.random.choice([-np.sqrt(s / self.k), 0, np.sqrt(s / self.k)],
                            size=size,
                            p=[1 / (2 * s), 1 - 1 / s, 1 / (2 * s)])
0.005658
def run(self, realm, users):
    """
    Requests a TGT in the name of the users specified in users.
    Returns a list of usernames that are in the domain.

    realm: kerberos realm (domain name of the corp)
    users: list : list of usernames to test
    """
    existing_users = []
    for user in users:
        logging.debug('Probing user %s' % user)
        req = KerberosUserEnum.construct_tgt_req(realm, user)
        rep = self.ksoc.sendrecv(req.dump(), throw=False)
        if rep.name != 'KRB_ERROR':
            # user doesn't need preauth, but it exists
            existing_users.append(user)
        elif rep.native['error-code'] != KerberosErrorCode.KDC_ERR_PREAUTH_REQUIRED.value:
            # any other error means user doesn't exist
            continue
        else:
            # preauth needed, only if user exists
            existing_users.append(user)
    return existing_users
0.041312
def process_notebook(self, disable_warnings=True):
    """Process the notebook and create all the pictures and files

    This method runs the notebook using the :mod:`nbconvert` and
    :mod:`nbformat` modules. It creates the :attr:`outfile` notebook,
    a python and a rst file"""
    infile = self.infile
    outfile = self.outfile
    in_dir = os.path.dirname(infile) + os.path.sep
    odir = os.path.dirname(outfile) + os.path.sep
    create_dirs(os.path.join(odir, 'images'))
    ep = nbconvert.preprocessors.ExecutePreprocessor(
        timeout=300)
    cp = nbconvert.preprocessors.ClearOutputPreprocessor(
        timeout=300)

    self.nb = nb = nbformat.read(infile, nbformat.current_nbformat)

    # disable warnings in the rst file
    if disable_warnings:
        for i, cell in enumerate(nb.cells):
            if cell['cell_type'] == 'code':
                cell = cell.copy()
                break
        cell = cell.copy()
        cell.source = """
import logging
logging.captureWarnings(True)
logging.getLogger('py.warnings').setLevel(logging.ERROR)
"""
        nb.cells.insert(i, cell)

    # write and process rst_file
    if self.preprocess:
        t = dt.datetime.now()
        logger.info('Processing %s', self.infile)
        try:
            ep.preprocess(nb, {'metadata': {'path': in_dir}})
        except nbconvert.preprocessors.execute.CellExecutionError:
            logger.critical(
                'Error while processing %s!', self.infile, exc_info=True)
        else:
            logger.info('Done. Seconds needed: %i',
                        (dt.datetime.now() - t).seconds)
        if disable_warnings:
            nb.cells.pop(i)

    self.py_file = self.get_out_file('py')

    if self.remove_tags:
        tp = nbconvert.preprocessors.TagRemovePreprocessor(timeout=300)
        for key, val in self.tag_options.items():
            setattr(tp, key, set(val))
        nb4rst = deepcopy(nb)
        tp.preprocess(nb4rst, {'metadata': {'path': in_dir}})
    else:
        nb4rst = nb

    self.create_rst(nb4rst, in_dir, odir)

    if self.clear:
        cp.preprocess(nb, {'metadata': {'path': in_dir}})

    # write notebook file
    nbformat.write(nb, outfile)
    self.create_py(nb)
0.000823
def draw_freehand(self):
    """ Freehand sketching.
    """
    if _ctx._ns["mousedown"]:
        x, y = mouse()
        if self.show_grid:
            x, y = self.grid.snap(x, y)

        if self.freehand_move == True:
            cmd = MOVETO
            self.freehand_move = False
        else:
            cmd = LINETO

        # Add a new LINETO to the path,
        # except when starting to draw,
        # then a MOVETO is added to the path.
        pt = PathElement()
        if cmd != MOVETO:
            pt.freehand = True  # Used when mixed with curve drawing.
        else:
            pt.freehand = False
        pt.cmd = cmd
        pt.x = x
        pt.y = y
        pt.ctrl1 = Point(x, y)
        pt.ctrl2 = Point(x, y)
        self._points.append(pt)

        # Draw the current location of the cursor.
        r = 4
        _ctx.nofill()
        _ctx.stroke(self.handle_color)
        _ctx.oval(pt.x-r, pt.y-r, r*2, r*2)
        _ctx.fontsize(9)
        _ctx.fill(self.handle_color)
        _ctx.text(" ("+str(int(pt.x))+", "+str(int(pt.y))+")", pt.x+r, pt.y)

        self._dirty = True
    else:
        # Export the updated drawing,
        # remember to do a MOVETO on the next interaction.
        self.freehand_move = True
        if self._dirty:
            self._points[-1].freehand = False
            self.export_svg()
            self._dirty = False
0.009208
def fsencoding(s, encoding=sys.getfilesystemencoding()):
    """
    Ensure the given argument is in filesystem encoding (not unicode)
    """
    if isinstance(s, unicode):
        s = s.encode(encoding)
    return s
0.004608
def list_qos_rule_types(self, retrieve_all=True, **_params):
    """List available qos rule types."""
    return self.list('rule_types', self.qos_rule_types_path,
                     retrieve_all, **_params)
0.009091
def output_to_graphviz(file, namer=_graphviz_default_namer, block=None):
    """ Walk the block and output it in graphviz format to the open file. """
    print(block_to_graphviz_string(block, namer), file=file)
0.004739
def output_results(results, split_id='results', output_stream=None):
    '''
    Log `results` readably to `output_stream`, with a header containing
    `split_id`.

    :param results: a dictionary of summary statistics from an evaluation
    :type results: dict(str -> object)
    :param str split_id: an identifier for the source of `results`
        (e.g. 'dev')
    :param file output_stream: the file-like object to which to log the
        results (default: stdout)
    '''
    if output_stream is None:
        output_stream = sys.stdout

    output_stream.write('----- %s -----\n' % split_id)
    for name in sorted(results.keys()):
        output_stream.write('%s: %s\n' % (name, repr(results[name])))
    output_stream.flush()
0.001323
def excel():
    """
    Convert Excel files to LiPD files. LiPD data is returned directly from
    this function.

    | Example
    | 1: lipd.readExcel()
    | 2: D = lipd.excel()

    :return dict _d: Metadata
    """
    global files, cwd, settings
    _d = {}
    # Turn off verbose. We don't want to clutter the console with extra
    # reading/writing output statements
    settings["verbose"] = False
    # Find excel files
    print("Found " + str(len(files[".xls"])) + " Excel files")
    logger_start.info("found excel files: {}".format(len(files[".xls"])))
    # Start the clock
    start = clock()
    # Loop for each excel file
    for file in files[".xls"]:
        # Convert excel file to LiPD
        dsn = excel_main(file)
        try:
            # Read the new LiPD file back in, to get fixes, inferred
            # calculations, updates, etc.
            _d[dsn] = readLipd(os.path.join(file["dir"], dsn + ".lpd"))
            # Write the modified LiPD file back out again.
            writeLipd(_d[dsn], cwd)
        except Exception as e:
            logger_start.error("excel: Unable to read new LiPD file, {}".format(e))
            print("Error: Unable to read new LiPD file: {}, {}".format(dsn, e))
    # Time!
    end = clock()
    logger_benchmark.info(log_benchmark("excel", start, end))
    # Start printing stuff again.
    settings["verbose"] = True
    return _d
0.003642
def decode_sql(self, sql):
    """Base64 decode a string. This should only be used for sql in calls.

    :param str sql: The base64 encoded form of the original utf-8 string
    :return str: The decoded utf-8 string
    """
    # JSON is defined as using "unicode", we'll go a step further and
    # mandate utf-8 (though for the base64 part, it doesn't really matter!)
    base64_sql_bytes = to_unicode(sql).encode('utf-8')
    # in python3.x you can pass `validate=True` to b64decode to get this
    # behavior.
    if not re.match(b'^[A-Za-z0-9+/]*={0,2}$', base64_sql_bytes):
        self.raise_invalid_base64(sql)

    try:
        sql_bytes = base64.b64decode(base64_sql_bytes)
    except ValueError:
        self.raise_invalid_base64(sql)

    return sql_bytes.decode('utf-8')
0.002358
def forward(self,  # pylint: disable=arguments-differ
            inputs: torch.Tensor,
            word_inputs: torch.Tensor = None) -> Dict[str, Union[torch.Tensor, List[torch.Tensor]]]:
    """
    Parameters
    ----------
    inputs: ``torch.Tensor``, required.
        Shape ``(batch_size, timesteps, 50)`` of character ids representing the current batch.
    word_inputs : ``torch.Tensor``, required.
        If you passed a cached vocab, you can in addition pass a tensor of shape
        ``(batch_size, timesteps)``, which represent word ids which have been pre-cached.

    Returns
    -------
    Dict with keys:
    ``'elmo_representations'``: ``List[torch.Tensor]``
        A ``num_output_representations`` list of ELMo representations for the input sequence.
        Each representation is shape ``(batch_size, timesteps, embedding_dim)``
    ``'mask'``:  ``torch.Tensor``
        Shape ``(batch_size, timesteps)`` long tensor with sequence mask.
    """
    # reshape the input if needed
    original_shape = inputs.size()
    if len(original_shape) > 3:
        timesteps, num_characters = original_shape[-2:]
        reshaped_inputs = inputs.view(-1, timesteps, num_characters)
    else:
        reshaped_inputs = inputs

    if word_inputs is not None:
        original_word_size = word_inputs.size()
        if self._has_cached_vocab and len(original_word_size) > 2:
            reshaped_word_inputs = word_inputs.view(-1, original_word_size[-1])
        elif not self._has_cached_vocab:
            logger.warning("Word inputs were passed to ELMo but it does not have a cached vocab.")
            reshaped_word_inputs = None
        else:
            reshaped_word_inputs = word_inputs
    else:
        reshaped_word_inputs = word_inputs

    # run the biLM
    bilm_output = self._elmo_lstm(reshaped_inputs, reshaped_word_inputs)
    layer_activations = bilm_output['activations']
    mask_with_bos_eos = bilm_output['mask']

    # compute the elmo representations
    representations = []
    for i in range(len(self._scalar_mixes)):
        scalar_mix = getattr(self, 'scalar_mix_{}'.format(i))
        representation_with_bos_eos = scalar_mix(layer_activations, mask_with_bos_eos)
        if self._keep_sentence_boundaries:
            processed_representation = representation_with_bos_eos
            processed_mask = mask_with_bos_eos
        else:
            representation_without_bos_eos, mask_without_bos_eos = remove_sentence_boundaries(
                    representation_with_bos_eos, mask_with_bos_eos)
            processed_representation = representation_without_bos_eos
            processed_mask = mask_without_bos_eos
        representations.append(self._dropout(processed_representation))

    # reshape if necessary
    if word_inputs is not None and len(original_word_size) > 2:
        mask = processed_mask.view(original_word_size)
        elmo_representations = [representation.view(original_word_size + (-1, ))
                                for representation in representations]
    elif len(original_shape) > 3:
        mask = processed_mask.view(original_shape[:-1])
        elmo_representations = [representation.view(original_shape[:-1] + (-1, ))
                                for representation in representations]
    else:
        mask = processed_mask
        elmo_representations = representations

    return {'elmo_representations': elmo_representations, 'mask': mask}
0.00432
def gateway_by_type(self, type=None, on_network=None):  # @ReservedAssignment
    """
    Return gateways for the specified node. You can also specify type to
    find only gateways of a specific type. Valid types are: bgp_peering,
    netlink, ospfv2_area.

    :param RoutingNode self: the routing node to check
    :param str type: bgp_peering, netlink, ospfv2_area
    :param str on_network: if network is specified, should be CIDR and
        specifies a filter to only return gateways on that network when
        an interface has multiple
    :return: tuple of RoutingNode(interface,network,gateway)
    :rtype: list
    """
    gateways = route_level(self, 'gateway')
    if not type:
        for gw in gateways:
            yield gw
    else:
        for node in gateways:
            # TODO: Change to type == node.related_element_type when
            # only supporting SMC >= 6.4
            if type == node.routing_node_element.typeof:
                # If the parent is level interface, this is a tunnel interface
                # where the gateway is bound to interface versus network
                parent = node._parent
                if parent.level == 'interface':
                    interface = parent
                    network = None
                else:
                    network = parent
                    interface = network._parent

                if on_network is not None:
                    if network and network.ip == on_network:
                        yield (interface, network, node)
                else:
                    yield (interface, network, node)
0.002465
def set_source_nodes(self, source_nodes):
    r"""
    Set the source nodes and compute their t-weights.

    Parameters
    ----------
    source_nodes : sequence of integers
        Declare the source nodes via their ids.

    Notes
    -----
    It does not get checked if one of the supplied source-nodes already
    has a weight assigned (e.g. by passing it to `set_sink_nodes`). This
    can occur when the foreground- and background-markers cover the same
    region. In this case the order of setting the terminal nodes can
    affect the graph and therefore the graph-cut result.
    """
    self.__snodes = list(source_nodes)

    # set the source-to-node weights (t-weights)
    for snode in self.__snodes:
        self.__tweights[snode] = (self.MAX, 0)
0.00814
def on_unselect(self, item, action):
    """Add an action to make when an object is unfocused."""
    if not isinstance(item, int):
        item = self.items.index(item)

    self._on_unselect[item] = action
0.008969
def crop(stream, x, y, width, height, **kwargs):
    """Crop the input video.

    Args:
        x: The horizontal position, in the input video, of the left edge
           of the output video.
        y: The vertical position, in the input video, of the top edge of
           the output video.
        width: The width of the output video. Must be greater than 0.
        height: The height of the output video. Must be greater than 0.

    Official documentation: `crop <https://ffmpeg.org/ffmpeg-filters.html#crop>`__
    """
    return FilterNode(
        stream, crop.__name__, args=[width, height, x, y], kwargs=kwargs
    ).stream()
0.003012
def publish(self, key, value):
    """publish value to status"""
    self.log.debug(
        "Publishing status: %s/%s: %s",
        self.__class__.__name__, key, value)
    self.core.publish(self.__class__.__name__, key, value)
0.012712
def get_vs_dir_from_tool_dir(self):
    """
    Get the directory of Visual Studio from the Tools directory.
    """
    index = self.tool_dir.find(r'Common7\Tools')
    return self.tool_dir[:index]
0.00885
def getEmailAddresses(self):
    """
    Return an iterator of all email addresses associated with this person.

    @return: an iterator of unicode strings in RFC2822 address format.
    """
    return self.store.query(
        EmailAddress,
        EmailAddress.person == self).getColumn('address')
0.006098
def show_corrections(self, status=None, nids=None):
    """
    Show the corrections applied to the flow at run-time.

    Args:
        status: if not None, only the tasks with this status are selected.
        nids: optional list of node identifiers used to filter the tasks.

    Return: The number of corrections found.
    """
    nrows, ncols = get_terminal_size()
    count = 0
    for task in self.iflat_tasks(status=status, nids=nids):
        if task.num_corrections == 0:
            continue
        count += 1
        print(make_banner(str(task), width=ncols, mark="="))
        for corr in task.corrections:
            pprint(corr)

    if not count:
        print("No correction found.")
    return count
0.005222
def should_run_now(self, force=False):
    """
    Returns a boolean determining whether this cron should run now or not!
    """
    from django_cron.models import CronJobLog
    cron_job = self.cron_job

    self.user_time = None
    self.previously_ran_successful_cron = None

    # If we pass --force options, we force cron run
    if force:
        return True

    if cron_job.schedule.run_every_mins is not None:
        # We check last job - success or not
        last_job = None
        try:
            last_job = CronJobLog.objects.filter(
                code=cron_job.code).latest('start_time')
        except CronJobLog.DoesNotExist:
            pass

        if last_job:
            if not last_job.is_success and cron_job.schedule.retry_after_failure_mins:
                if get_current_time() > last_job.start_time + timedelta(
                        minutes=cron_job.schedule.retry_after_failure_mins):
                    return True
                else:
                    return False

        try:
            self.previously_ran_successful_cron = CronJobLog.objects.filter(
                code=cron_job.code,
                is_success=True,
                ran_at_time__isnull=True
            ).latest('start_time')
        except CronJobLog.DoesNotExist:
            pass

        if self.previously_ran_successful_cron:
            if get_current_time() > self.previously_ran_successful_cron.start_time + timedelta(
                    minutes=cron_job.schedule.run_every_mins):
                return True
        else:
            return True

    if cron_job.schedule.run_at_times:
        for time_data in cron_job.schedule.run_at_times:
            user_time = time.strptime(time_data, "%H:%M")
            now = get_current_time()
            actual_time = time.strptime("%s:%s" % (now.hour, now.minute), "%H:%M")
            if actual_time >= user_time:
                qset = CronJobLog.objects.filter(
                    code=cron_job.code,
                    ran_at_time=time_data,
                    is_success=True
                ).filter(
                    Q(start_time__gt=now) |
                    Q(end_time__gte=now.replace(hour=0, minute=0, second=0, microsecond=0))
                )
                if not qset:
                    self.user_time = time_data
                    return True

    return False
0.003604
def compare(self, other, filter_fcn=None):
    """Returns True if properties can be compared in terms of eq.

    Entity's Fields can be filtered accordingly to 'filter_fcn'. This
    callable receives field's name as first parameter and field itself as
    second parameter. It must return True if field's value should be
    included on comparison and False otherwise.

    If not provided, fields marked as unique will not be compared by
    default. 'id' and 'name' are examples of unique fields commonly
    ignored. Check Entities fields for fields marked with 'unique=True'

    :param other: entity to compare
    :param filter_fcn: callable
    :return: boolean
    """
    if not isinstance(other, type(self)):
        return False
    if filter_fcn is None:
        def filter_unique(_, field):
            """Filter function for unique fields"""
            return not field.unique

        filter_fcn = filter_unique
    return self.to_json_dict(filter_fcn) == other.to_json_dict(filter_fcn)
0.001833
def r_plokamos_proxy(self):
    """ Proxy to write to the annotation store

    :return: response from the remote query store
    :rtype: {str: Any}
    """
    query = request.data
    if self.is_authorized(query, NemoOauthPlugin.current_user()['uri']):
        try:
            resp = requests.post(self.annotation_update_endpoint,
                                 data=query,
                                 json=None,
                                 headers={"content-type": "application/sparql-update",
                                          "accept": "application/sparql-results+json"})
            resp.raise_for_status()
            return resp.content, resp.status_code
        except requests.exceptions.HTTPError as err:
            return str(err), resp.status_code
    else:
        return "Unauthorized request", 403
0.007194
def fromOpenIDRequest(cls, request):
    """Instantiate a Request object from the arguments in a
    C{checkid_*} OpenID message
    """
    self = cls()
    args = request.message.getArgs(self.ns_uri)
    is_openid1 = request.message.isOpenID1()

    if args == {}:
        return None

    self.parseExtensionArgs(args, is_openid1)
    return self
0.005141
def power_source_type():
    """
    FreeBSD uses sysctl hw.acpi.acline to tell if Mains (1) is used or
    Battery (0). Beware that on Desktop machines this hw.acpi.acline oid
    may not exist.

    @return: One of common.POWER_TYPE_*
    @raise: RuntimeError if the type of power source is not supported
    """
    try:
        supply = int(subprocess.check_output(["sysctl", "-n", "hw.acpi.acline"]))
    except Exception:
        return common.POWER_TYPE_AC

    if supply == 1:
        return common.POWER_TYPE_AC
    elif supply == 0:
        return common.POWER_TYPE_BATTERY
    else:
        raise RuntimeError("Unknown power source type!")
0.014388
def pre_save(self, model_instance, add):
    """
    Process the source image through the defined processors.
    """
    file = getattr(model_instance, self.attname)

    if file and not file._committed:
        image_file = file
        if self.resize_source_to:
            file.seek(0)
            image_file = processors.process(file, self.resize_source_to)
            image_file = post_processors.process(image_file, self.resize_source_to)
        filename = str(shortuuid.uuid()) + os.path.splitext(file.name)[1]
        file.save(filename, image_file, save=False)
    return file
0.004688
def prepare_input(self, extracted_str):
    """
    Input raw string and do transformations, as set in template file.
    """

    # Remove whitespace
    if self.options['remove_whitespace']:
        optimized_str = re.sub(' +', '', extracted_str)
    else:
        optimized_str = extracted_str

    # Remove accents
    if self.options['remove_accents']:
        optimized_str = unidecode(optimized_str)

    # Convert to lower case
    if self.options['lowercase']:
        optimized_str = optimized_str.lower()

    # Specific replace
    for replace in self.options['replace']:
        assert len(replace) == 2, 'A replace should be a list of 2 items'
        optimized_str = optimized_str.replace(replace[0], replace[1])

    return optimized_str
0.002415
def enqueue(self, stream_url, offset=0, opaque_token=None):
    """Adds stream to the queue. Does not impact the currently playing
    stream."""
    directive = self._play_directive('ENQUEUE')
    audio_item = self._audio_item(stream_url=stream_url,
                                  offset=offset,
                                  push_buffer=False,
                                  opaque_token=opaque_token)
    audio_item['stream']['expectedPreviousToken'] = current_stream.token

    directive['audioItem'] = audio_item
    self._response['directives'].append(directive)
    return self
0.004762
def get(self, template_ids, session, fields=[]):
    '''taobao.delivery.template.get

    Get the information of the shipping-fee templates specified by the user.
    '''
    request = TOPRequest('taobao.delivery.template.get')
    request['template_ids'] = template_ids
    if not fields:
        fields = self.fields
    request['fields'] = fields
    self.create(self.execute(request, session),
                fields=['delivery_templates', 'total_results'],
                models={'delivery_templates': DeliveryTemplate})
    return self.delivery_templates
0.009615
def remove_label(self, doc, label, update_index=True):
    """
    Remove a label from a doc. Takes care of updating the index.
    """
    doc.remove_label(label)
    if update_index:
        self.upd_doc(doc)
        self.commit()
0.007722
def getDaysToExpire(self):
    """Returns the days until this certificate expires

    :returns: Days until the certificate expires
    :rtype: int
    """
    delta = 0
    today = DateTime()
    valid_from = self.getValidFrom() or today
    valid_to = self.getValidTo()
    # one of the fields is not set, return 0 days
    if not valid_from or not valid_to:
        return 0
    # valid_from comes after valid_to?
    if valid_from > valid_to:
        return 0
    # calculate the time between today and valid_to, even if valid_from
    # is in the future.
    else:
        delta = valid_to - today
    return int(math.ceil(delta))
0.002801
def _from_dict(cls, _dict):
    """Initialize a DocumentAccepted object from a json dictionary."""
    args = {}
    if 'document_id' in _dict:
        args['document_id'] = _dict.get('document_id')
    if 'status' in _dict:
        args['status'] = _dict.get('status')
    if 'notices' in _dict:
        args['notices'] = [
            Notice._from_dict(x) for x in (_dict.get('notices'))
        ]
    return cls(**args)
0.004292
def proxy_config(commands, **kwargs):
    '''
    Send configuration commands over SSH or NX-API

    commands
        List of configuration commands

    no_save_config
        If True, don't save configuration commands to startup
        configuration. If False, save configuration to startup
        configuration. Default: False

    .. code-block:: bash

        salt '*' nxos.cmd proxy_config 'feature bgp' no_save_config=True
        salt '*' nxos.cmd proxy_config 'feature bgp'
    '''
    no_save_config = DEVICE_DETAILS['no_save_config']
    no_save_config = kwargs.get('no_save_config', no_save_config)
    if not isinstance(commands, list):
        commands = [commands]
    try:
        if CONNECTION == 'ssh':
            _sendline_ssh('config terminal')
            single_cmd = ''
            for cmd in commands:
                single_cmd += cmd + ' ; '
            ret = _sendline_ssh(single_cmd + 'end')
            if no_save_config:
                pass
            else:
                _sendline_ssh(COPY_RS)
            if ret:
                log.error(ret)
        elif CONNECTION == 'nxapi':
            ret = _nxapi_request(commands)
            if no_save_config:
                pass
            else:
                _nxapi_request(COPY_RS)
            for each in ret:
                if 'Failure' in each:
                    log.error(each)
    except CommandExecutionError as e:
        log.error(e)
        raise
    return [commands, ret]
0.000678
def only_specific_multisets(ent, multisets_to_show):
    '''
    Returns a pretty-printed string for specific features in a
    FeatureCollection.
    '''
    out_str = []
    for mset_name in multisets_to_show:
        for key, count in ent[mset_name].items():
            out_str.append('%s - %d: %s' % (mset_name, count, key))
    return '\n'.join(out_str)
0.011236
def backup(self, container, url):
    """
    Backup a container to the given restic url; all restic urls are
    supported.

    :param container:
    :param url: URL to the restic repo, e.g.
        (file:///path/to/restic/?password=<password>)
    :return: JSON response of the backup job (call .get() on it to get
        the snapshot ID)
    """
    args = {
        'container': container,
        'url': url,
    }

    return JSONResponse(self._client.raw('corex.backup', args))
0.005556
def create_app(self, apps_path, name):
    """
    Create Trionyx app in given path

    :param str apps_path: path to create app in.
    :param str name: name of app
    :raises FileExistsError:
    """
    app_path = os.path.join(apps_path, name.lower())
    shutil.copytree(self.app_path, app_path)

    self.update_file(app_path, '__init__.py', {
        'name': name.lower()
    })

    self.update_file(app_path, 'apps.py', {
        'name': name.lower(),
        'verbose_name': name.capitalize()
    })
0.003534
def create(self, resource):
    """Create the given resource.

    Args:
        resource (intern.resource.boss.BossResource): Create a data model
            object with attributes matching those of the resource.

    Returns:
        (intern.resource.boss.BossResource): Returns resource of type
            requested on success.

    Raises:
        requests.HTTPError on failure.
    """
    return self.service.create(
        resource, self.url_prefix, self.auth, self.session,
        self.session_send_opts)
0.007477
def get_resources(minify=False):
    """Find all resources which subclass ResourceBase.

    Keyword arguments:
    minify -- select minified resources if available.

    Returns:
    Dictionary of available resources. Keys are resource names (part of
    the config variable names), values are dicts with css and js keys,
    and tuples of resources as values.
    """
    all_resources = dict()
    subclasses = resource_base.ResourceBase.__subclasses__() + \
        resource_definitions.ResourceAngular.__subclasses__()
    for resource in subclasses:
        obj = resource(minify)
        all_resources[resource.RESOURCE_NAME] = dict(
            css=tuple(obj.resources_css), js=tuple(obj.resources_js))
    return all_resources
0.00565
def rake(self, strike, dip, rake_angle, *args, **kwargs):
    """
    Plot points representing lineations along planes on the axes.
    Additional arguments and keyword arguments are passed on to `plot`.

    Parameters
    ----------
    strike, dip : number or sequences of numbers
        The strike and dip of the plane(s) in degrees. The dip direction
        is defined by the strike following the "right-hand rule".
    rake_angle : number or sequences of numbers
        The angle of the lineation(s) on the plane(s) measured in degrees
        downward from horizontal. Zero degrees corresponds to the "right
        hand" direction indicated by the strike, while negative angles
        are measured downward from the opposite strike direction.
    **kwargs
        Additional arguments are passed on to `plot`.

    Returns
    -------
    A sequence of Line2D artists representing the point(s) specified by
    `strike` and `dip`.
    """
    lon, lat = stereonet_math.rake(strike, dip, rake_angle)
    args, kwargs = self._point_plot_defaults(args, kwargs)
    return self.plot(lon, lat, *args, **kwargs)
0.001654
def _lookup_nslookup(name, rdtype, timeout=None, server=None): ''' Use nslookup to look up addresses :param name: Name of record to search :param rdtype: DNS record type :param timeout: server response timeout :param server: server to query :return: list of records, or False on error ''' cmd = 'nslookup -query={0} {1}'.format(rdtype, name) if timeout is not None: cmd += ' -timeout={0}'.format(int(timeout)) if server is not None: cmd += ' {0}'.format(server) cmd = __salt__['cmd.run_all'](cmd, python_shell=False, output_loglevel='quiet') if cmd['retcode'] != 0: log.warning( 'nslookup returned (%s): %s', cmd['retcode'], cmd['stdout'].splitlines()[-1].strip(string.whitespace + ';') ) return False lookup_res = iter(cmd['stdout'].splitlines()) res = [] try: line = next(lookup_res) if 'unknown query type' in line: raise ValueError('Invalid DNS type {0}'.format(rdtype)) while True: if name in line: break line = next(lookup_res) while True: line = line.strip() if not line or line.startswith('*'): break elif rdtype != 'CNAME' and 'canonical name' in line: name = line.split()[-1][:-1] line = next(lookup_res) continue elif rdtype == 'SOA': line = line.split('=') elif line.startswith('Name:'): line = next(lookup_res) line = line.split(':', 1) elif line.startswith(name): if '=' in line: line = line.split('=', 1) else: line = line.split(' ') res.append(_data_clean(line[-1])) line = next(lookup_res) except StopIteration: pass if rdtype == 'SOA': return [' '.join(res[1:])] else: return res
0.00098
def linsert(self, key, pivot, value, before=False): """Inserts value in the list stored at key either before or after the reference value pivot. """ where = b'BEFORE' if before else b'AFTER' return self.execute(b'LINSERT', key, where, pivot, value)
0.006849
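A hedged usage sketch for the command above; client is a hypothetical instance of the enclosing class, and on an asynchronous client the call would return a future that needs awaiting:

# Given a Redis list 'tasks' == [b'a', b'c']:
client.linsert('tasks', b'c', b'b', before=True)
# LINSERT tasks BEFORE c b  ->  'tasks' becomes [b'a', b'b', b'c']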
def resolve_selector(self): """Resolve selector variables and return the list of effective selectors """ effective_selector_list = [] for current_selector in self._selector_list: # INLINE SELECTOR if self.get_type(current_selector) != 'selector_variable': effective_selector_list.append(current_selector) # SELECTOR VARIABLE else: # Make sure the proxy driver has a selector dictionary if not BROME_CONFIG['selector_dict']: raise Exception(""" You must provide a selector dictionary if you want to use the selector variable type """) # Make sure that the selector dictionary # contains the selector variable if self._get_selector(current_selector) \ not in BROME_CONFIG['selector_dict']: raise Exception(""" Cannot find the selector variable (%s) in the selector dictionary """ % self._get_selector(current_selector)) effective_selector = BROME_CONFIG['selector_dict'][self._get_selector(current_selector)] # noqa if type(effective_selector) is dict: current_browser_id = False keys = [key for key in effective_selector.keys() if key not in ['default', 'hr']] for key in keys: for target in key.split('|'): try: re.search( target.lower(), self._pdriver.get_id().lower() ).group(0) current_browser_id = key except AttributeError: pass if current_browser_id: effective_selector_list.append( effective_selector.get(current_browser_id) ) else: effective_selector_list.append( effective_selector.get('default') ) else: if self.get_type(effective_selector) in \ [value for key, value in SELECTOR_DICT.items() if key != 'selector_variable']: effective_selector_list.append(effective_selector) else: raise Exception(""" All selectors need to start with either: 'nm:' (name), 'xp:' (xpath), 'cn:' (classname), 'id:' (id), 'cs:' (css), 'tn:' (tag name), 'lt:' (link text), 'pl:' (partial link text) """) return effective_selector_list
0.000952
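A hypothetical selector dictionary illustrating the two value forms the resolver accepts: a plain prefixed selector string, and a per-browser dict whose keys are regex alternations matched against the driver id, with a 'default' fallback:

BROME_CONFIG['selector_dict'] = {
    'login_button': 'id:login',
    'menu': {
        'default': 'cs:.menu',
        'firefox|chrome': 'xp://nav//ul',
    },
}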
def mkdir(path, owner=None, grant_perms=None, deny_perms=None, inheritance=True, reset=False): ''' Ensure that the directory is available and permissions are set. Args: path (str): The full path to the directory. owner (str): The owner of the directory. If not passed, it will be the account that created the directory, likely SYSTEM grant_perms (dict): A dictionary containing the user/group and the basic permissions to grant, ie: ``{'user': {'perms': 'basic_permission'}}``. You can also set the ``applies_to`` setting here. The default is ``this_folder_subfolders_files``. Specify another ``applies_to`` setting like this: .. code-block:: yaml {'user': {'perms': 'full_control', 'applies_to': 'this_folder'}} To set advanced permissions use a list for the ``perms`` parameter, ie: .. code-block:: yaml {'user': {'perms': ['read_attributes', 'read_ea'], 'applies_to': 'this_folder'}} deny_perms (dict): A dictionary containing the user/group and permissions to deny along with the ``applies_to`` setting. Use the same format used for the ``grant_perms`` parameter. Remember, deny permissions supersede grant permissions. inheritance (bool): If True the object will inherit permissions from the parent, if ``False``, inheritance will be disabled. Inheritance setting will not apply to parent directories if they must be created. reset (bool): If ``True`` the existing DACL will be cleared and replaced with the settings defined in this function. If ``False``, new entries will be appended to the existing DACL. Default is ``False``. .. versionadded:: 2018.3.0 Returns: bool: True if successful Raises: CommandExecutionError: If unsuccessful CLI Example: .. code-block:: bash # To grant the 'Users' group 'read & execute' permissions. salt '*' file.mkdir C:\\Temp\\ Administrators "{'Users': {'perms': 'read_execute'}}" # Locally using salt call salt-call file.mkdir C:\\Temp\\ Administrators "{'Users': {'perms': 'read_execute', 'applies_to': 'this_folder_only'}}" # Specify advanced attributes with a list salt '*' file.mkdir C:\\Temp\\ Administrators "{'jsnuffy': {'perms': ['read_attributes', 'read_ea'], 'applies_to': 'this_folder_only'}}" ''' # Make sure the drive is valid drive = os.path.splitdrive(path)[0] if not os.path.isdir(drive): raise CommandExecutionError('Drive {0} is not mapped'.format(drive)) path = os.path.expanduser(path) path = os.path.expandvars(path) if not os.path.isdir(path): try: # Make the directory os.mkdir(path) # Set owner if owner: salt.utils.win_dacl.set_owner(obj_name=path, principal=owner) # Set permissions set_perms( path=path, grant_perms=grant_perms, deny_perms=deny_perms, inheritance=inheritance, reset=reset) except WindowsError as exc: raise CommandExecutionError(exc) return True
0.002578
def instruction_BLT(self, opcode, ea): """ Causes a branch if either, but not both, of the N (negative) or V (overflow) bits is set. That is, branch if the sign of a valid twos complement result is, or would be, negative. When used after a subtract or compare operation on twos complement binary values, this instruction will branch if the register was less than the memory register. source code forms: BLT dd; LBLT DDDD CC bits "HNZVC": ----- """ if (self.N ^ self.V) == 1: # N xor V # log.info("$%x BLT branch to $%x, because N XOR V == 1 \t| %s" % ( # self.program_counter, ea, self.cfg.mem_info.get_shortest(ea) # )) self.program_counter.set(ea)
0.007692
def ContextTupleToDict(context): """Convert a tuple representing a context into a dict of (key, value) pairs """ d = {} if not context: return d for k, v in zip(ExceptionWithContext.CONTEXT_PARTS, context): if v != '' and v is not None: # Don't ignore int(0), a valid row_num d[k] = v return d
0.011976
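A quick behavioral sketch; the CONTEXT_PARTS value here is hypothetical and only illustrates the zip-and-filter logic:

# Suppose ExceptionWithContext.CONTEXT_PARTS == ('file_name', 'row_num')
ContextTupleToDict(('stops.txt', 0))   # -> {'file_name': 'stops.txt', 'row_num': 0}
ContextTupleToDict(('', None))         # -> {}
ContextTupleToDict(None)               # -> {}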
def filter_step_asarray(G, covY, pred, yt): """Filtering step of Kalman filter: array version. Parameters ---------- G: (dy, dx) numpy array mean of Y_t | X_t is G * X_t covY: (dy, dy) numpy array covariance of Y_t | X_t pred: MeanAndCov object predictive distribution at time t Returns ------- filt: MeanAndCov object filtering distribution at time t logpyt: float log density of Y_t | Y_{0:t-1} Note ---- This performs the filtering step for N distinct predictive means: pred.mean should be a (N, dx) or (N,) array; filt.mean in the output will have the same shape. """ pm = pred.mean[:, np.newaxis] if pred.mean.ndim == 1 else pred.mean new_pred = MeanAndCov(mean=pm, cov=pred.cov) filt, logpyt = filter_step(G, covY, new_pred, yt) if pred.mean.ndim == 1: # np.squeeze returns a new array, so rebuild filt rather than # discard the result filt = MeanAndCov(mean=filt.mean.squeeze(), cov=filt.cov) return filt, logpyt
0.006349
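A minimal sketch of calling the array version, assuming MeanAndCov is the (mean, cov) container used elsewhere in this module and that filter_step broadcasts over the N predictive means; dx = dy = 1 and N = 3 are arbitrary choices:

import numpy as np
G = np.eye(1)
covY = 0.1 * np.eye(1)
pred = MeanAndCov(mean=np.zeros(3), cov=np.eye(1))  # (N,) means, shared cov
filt, logpyt = filter_step_asarray(G, covY, pred, yt=np.array([0.5]))
# filt.mean has shape (3,), matching the shape of pred.mean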
def _run_query(client, query, job_config=None): """Runs a query while printing status updates Args: client (google.cloud.bigquery.client.Client): Client to bundle configuration needed for API requests. query (str): SQL query to be executed. Defaults to the standard SQL dialect. Use the ``job_config`` parameter to change dialects. job_config (google.cloud.bigquery.job.QueryJobConfig, optional): Extra configuration options for the job. Returns: google.cloud.bigquery.job.QueryJob: the query job created Example: >>> client = bigquery.Client() >>> _run_query(client, "SELECT 17") Executing query with job ID: bf633912-af2c-4780-b568-5d868058632b Query executing: 1.66s Query complete after 2.07s 'bf633912-af2c-4780-b568-5d868058632b' """ start_time = time.time() query_job = client.query(query, job_config=job_config) print("Executing query with job ID: {}".format(query_job.job_id)) while True: print("\rQuery executing: {:0.2f}s".format(time.time() - start_time), end="") try: query_job.result(timeout=0.5) break except futures.TimeoutError: continue print("\nQuery complete after {:0.2f}s".format(time.time() - start_time)) return query_job
0.001448
def _keplerian_to_keplerian_circular(cls, coord, center): """Conversion from Mean Keplerian to Keplerian near-circular elements """ a, e, i, Ω, ω, ν = coord ex = e * cos(ω) ey = e * sin(ω) u = ω + ν return np.array([a, ex, ey, i, Ω, u], dtype=float)
0.006515
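A worked numerical check of the mapping above, with hypothetical orbit values; only the derived components are computed here:

import numpy as np
e, omega, nu = 0.1, np.radians(30.0), np.radians(10.0)
ex = e * np.cos(omega)   # ~ 0.0866
ey = e * np.sin(omega)   # = 0.0500
u = omega + nu           # argument of latitude, 40 degrees in radians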
def get_root_uri(uri): """Return root URI - strip query and fragment.""" chunks = urlsplit(uri) return urlunsplit((chunks.scheme, chunks.netloc, chunks.path, '', ''))
0.005618
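A quick check of the stripping behavior, with a made-up URL:

get_root_uri('https://example.com/api/item?id=7#frag')
# -> 'https://example.com/api/item'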
def resolution(file_, resolution_string): """ A filter to return the URL for the provided resolution of the thumbnail. """ if sorl_settings.THUMBNAIL_DUMMY: dummy_source = sorl_settings.THUMBNAIL_DUMMY_SOURCE source = dummy_source.replace('%(width)s', '(?P<width>[0-9]+)') source = source.replace('%(height)s', '(?P<height>[0-9]+)') source = re.compile(source) try: resolution = decimal.Decimal(resolution_string.strip('x')) info = source.match(file_).groupdict() info = {dimension: int(int(size) * resolution) for (dimension, size) in info.items()} return dummy_source % info except (AttributeError, TypeError, KeyError): # If we can't manipulate the dummy we shouldn't change it at all return file_ filename, extension = os.path.splitext(file_) return '%s@%s%s' % (filename, resolution_string, extension)
0.002103
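With THUMBNAIL_DUMMY disabled, the filter simply rewrites the filename; the path below is hypothetical:

resolution('cache/ab/cd/thumb.jpg', '2x')
# -> 'cache/ab/cd/thumb@2x.jpg'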
def power(self, n): """The matrix power of the channel. Args: n (int): compute the matrix power of the superoperator matrix. Returns: Chi: the matrix power of the SuperOp converted to a Chi channel. Raises: QiskitError: if the input and output dimensions of the QuantumChannel are not equal, or the power is not an integer. """ if n > 0: return super().power(n) return Chi(SuperOp(self).power(n))
0.003883
def clear(self) -> None: """ Clears out the tracked metrics, but keeps the patience and should_decrease settings. """ self._best_so_far = None self._epochs_with_no_improvement = 0 self._is_best_so_far = True self._epoch_number = 0 self.best_epoch = None
0.009464
def assign(self, dst, req, src): """Helper function for assigning into dst depending on requirements.""" if req == 'null': return elif req in ('write', 'inplace'): dst[:] = src elif req == 'add': dst[:] += src
0.00722
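A minimal sketch of the three write requirements, using numpy arrays to stand in for NDArrays; self is unused by the helper, so None is passed purely for illustration:

import numpy as np
dst = np.zeros(3)
assign(None, dst, 'write', np.ones(3))   # dst -> [1. 1. 1.]
assign(None, dst, 'add', np.ones(3))     # dst -> [2. 2. 2.]
assign(None, dst, 'null', np.ones(3))    # dst unchanged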
def run_normalization(self): """ Run the normalization procedures """ for index, media_file in enumerate( tqdm( self.media_files, desc="File", disable=not self.progress, position=0 )): logger.info("Normalizing file {} ({} of {})".format(media_file, index + 1, self.file_count)) media_file.run_normalization() logger.info("Normalized file written to {}".format(media_file.output_file))
0.007067
def geometry_within_radius(geometry, center, radius): """ Validate whether a point, linestring or polygon lies entirely inside a radius around a center point Keyword arguments: geometry -- point/linestring/polygon geojson object center -- point geojson object radius -- radius Returns True if the geometry is inside the radius, False otherwise """ if geometry['type'] == 'Point': return point_distance(geometry, center) <= radius elif geometry['type'] == 'LineString' or geometry['type'] == 'Polygon': point = {} # it's enough to check the exterior ring of the Polygon coordinates = geometry['coordinates'][0] if geometry['type'] == 'Polygon' else geometry['coordinates'] for coordinate in coordinates: point['coordinates'] = coordinate if point_distance(point, center) > radius: return False return True
0.003322
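A hedged usage sketch with made-up coordinates; point_distance is assumed to return a distance in the same units as radius:

center = {'type': 'Point', 'coordinates': [0.0, 0.0]}
line = {'type': 'LineString',
        'coordinates': [[0.0, 1.0], [1.0, 1.0]]}
geometry_within_radius(line, center, radius=5)
# True only if every vertex of the line is within 5 units of the center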
def s_data(nrows_fdata, Nmax, Q): """ I am going to assume we will always have even data. This is pretty safe because it means that we have measured both poles of the sphere and have data that has been continued. nrows_fdata: Number of rows in fdata. Must be even. Nmax: The largest number of n values desired. Q: A value greater than nrows_fdata + Nmax. This can be selected to be factorable into small primes to increase the speed of the fft (probably not that big of a deal today). """ if np.mod(nrows_fdata, 2) == 1: raise Exception("nrows_fdata must be even.") L1 = nrows_fdata s = np.zeros(Q, dtype=np.complex128) MM = int(L1 / 2) for nu in range(-MM, MM + Nmax + 1): if np.mod(nu, 2) == 1: # negative indices wrap to the end of s s[nu - MM] = -1j / nu return s
0.006431
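A small sketch of a valid call; nrows_fdata must be even and Q must exceed nrows_fdata + Nmax (a power of two is a convenient choice for the later FFT):

import numpy as np
s = s_data(nrows_fdata=8, Nmax=4, Q=16)
# Only odd nu in [-4, 8] contribute, each as -1j/nu; the rest of s is zero.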