Columns: text (string, lengths 78 to 104k), score (float64, range 0 to 0.18)
def schemas(self): """ Get a listing of all non-system schemas that exist in the database (system schemas prefixed with 'pg_' are excluded). """ sql = """SELECT schema_name FROM information_schema.schemata ORDER BY schema_name""" schemas = self.query(sql).fetchall() return [s[0] for s in schemas if s[0][:3] != "pg_"]
0.00551
def human_audible(self): """ Returns an estimate of whether this AudioSegment is mostly human audible or not. This is done by taking an FFT of the segment and checking if the SPL of the segment falls below the function `f(x) = 40.11453 - 0.01683697x + 1.406211e-6x^2 - 2.371512e-11x^3`, where x is the most characteristic frequency in the FFT. Note that this method is essentially trying to determine if the estimated SPL of the segment falls below the threshold of human hearing, which changes over frequency. If you graph the threshold over different frequencies, you get what is called an audiogram. The equation above is derived as a curve that tries to fit a typical audiogram, specifically as found in Hearing Thresholds by Yost and Killion, 1997 (see https://www.etymotic.com/media/publications/erl-0096-1997.pdf). Sources of error are: 1) The SPL of an AudioSegment is merely an approximation; 2) this curve is not a perfect fit, and besides, it is only an approximation of a typical audiogram; 3) the algorithm uses a characteristic frequency, which is only really going to be a thing for short segments or for segments which are dominated by a single frequency. :returns: `True` if we estimate that this sound is mostly human audible. `False` if we think it is not. """ hist_bins, hist_vals = self.fft() hist_vals_real_normed = np.abs(hist_vals) / len(hist_vals) f_characteristic = hist_bins[np.argmax(hist_vals_real_normed)] threshold_fc = 40.11453 - (0.01683697 * f_characteristic) + (1.406211e-6 * f_characteristic ** 2) - (2.371512e-11 * f_characteristic ** 3) return self.spl >= threshold_fc
0.008904
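A minimal sketch of the audiogram-threshold check described above, using only the coefficients quoted in the docstring; the characteristic frequency and segment SPL are made-up illustrative inputs.

def hearing_threshold_spl(freq_hz):
    # Cubic fit to a typical audiogram (coefficients from the docstring above).
    return (40.11453
            - 0.01683697 * freq_hz
            + 1.406211e-6 * freq_hz ** 2
            - 2.371512e-11 * freq_hz ** 3)

# Hypothetical segment: characteristic frequency of 1 kHz, estimated SPL of 25 dB.
f_characteristic = 1000.0
segment_spl = 25.0
print(segment_spl >= hearing_threshold_spl(f_characteristic))  # True: above the ~24.7 dB threshold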
def labels(self, *labelvalues, **labelkwargs): """Return the child for the given labelset. All metrics can have labels, allowing grouping of related time series. Taking a counter as an example: from prometheus_client import Counter c = Counter('my_requests_total', 'HTTP Failures', ['method', 'endpoint']) c.labels('get', '/').inc() c.labels('post', '/submit').inc() Labels can also be provided as keyword arguments: from prometheus_client import Counter c = Counter('my_requests_total', 'HTTP Failures', ['method', 'endpoint']) c.labels(method='get', endpoint='/').inc() c.labels(method='post', endpoint='/submit').inc() See the best practices on [naming](http://prometheus.io/docs/practices/naming/) and [labels](http://prometheus.io/docs/practices/instrumentation/#use-labels). """ if not self._labelnames: raise ValueError('No label names were set when constructing %s' % self) if self._labelvalues: raise ValueError('%s already has labels set (%s); can not chain calls to .labels()' % ( self, dict(zip(self._labelnames, self._labelvalues)) )) if labelvalues and labelkwargs: raise ValueError("Can't pass both *args and **kwargs") if labelkwargs: if sorted(labelkwargs) != sorted(self._labelnames): raise ValueError('Incorrect label names') labelvalues = tuple(unicode(labelkwargs[l]) for l in self._labelnames) else: if len(labelvalues) != len(self._labelnames): raise ValueError('Incorrect label count') labelvalues = tuple(unicode(l) for l in labelvalues) with self._lock: if labelvalues not in self._metrics: self._metrics[labelvalues] = self.__class__( self._name, documentation=self._documentation, labelnames=self._labelnames, unit=self._unit, labelvalues=labelvalues, **self._kwargs ) return self._metrics[labelvalues]
0.004833
def get_permissions(self, user_id): """Fetches the permissions object from the graph.""" response = self.request( "{0}/{1}/permissions".format(self.version, user_id), {} )["data"] return {x["permission"] for x in response if x["status"] == "granted"}
0.006803
def create_png(cls_name, meth_name, graph, dir_name='graphs2'): """ Creates a PNG from a given :class:`~androguard.decompiler.dad.graph.Graph`. :param str cls_name: name of the class :param str meth_name: name of the method :param androguard.decompiler.dad.graph.Graph graph: :param str dir_name: output directory """ m_name = ''.join(x for x in meth_name if x.isalnum()) name = ''.join((cls_name.split('/')[-1][:-1], '#', m_name)) graph.draw(name, dir_name)
0.002004
def wait_for_edge(self, pin, edge): """Wait for an edge. Pin should be type IN. Edge must be RISING, FALLING or BOTH. """ self.rpi_gpio.wait_for_edge(pin, self._edge_mapping[edge])
0.009302
def read_documentation(self, fid): """Read documentation from an acclaim skeleton file stream.""" lin = self.read_line(fid) while lin[0] != ':': self.documentation.append(lin) lin = self.read_line(fid) return lin
0.007435
def multiple_replace(dict, text): """ Replace in 'text' all occurrences of any key in the given dictionary by its corresponding value. Returns the new string.""" # Function by Xavier Defrang, originally found at: # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/81330 # Create a regular expression from the dictionary keys regex = re.compile("(%s)" % "|".join(map(re.escape, dict.keys()))) # For each match, look up the corresponding value in the dictionary return regex.sub(lambda mo: dict[mo.string[mo.start():mo.end()]], text)
0.00177
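A small self-contained run of the multi-key replace pattern above; everything comes from the standard library, and the replacement table and sample text are invented for illustration.

import re

replacements = {"cat": "dog", "mouse": "keyboard"}  # hypothetical mapping
pattern = re.compile("(%s)" % "|".join(map(re.escape, replacements)))

text = "the cat chased the mouse"
print(pattern.sub(lambda mo: replacements[mo.group(0)], text))
# -> the dog chased the keyboard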
def move_prev(lines=1, file=sys.stdout): """ Move the cursor to the beginning of the line, a number of lines up. Default: 1 Esc[<lines>F """ move.prev(lines).write(file=file)
0.004926
def UploadSignedConfigBlob(content, aff4_path, client_context=None, limit=None, token=None): """Upload a signed blob into the datastore. Args: content: File content to upload. aff4_path: aff4 path to upload to. client_context: The configuration contexts to use. limit: The maximum size of the chunk to use. token: A security token. Raises: IOError: On failure to write. """ if limit is None: limit = config.CONFIG["Datastore.maximum_blob_size"] # Get the values of these parameters which apply to the client running on the # target platform. if client_context is None: # Default to the windows client. client_context = ["Platform:Windows", "Client Context"] config.CONFIG.Validate( parameters="PrivateKeys.executable_signing_private_key") signing_key = config.CONFIG.Get( "PrivateKeys.executable_signing_private_key", context=client_context) verification_key = config.CONFIG.Get( "Client.executable_signing_public_key", context=client_context) signed_binary_utils.WriteSignedBinary( rdfvalue.RDFURN(aff4_path), content, signing_key, public_key=verification_key, chunk_size=limit, token=token) logging.info("Uploaded to %s", aff4_path)
0.008053
def install(name=None, refresh=False, pkgs=None, **kwargs): r''' Install the passed package(s) on the system using winrepo Args: name (str): The name of a single package, or a comma-separated list of packages to install. (no spaces after the commas) refresh (bool): Boolean value representing whether or not to refresh the winrepo db. Default ``False``. pkgs (list): A list of packages to install from a software repository. All packages listed under ``pkgs`` will be installed via a single command. You can specify a version by passing the item as a dict: CLI Example: .. code-block:: bash # will install the latest version of foo and bar salt '*' pkg.install pkgs='["foo", "bar"]' # will install the latest version of foo and version 1.2.3 of bar salt '*' pkg.install pkgs='["foo", {"bar": "1.2.3"}]' Kwargs: version (str): The specific version to install. If omitted, the latest version will be installed. Recommend for use when installing a single package. If passed with a list of packages in the ``pkgs`` parameter, the version will be ignored. CLI Example: .. code-block:: bash # Version is ignored salt '*' pkg.install pkgs="['foo', 'bar']" version=1.2.3 If passed with a comma separated list in the ``name`` parameter, the version will apply to all packages in the list. CLI Example: .. code-block:: bash # Version 1.2.3 will apply to packages foo and bar salt '*' pkg.install foo,bar version=1.2.3 extra_install_flags (str): Additional install flags that will be appended to the ``install_flags`` defined in the software definition file. Only applies when single package is passed. saltenv (str): Salt environment. Default 'base' report_reboot_exit_codes (bool): If the installer exits with a recognized exit code indicating that a reboot is required, the module function *win_system.set_reboot_required_witnessed* will be called, preserving the knowledge of this event for the remainder of the current boot session. For the time being, 3010 is the only recognized exit code. The value of this param defaults to True. .. versionadded:: 2016.11.0 Returns: dict: Return a dict containing the new package names and versions. If the package is already installed, an empty dict is returned. If the package is installed by ``pkg.install``: .. code-block:: cfg {'<package>': {'old': '<old-version>', 'new': '<new-version>'}} The following example will refresh the winrepo and install a single package, 7zip. CLI Example: .. code-block:: bash salt '*' pkg.install 7zip refresh=True CLI Example: .. code-block:: bash salt '*' pkg.install 7zip salt '*' pkg.install 7zip,filezilla salt '*' pkg.install pkgs='["7zip","filezilla"]' WinRepo Definition File Examples: The following example demonstrates the use of ``cache_file``. This would be used if you have multiple installers in the same directory that use the same ``install.ini`` file and you don't want to download the additional installers. .. code-block:: bash ntp: 4.2.8: installer: 'salt://win/repo/ntp/ntp-4.2.8-win32-setup.exe' full_name: Meinberg NTP Windows Client locale: en_US reboot: False cache_file: 'salt://win/repo/ntp/install.ini' install_flags: '/USEFILE=C:\salt\var\cache\salt\minion\files\base\win\repo\ntp\install.ini' uninstaller: 'NTP/uninst.exe' The following example demonstrates the use of ``cache_dir``. It assumes a file named ``install.ini`` resides in the same directory as the installer. .. 
code-block:: bash ntp: 4.2.8: installer: 'salt://win/repo/ntp/ntp-4.2.8-win32-setup.exe' full_name: Meinberg NTP Windows Client locale: en_US reboot: False cache_dir: True install_flags: '/USEFILE=C:\salt\var\cache\salt\minion\files\base\win\repo\ntp\install.ini' uninstaller: 'NTP/uninst.exe' ''' ret = {} saltenv = kwargs.pop('saltenv', 'base') refresh = salt.utils.data.is_true(refresh) # no need to call _refresh_db_conditional as list_pkgs will do it # Make sure name or pkgs is passed if not name and not pkgs: return 'Must pass a single package or a list of packages' # Ignore pkg_type from parse_targets, Windows does not support the # "sources" argument pkg_params = __salt__['pkg_resource.parse_targets'](name, pkgs, **kwargs)[0] if len(pkg_params) > 1: if kwargs.get('extra_install_flags') is not None: log.warning('\'extra_install_flags\' argument will be ignored for ' 'multiple package targets') # Windows expects an Options dictionary containing 'version' for pkg in pkg_params: pkg_params[pkg] = {'version': pkg_params[pkg]} if not pkg_params: log.error('No package definition found') return {} if not pkgs and len(pkg_params) == 1: # Only use the 'version' param if a single item was passed to the 'name' # parameter pkg_params = { name: { 'version': kwargs.get('version'), 'extra_install_flags': kwargs.get('extra_install_flags') } } # Get a list of currently installed software for comparison at the end old = list_pkgs(saltenv=saltenv, refresh=refresh, versions_as_list=True) # Loop through each package changed = [] for pkg_name, options in six.iteritems(pkg_params): # Load package information for the package pkginfo = _get_package_info(pkg_name, saltenv=saltenv) # Make sure pkginfo was found if not pkginfo: log.error('Unable to locate package %s', pkg_name) ret[pkg_name] = 'Unable to locate package {0}'.format(pkg_name) continue version_num = options.get('version') # Using the salt cmdline with version=5.3 might be interpreted # as a float it must be converted to a string in order for # string matching to work. if not isinstance(version_num, six.string_types) and version_num is not None: version_num = six.text_type(version_num) # If the version was not passed, version_num will be None if not version_num: if pkg_name in old: log.debug('pkg.install: \'%s\' version \'%s\' is already installed', pkg_name, old[pkg_name][0]) continue # Get the most recent version number available from winrepo.p # May also return `latest` or an empty string version_num = _get_latest_pkg_version(pkginfo) if version_num == 'latest' and 'latest' not in pkginfo: # Get the most recent version number available from winrepo.p # May also return `latest` or an empty string version_num = _get_latest_pkg_version(pkginfo) # Check if the version is already installed if version_num in old.get(pkg_name, []): # Desired version number already installed log.debug('pkg.install: \'%s\' version \'%s\' is already installed', pkg_name, version_num) continue # If version number not installed, is the version available? elif version_num != 'latest' and version_num not in pkginfo: log.error('Version %s not found for package %s', version_num, pkg_name) ret[pkg_name] = {'not found': version_num} continue # Get the installer settings from winrepo.p installer = pkginfo[version_num].get('installer', '') cache_dir = pkginfo[version_num].get('cache_dir', False) cache_file = pkginfo[version_num].get('cache_file', '') # Is there an installer configured? 
if not installer: log.error('No installer configured for version %s of package %s', version_num, pkg_name) ret[pkg_name] = {'no installer': version_num} continue # Is the installer in a location that requires caching if installer.startswith(('salt:', 'http:', 'https:', 'ftp:')): # Check for the 'cache_dir' parameter in the .sls file # If true, the entire directory will be cached instead of the # individual file. This is useful for installations that are not # single files if cache_dir and installer.startswith('salt:'): path, _ = os.path.split(installer) __salt__['cp.cache_dir'](path=path, saltenv=saltenv, include_empty=False, include_pat=None, exclude_pat='[email protected]$') # Check to see if the cache_file is cached... if passed if cache_file and cache_file.startswith('salt:'): # Check to see if the file is cached cached_file = __salt__['cp.is_cached'](cache_file, saltenv) if not cached_file: cached_file = __salt__['cp.cache_file'](cache_file, saltenv) # Make sure the cached file is the same as the source if __salt__['cp.hash_file'](cache_file, saltenv) != \ __salt__['cp.hash_file'](cached_file): cached_file = __salt__['cp.cache_file'](cache_file, saltenv) # Check if the cache_file was cached successfully if not cached_file: log.error('Unable to cache %s', cache_file) ret[pkg_name] = { 'failed to cache cache_file': cache_file } continue # Check to see if the installer is cached cached_pkg = __salt__['cp.is_cached'](installer, saltenv) if not cached_pkg: # It's not cached. Cache it, mate. cached_pkg = __salt__['cp.cache_file'](installer, saltenv) # Check if the installer was cached successfully if not cached_pkg: log.error( 'Unable to cache file %s from saltenv: %s', installer, saltenv ) ret[pkg_name] = {'unable to cache': installer} continue # Compare the hash of the cached installer to the source only if the # file is hosted on salt: if installer.startswith('salt:'): if __salt__['cp.hash_file'](installer, saltenv) != \ __salt__['cp.hash_file'](cached_pkg): try: cached_pkg = __salt__['cp.cache_file'](installer, saltenv) except MinionError as exc: return '{0}: {1}'.format(exc, installer) # Check if the installer was cached successfully if not cached_pkg: log.error('Unable to cache %s', installer) ret[pkg_name] = {'unable to cache': installer} continue else: # Run the installer directly (not hosted on salt:, https:, etc.) 
cached_pkg = installer # Fix non-windows slashes cached_pkg = cached_pkg.replace('/', '\\') cache_path = os.path.dirname(cached_pkg) # Compare the hash sums source_hash = pkginfo[version_num].get('source_hash', False) if source_hash: source_sum = _get_source_sum(source_hash, cached_pkg, saltenv) log.debug('pkg.install: Source %s hash: %s', source_sum['hash_type'], source_sum['hsum']) cached_pkg_sum = salt.utils.hashutils.get_hash(cached_pkg, source_sum['hash_type']) log.debug('pkg.install: Package %s hash: %s', source_sum['hash_type'], cached_pkg_sum) if source_sum['hsum'] != cached_pkg_sum: raise SaltInvocationError( ("Source hash '{0}' does not match package hash" " '{1}'").format(source_sum['hsum'], cached_pkg_sum) ) log.debug('pkg.install: Source hash matches package hash.') # Get install flags install_flags = pkginfo[version_num].get('install_flags', '') if options and options.get('extra_install_flags'): install_flags = '{0} {1}'.format( install_flags, options.get('extra_install_flags', '') ) # Compute msiexec string use_msiexec, msiexec = _get_msiexec(pkginfo[version_num].get('msiexec', False)) # Build cmd and arguments # cmd and arguments must be separated for use with the task scheduler cmd_shell = os.getenv('ComSpec', '{0}\\system32\\cmd.exe'.format(os.getenv('WINDIR'))) if use_msiexec: arguments = '"{0}" /I "{1}"'.format(msiexec, cached_pkg) if pkginfo[version_num].get('allusers', True): arguments = '{0} ALLUSERS=1'.format(arguments) else: arguments = '"{0}"'.format(cached_pkg) if install_flags: arguments = '{0} {1}'.format(arguments, install_flags) # Install the software # Check Use Scheduler Option if pkginfo[version_num].get('use_scheduler', False): # Create Scheduled Task __salt__['task.create_task'](name='update-salt-software', user_name='System', force=True, action_type='Execute', cmd=cmd_shell, arguments='/s /c "{0}"'.format(arguments), start_in=cache_path, trigger_type='Once', start_date='1975-01-01', start_time='01:00', ac_only=False, stop_if_on_batteries=False) # Run Scheduled Task # Special handling for installing salt if re.search(r'salt[\s_.-]*minion', pkg_name, flags=re.IGNORECASE + re.UNICODE) is not None: ret[pkg_name] = {'install status': 'task started'} if not __salt__['task.run'](name='update-salt-software'): log.error('Failed to install %s', pkg_name) log.error('Scheduled Task failed to run') ret[pkg_name] = {'install status': 'failed'} else: # Make sure the task is running, try for 5 secs t_end = time.time() + 5 while time.time() < t_end: time.sleep(0.25) task_running = __salt__['task.status']( 'update-salt-software') == 'Running' if task_running: break if not task_running: log.error('Failed to install %s', pkg_name) log.error('Scheduled Task failed to run') ret[pkg_name] = {'install status': 'failed'} # All other packages run with task scheduler else: if not __salt__['task.run_wait'](name='update-salt-software'): log.error('Failed to install %s', pkg_name) log.error('Scheduled Task failed to run') ret[pkg_name] = {'install status': 'failed'} else: # Launch the command result = __salt__['cmd.run_all']('"{0}" /s /c "{1}"'.format(cmd_shell, arguments), cache_path, output_loglevel='trace', python_shell=False, redirect_stderr=True) if not result['retcode']: ret[pkg_name] = {'install status': 'success'} changed.append(pkg_name) elif result['retcode'] == 3010: # 3010 is ERROR_SUCCESS_REBOOT_REQUIRED report_reboot_exit_codes = kwargs.pop( 'report_reboot_exit_codes', True) if report_reboot_exit_codes: __salt__['system.set_reboot_required_witnessed']() ret[pkg_name] = 
{'install status': 'success, reboot required'} changed.append(pkg_name) elif result['retcode'] == 1641: # 1641 is ERROR_SUCCESS_REBOOT_INITIATED ret[pkg_name] = {'install status': 'success, reboot initiated'} changed.append(pkg_name) else: log.error('Failed to install %s', pkg_name) log.error('retcode %s', result['retcode']) log.error('installer output: %s', result['stdout']) ret[pkg_name] = {'install status': 'failed'} # Get a new list of installed software new = list_pkgs(saltenv=saltenv, refresh=False) # Take the "old" package list and convert the values to strings in # preparation for the comparison below. __salt__['pkg_resource.stringify'](old) # Check for changes in the registry difference = salt.utils.data.compare_dicts(old, new) # Compare the software list before and after # Add the difference to ret ret.update(difference) return ret
0.001122
def _marshaled_dispatch(self, data, dispatch_method = None, path = None): """Dispatches an XML-RPC method from marshalled (XML) data. XML-RPC methods are dispatched from the marshalled (XML) data using the _dispatch method and the result is returned as marshalled data. For backwards compatibility, a dispatch function can be provided as an argument (see comment in SimpleXMLRPCRequestHandler.do_POST) but overriding the existing method through subclassing is the preferred means of changing method dispatch behavior. """ try: params, method = loads(data, use_builtin_types=self.use_builtin_types) # generate response if dispatch_method is not None: response = dispatch_method(method, params) else: response = self._dispatch(method, params) # wrap response in a singleton tuple response = (response,) response = dumps(response, methodresponse=1, allow_none=self.allow_none, encoding=self.encoding) except Fault as fault: response = dumps(fault, allow_none=self.allow_none, encoding=self.encoding) except: # report exception back to server exc_type, exc_value, exc_tb = sys.exc_info() response = dumps( Fault(1, "%s:%s" % (exc_type, exc_value)), encoding=self.encoding, allow_none=self.allow_none, ) return response.encode(self.encoding)
0.005576
def get_collection(self, id_or_uri, filter=''): """ Retrieves a collection of resources. Use this function when the 'start' and 'count' parameters are not allowed in the GET call. Otherwise, use get_all instead. Optional filtering criteria may be specified. Args: id_or_uri: Can be either the resource ID or the resource URI. filter (list or str): General filter/query string. Returns: Collection of the requested resource. """ if filter: filter = self.__make_query_filter(filter) filter = "?" + filter[1:] uri = "{uri}{filter}".format(uri=self.build_uri(id_or_uri), filter=filter) logger.debug('Get resource collection (uri = %s)' % uri) response = self._connection.get(uri) return self.__get_members(response)
0.00454
def setup(self, app): """ Make sure that other installed plugins don't affect the same keyword argument and check if metadata is available.""" for other in app.plugins: if not isinstance(other, AuthPlugin): continue if other.keyword == self.keyword: raise bottle.PluginError("Found another auth plugin " "with conflicting settings (" "non-unique keyword).")
0.003824
def pet_get(self, **kwargs): """ pet.get wrapper. Returns a record dict for the requested pet. :rtype: dict :returns: The pet's record dict. """ root = self._do_api_call("pet.get", kwargs) return self._parse_pet_record(root.find("pet"))
0.006803
def share_with_link(self, share_type='view', share_scope='anonymous'): """ Creates or returns a link you can share with others :param str share_type: 'view' to allow only view access, 'edit' to allow editing, and 'embed' to allow the DriveItem to be embedded :param str share_scope: 'anonymous': anyone with the link can access. 'organization': only organization members can access :return: link to share :rtype: DriveItemPermission """ if not self.object_id: return None url = self.build_url( self._endpoints.get('share_link').format(id=self.object_id)) data = { 'type': share_type, 'scope': share_scope } response = self.con.post(url, data=data) if not response: return None data = response.json() # return data.get('link', {}).get('webUrl') return DriveItemPermission(parent=self, **{self._cloud_data_key: data})
0.001944
def ensembl_to_kegg(organism,kegg_db): """ Looks up KEGG mappings of KEGG ids to ensembl ids :param organism: an organism as listed in organismsKEGG() :param kegg_db: a matching KEGG db as reported in databasesKEGG :returns: a Pandas dataframe with 'KEGGid' and 'ENSid' columns. """ print("KEGG API: http://rest.genome.jp/link/"+kegg_db+"/"+organism) sys.stdout.flush() kegg_ens=urlopen("http://rest.genome.jp/link/"+kegg_db+"/"+organism).read() kegg_ens=kegg_ens.split("\n") final=[] for i in kegg_ens: final.append(i.split("\t")) df=pd.DataFrame(final[0:len(final)-1])[[0,1]] ens_id=pd.DataFrame(df[1].str.split(":").tolist())[1] df=pd.concat([df,ens_id],axis=1) df.columns=['KEGGid','ensDB','ENSid'] df=df[['KEGGid','ENSid']] return df
0.019584
def select_action(self, q_values): """Return the selected action # Arguments q_values (np.ndarray): List of the estimations of Q for each action # Returns Selection action """ # We can't use BGE during testing, since we don't have access to the # action_counts at the end of training. assert self.agent.training, "BoltzmannGumbelQPolicy should only be used for training, not testing" assert q_values.ndim == 1, q_values.ndim q_values = q_values.astype('float64') # If we are starting training, we should reset the action_counts. # Otherwise, action_counts should already be initialized, since we # always do so when we begin training. if self.agent.step == 0: self.action_counts = np.ones(q_values.shape) assert self.action_counts is not None, self.agent.step assert self.action_counts.shape == q_values.shape, (self.action_counts.shape, q_values.shape) beta = self.C/np.sqrt(self.action_counts) Z = np.random.gumbel(size=q_values.shape) perturbation = beta * Z perturbed_q_values = q_values + perturbation action = np.argmax(perturbed_q_values) self.action_counts[action] += 1 return action
0.003053
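A standalone numpy sketch of the Boltzmann-Gumbel perturbation used above: the per-action beta shrinks as that action's visit count grows, so exploration decays where the agent has already looked. The Q-values and the constant C are arbitrary illustrative numbers.

import numpy as np

rng = np.random.default_rng(0)
q_values = np.array([0.2, 0.5, 0.1])
action_counts = np.ones_like(q_values)
C = 1.0  # hypothetical exploration constant

for _ in range(5):
    beta = C / np.sqrt(action_counts)
    perturbed = q_values + beta * rng.gumbel(size=q_values.shape)
    action = int(np.argmax(perturbed))
    action_counts[action] += 1
    print(action, action_counts)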
def body(self): """get the contents of the script""" if not hasattr(self, '_body'): self._body = inspect.getsource(self.module) return self._body
0.01105
def imap_unordered(self, jobs, timeout=0.5): """A iterator over a set of jobs. :param jobs: the items to pass through our function :param timeout: timeout between polling queues Results are yielded as soon as they are available in the output queue (up to the discretisation provided by timeout). Since the queues can be specified to have a maximum length, the consumption of both the input jobs iterable and memory use in the output queues are controlled. """ timeout = max(timeout, 0.5) jobs_iter = iter(jobs) out_jobs = 0 job = None while True: if not self.closed and job is None: # Get a job try: job = jobs_iter.next() except StopIteration: job = None self.close() if job is not None: # Put any job try: self.put(job, True, timeout) except Queue.Full: pass # we'll try again next time around else: job = None for result in self.get_finished(): yield result # Input and yielded everything? if self.closed and self._items == 0: break sleep(timeout)
0.002823
def clean_chars(value): "Hack to remove non-ASCII data. Should convert to Unicode: code page 437?" value = value.replace('\xb9', ' ') value = value.replace('\xf8', ' ') value = value.replace('\xab', ' ') value = value.replace('\xa7', ' ') value = value.replace('\xa8', ' ') value = value.replace('\xfb', ' ') value = value.replace('\xfc', ' ') return value
0.002551
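The docstring above hints that the proper fix is decoding as code page 437 rather than blanking bytes; a hedged sketch of that alternative, assuming the raw input is still available as bytes (in the original it arrives already converted to a str).

raw = b"90\xf8 F"           # hypothetical input; 0xF8 is the degree sign in code page 437
print(raw.decode("cp437"))  # -> 90° F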
def encoder_config(self, pin_a, pin_b, cb=None): """ This command enables the rotary encoder (2 pin + ground) and will enable encoder reporting. NOTE: This command is not currently part of standard arduino firmata, but is provided for legacy support of CodeShield on an Arduino UNO. Encoder data is retrieved by performing a digital_read from pin a (encoder pin 1) :param pin_a: Encoder pin 1. :param pin_b: Encoder pin 2. :param cb: callback function to report encoder changes :return: No return value """ data = [pin_a, pin_b] self._command_handler.digital_response_table[pin_a][self._command_handler.RESPONSE_TABLE_MODE] \ = self.ENCODER self._command_handler.digital_response_table[pin_a][self._command_handler.RESPONSE_TABLE_CALLBACK] = cb self.enable_digital_reporting(pin_a) self._command_handler.digital_response_table[pin_b][self._command_handler.RESPONSE_TABLE_MODE] \ = self.ENCODER self._command_handler.digital_response_table[pin_b][self._command_handler.RESPONSE_TABLE_CALLBACK] = cb self.enable_digital_reporting(pin_b) self._command_handler.send_sysex(self._command_handler.ENCODER_CONFIG, data)
0.00695
def save(self, *args, **kwargs): """ call synchronizer "after_external_layer_saved" method for any additional operation that must be executed after save """ after_save = kwargs.pop('after_save', True) super(LayerExternal, self).save(*args, **kwargs) # call after_external_layer_saved method of synchronizer if after_save: try: synchronizer = self.synchronizer except ImproperlyConfigured: pass else: if synchronizer: synchronizer.after_external_layer_saved(self.config) # reload schema self._reload_schema()
0.002894
def _choice_stmt(self, stmt: Statement, sctx: SchemaContext) -> None: """Handle choice statement.""" self._handle_child(ChoiceNode(), stmt, sctx)
0.012422
def get(self, resource, **params): """ Generic TeleSign REST API GET handler. :param resource: The partial resource URI to perform the request against, as a string. :param params: Body params to perform the GET request with, as a dictionary. :return: The RestClient Response object. """ return self._execute(self.session.get, 'GET', resource, **params)
0.00978
def anchor(self, value): """ Setter for **self.__anchor** attribute. :param value: Attribute value. :type value: int """ if value is not None: assert type(value) is int, "'{0}' attribute: '{1}' type is not 'int'!".format("anchor", value) assert value in range( 0, 9), "'{0}' attribute: '{1}' needs to be in the '0' to '8' range!".format("anchor", value) self.__anchor = value
0.008547
def cmd(self, *args, **kwargs): """ Execute tmux command and return output. Returns ------- :class:`common.tmux_cmd` Notes ----- .. versionchanged:: 0.8 Renamed from ``.tmux`` to ``.cmd``. """ args = list(args) if self.socket_name: args.insert(0, '-L{0}'.format(self.socket_name)) if self.socket_path: args.insert(0, '-S{0}'.format(self.socket_path)) if self.config_file: args.insert(0, '-f{0}'.format(self.config_file)) if self.colors: if self.colors == 256: args.insert(0, '-2') elif self.colors == 88: args.insert(0, '-8') else: raise ValueError('Server.colors must equal 88 or 256') return tmux_cmd(*args, **kwargs)
0.002283
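For orientation, a small illustration of how the flag list above is assembled in front of the tmux arguments; the socket name, config file, and colour depth are invented values, and the flags (-L, -f, -2) mirror the ones the snippet itself uses.

args = ['list-sessions']
socket_name, config_file, colors = 'dev', '~/.tmux.conf', 256  # hypothetical settings
if socket_name:
    args.insert(0, '-L{0}'.format(socket_name))
if config_file:
    args.insert(0, '-f{0}'.format(config_file))
if colors == 256:
    args.insert(0, '-2')
print(args)  # ['-2', '-f~/.tmux.conf', '-Ldev', 'list-sessions']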
def _detect_or_validate(self, val): ''' Detect the version used from the row content, or validate against the version if given. ''' if isinstance(val, list) \ or isinstance(val, dict) \ or isinstance(val, SortableDict) \ or isinstance(val, Grid): # Project Haystack 3.0 type. self._assert_version(VER_3_0)
0.004796
def get_what_txt(self): """ Overrides the base behaviour defined in ValidationError in order to add details about the class field. :return: """ return 'field [{field}] for class [{clazz}]'.format(field=self.get_variable_str(), clazz=self.validator.get_validated_class_display_name())
0.013055
def compute_savings_list(self, graph): """Compute Clarke and Wright savings list A saving list is a matrix containing the saving amount S between i and j. S is calculated by S = d(0,i) + d(0,j) - d(i,j) (CLARKE; WRIGHT, 1964) Args ---- graph: :networkx:`NetworkX Graph Obj< >` A NetworkX graph is used. Returns ------- :any:`list` of `Node` List of node pairs sorted by their savings """ savings_list = {} for i, j in graph.edges(): # t = (i, j) if repr(i) < repr(j): t = (i, j) else: t = (j, i) if i == graph.depot() or j == graph.depot(): continue savings_list[t] = graph.distance(graph.depot(), i) + graph.distance(graph.depot(), j) - graph.distance(i, j) sorted_savings_list = sorted(list(savings_list.items()), key=operator.itemgetter(1), reverse=True) return [nodes for nodes, saving in sorted_savings_list]
0.006506
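A toy computation of the Clarke-Wright saving S = d(0,i) + d(0,j) - d(i,j) from the formula above, with an invented symmetric distance table where node 0 is the depot.

# Invented distances; node 0 is the depot.
d = {
    (0, 1): 4, (0, 2): 5, (0, 3): 7,
    (1, 2): 3, (1, 3): 8, (2, 3): 2,
}

def dist(i, j):
    return d[(min(i, j), max(i, j))]

savings = {
    (i, j): dist(0, i) + dist(0, j) - dist(i, j)
    for i in (1, 2, 3) for j in (1, 2, 3) if i < j
}
for pair, s in sorted(savings.items(), key=lambda kv: kv[1], reverse=True):
    print(pair, s)
# (2, 3) 10, (1, 2) 6, (1, 3) 3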
def login(self) -> bool: """Return True if log in request succeeds""" user_check = isinstance(self.username, str) and len(self.username) > 0 pass_check = isinstance(self.password, str) and len(self.password) > 0 if user_check and pass_check: response, _ = helpers.call_api( '/cloud/v1/user/login', 'post', json=helpers.req_body(self, 'login') ) if response and helpers.check_response(response, 'login'): self.token = response['result']['token'] self.account_id = response['result']['accountID'] self.enabled = True return True else: logger.error('Error logging in with username and password') return False else: if user_check is False: logger.error('Username invalid') if pass_check is False: logger.error('Password invalid') return False
0.001925
def normalized(self): """Return a normalized version of the histogram where the values sum to one. """ total = self.total() result = Histogram() for value, count in iteritems(self): try: result[value] = count / float(total) except UnorderableElements as e: result = Histogram.from_dict(dict(result), key=hash) result[value] = count / float(total) return result
0.00409
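A stripped-down version of the normalization above, applied to a plain dict of counts (no special Histogram type; the tally is illustrative).

counts = {"a": 3, "b": 1, "c": 4}  # hypothetical tally
total = float(sum(counts.values()))
normalized = {value: count / total for value, count in counts.items()}
print(normalized)                # {'a': 0.375, 'b': 0.125, 'c': 0.5}
print(sum(normalized.values()))  # 1.0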
def set_configuration(self, ip_address, network_mask, from_ip_address, to_ip_address): """configures the server in ip_address of type str server IP address in network_mask of type str server network mask in from_ip_address of type str server From IP address for address range in to_ip_address of type str server To IP address for address range raises :class:`OleErrorInvalidarg` invalid configuration supplied """ if not isinstance(ip_address, basestring): raise TypeError("ip_address can only be an instance of type basestring") if not isinstance(network_mask, basestring): raise TypeError("network_mask can only be an instance of type basestring") if not isinstance(from_ip_address, basestring): raise TypeError("from_ip_address can only be an instance of type basestring") if not isinstance(to_ip_address, basestring): raise TypeError("to_ip_address can only be an instance of type basestring") self._call("setConfiguration", in_p=[ip_address, network_mask, from_ip_address, to_ip_address])
0.008117
def set(self, value): """This parameter method attempts to set a specific value for this parameter. The value will be validated first; if it cannot be set, an error message will be stored in the error property of this parameter.""" if self.validate(value): #print "Parameter " + self.id + " successfully set to " + repr(value) self.hasvalue = True self.value = value return True else: #print "Parameter " + self.id + " COULD NOT BE set to " + repr(value) return False
0.012411
def reflection_matrix(point, normal): """Return matrix to mirror at plane defined by point and normal vector. >>> v0 = np.random.random(4) - 0.5 >>> v0[3] = 1. >>> v1 = np.random.random(3) - 0.5 >>> R = reflection_matrix(v0, v1) >>> np.allclose(2, np.trace(R)) True >>> np.allclose(v0, np.dot(R, v0)) True >>> v2 = v0.copy() >>> v2[:3] += v1 >>> v3 = v0.copy() >>> v2[:3] -= v1 >>> np.allclose(v2, np.dot(R, v3)) True """ normal = unit_vector(normal[:3]) M = np.identity(4) M[:3, :3] -= 2.0 * np.outer(normal, normal) M[:3, 3] = (2.0 * np.dot(point[:3], normal)) * normal return M
0.001502
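The construction above is a Householder reflection: the upper-left block is I - 2*outer(n, n), and the translation column shifts the mirror plane so it passes through `point`. A quick numerical check using the xy-plane through the origin as an example.

import numpy as np

normal = np.array([0.0, 0.0, 1.0])  # unit normal of the xy-plane
point = np.zeros(3)                 # plane passes through the origin
M = np.identity(4)
M[:3, :3] -= 2.0 * np.outer(normal, normal)
M[:3, 3] = (2.0 * np.dot(point, normal)) * normal

p = np.array([1.0, 2.0, 3.0, 1.0])
print(M @ p)  # [ 1.  2. -3.  1.] -- only the z component is mirrored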
def _ipython_display_(self): """Display Jupyter Notebook widget""" from IPython.display import display self.build_widget() display(self.widget())
0.011236
def treebeard_js(): """ Template tag to print out the proper <script/> tag to include a custom .js """ path = get_static_url() js_file = urljoin(path, 'treebeard/treebeard-admin.js') jquery_ui = urljoin(path, 'treebeard/jquery-ui-1.8.5.custom.min.js') # Jquery UI is needed to call disableSelection() on drag and drop so # text selections arent marked while dragging a table row # http://www.lokkju.com/blog/archives/143 TEMPLATE = ( '<script type="text/javascript" src="{}"></script>' '<script type="text/javascript" src="{}"></script>' '<script>' '(function($){{jQuery = $.noConflict(true);}})(django.jQuery);' '</script>' '<script type="text/javascript" src="{}"></script>') return format_html( TEMPLATE, "jsi18n", mark_safe(js_file), mark_safe(jquery_ui))
0.002315
def macho_dependencies_list(target_path, header_magic=None): """ Generates a list of libraries the given Mach-O file depends on. In that list a single library is represented by its "install path": for some libraries it would be a full file path, and for others it would be a relative path (sometimes with dyld templates like @executable_path or @rpath in it). Note: I don't know any reason why would some architectures of a fat Mach-O depend on certain libraries while others don't, but *it's technically possible*. So that's why you may want to specify the `header_magic` value for a particular header. Returns an object with two properties: `weak` and `strong` that hold lists of weak and strong dependencies respectively. """ MachODeprendencies = namedtuple("MachODeprendecies", "weak strong") # Convert the magic value into macholib representation if needed if isinstance(header_magic, basestring): header_magic = _MH_MAGIC_from_string(header_magic) macho = MachO(target_path) # Obtain a list of headers for the required magic value (if any) suggestions = filter(lambda t: t.header.magic == header_magic or # just add all headers if user didn't specifiy the magic header_magic == None, macho.headers) header = None if len(suggestions) <= 0 else suggestions[0] # filter() above *always* returns a list, so we have to check if it's empty if header is None: raise Exception("Unable to find a header for the given MAGIC value in that Mach-O file") return None def decodeLoadCommandData(data): # Also ignore trailing zeros return data[:data.find(b"\x00")].decode(sys.getfilesystemencoding()) def strongReferencesFromHeader(h): # List of LC_LOAD_DYLIB commands list = filter(lambda (lc,cmd,data): lc.cmd == LC_LOAD_DYLIB, h.commands) # Their contents (aka data) as a file path return map(lambda (lc,cmd,data): decodeLoadCommandData(data), list) def weakReferencesFromHeader(h): list = filter(lambda (lc,cmd,data): lc.cmd == LC_LOAD_WEAK_DYLIB, h.commands) return map(lambda (lc,cmd,data): decodeLoadCommandData(data), list) strongRefs = strongReferencesFromHeader(header) weakRefs = weakReferencesFromHeader(header) return MachODeprendencies(weak = weakRefs, strong = strongRefs)
0.027344
def _get_session_for_table(self, base_session): """ Only present the session for modeling when doses were dropped if it's successful; otherwise show the original modeling session. """ if base_session.recommended_model is None and base_session.doses_dropped > 0: return base_session.doses_dropped_sessions[0] return base_session
0.010471
def ishex(obj): """ Test if the argument is a string representing a valid hexadecimal digit. :param obj: Object :type obj: any :rtype: boolean """ return isinstance(obj, str) and (len(obj) == 1) and (obj in string.hexdigits)
0.007843
def update(name, connection_uri="", id_file="", o=[], config=None): """ Enhanced version of the edit command featuring multiple edits using regular expressions to match entries """ storm_ = get_storm_instance(config) settings = {} if id_file != "": settings['identityfile'] = id_file for option in o: k, v = option.split("=") settings[k] = v try: storm_.update_entry(name, **settings) print(get_formatted_message( '"{0}" updated successfully.'.format( name ), 'success')) except ValueError as error: print(get_formatted_message(error, 'error'), file=sys.stderr) sys.exit(1)
0.002805
def generate_page_title(self, data_slug): """Generates remainder of page title specific to data_slug (tag).""" tag = Tag.objects.filter(slug=data_slug) return tag[0].word
0.010309
def pull_repo(repo_name): """Pull from origin for repo_name.""" repo = ClonedRepo.objects.get(pk=repo_name) repo.pull()
0.007634
def shape(self) -> Tuple[int, ...]: """Shape of histogram's data. Returns ------- One-element tuple with the number of bins along each axis. """ return tuple(bins.bin_count for bins in self._binnings)
0.008032
def find(self, title): """Return the first worksheet with the given title. Args: title(str): title/name of the worksheet to return Returns: WorkSheet: contained worksheet object Raises: KeyError: if the spreadsheet has no worksheet with the given ``title`` """ if title not in self._titles: raise KeyError(title) return self._titles[title][0]
0.006667
def get_tree(self, request, tree_id, item_id=None): """Fetches Tree for current or given TreeItem.""" if tree_id is None: tree_id = self.get_object(request, item_id).tree_id self.tree = MODEL_TREE_CLASS._default_manager.get(pk=tree_id) self.tree.verbose_name_plural = self.tree._meta.verbose_name_plural self.tree.urls = _TREE_URLS return self.tree
0.004902
def depth_file_for_rgb_file(rgb_filename, rgb_file_list, depth_file_list): """Returns the *closest* depth file from an RGB filename""" (root, filename) = os.path.split(rgb_filename) rgb_timestamps = np.array(Kinect.timestamps_from_file_list(rgb_file_list)) depth_timestamps = np.array(Kinect.timestamps_from_file_list(depth_file_list)) needle_ts = rgb_timestamps[rgb_file_list.index(rgb_filename)] haystack_idx = np.argmin(np.abs(depth_timestamps - needle_ts)) depth_filename = depth_file_list[haystack_idx] return depth_filename
0.006745
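The timestamp matching above boils down to a nearest-neighbour lookup with np.argmin; a self-contained sketch with made-up timestamps.

import numpy as np

rgb_ts = 12.40                              # timestamp of the RGB frame
depth_ts = np.array([12.05, 12.38, 12.71])  # timestamps of candidate depth frames
closest = int(np.argmin(np.abs(depth_ts - rgb_ts)))
print(closest)  # 1 -> the 12.38 s depth frame is the best match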
def post(self): """Dump current profiler statistics into a file.""" filename = self.get_argument('filename', 'dump.prof') CProfileWrapper.profiler.dump_stats(filename) self.finish()
0.00939
def _create_PmtInf_node(self): """ Method to create the blank payment information nodes as a dict. """ ED = dict() # ED is element dict ED['PmtInfNode'] = ET.Element("PmtInf") ED['PmtInfIdNode'] = ET.Element("PmtInfId") ED['PmtMtdNode'] = ET.Element("PmtMtd") ED['BtchBookgNode'] = ET.Element("BtchBookg") ED['NbOfTxsNode'] = ET.Element("NbOfTxs") ED['CtrlSumNode'] = ET.Element("CtrlSum") ED['PmtTpInfNode'] = ET.Element("PmtTpInf") ED['SvcLvlNode'] = ET.Element("SvcLvl") ED['Cd_SvcLvl_Node'] = ET.Element("Cd") ED['LclInstrmNode'] = ET.Element("LclInstrm") ED['Cd_LclInstrm_Node'] = ET.Element("Cd") ED['SeqTpNode'] = ET.Element("SeqTp") ED['ReqdColltnDtNode'] = ET.Element("ReqdColltnDt") ED['CdtrNode'] = ET.Element("Cdtr") ED['Nm_Cdtr_Node'] = ET.Element("Nm") ED['CdtrAcctNode'] = ET.Element("CdtrAcct") ED['Id_CdtrAcct_Node'] = ET.Element("Id") ED['IBAN_CdtrAcct_Node'] = ET.Element("IBAN") ED['CdtrAgtNode'] = ET.Element("CdtrAgt") ED['FinInstnId_CdtrAgt_Node'] = ET.Element("FinInstnId") if 'BIC' in self._config: ED['BIC_CdtrAgt_Node'] = ET.Element("BIC") ED['ChrgBrNode'] = ET.Element("ChrgBr") ED['CdtrSchmeIdNode'] = ET.Element("CdtrSchmeId") ED['Nm_CdtrSchmeId_Node'] = ET.Element("Nm") ED['Id_CdtrSchmeId_Node'] = ET.Element("Id") ED['PrvtIdNode'] = ET.Element("PrvtId") ED['OthrNode'] = ET.Element("Othr") ED['Id_Othr_Node'] = ET.Element("Id") ED['SchmeNmNode'] = ET.Element("SchmeNm") ED['PrtryNode'] = ET.Element("Prtry") return ED
0.001147
def permute(self, qubits: Qubits) -> 'Density': """Return a copy of this state with qubit labels permuted""" vec = self.vec.permute(qubits) return Density(vec.tensor, vec.qubits, self._memory)
0.009259
def _register_default_option(nsobj, opt): """ Register default ConfigOption value if it doesn't exist. If does exist, update the description if needed """ item = ConfigItem.get(nsobj.namespace_prefix, opt.name) if not item: logger.info('Adding {} ({}) = {} to {}'.format( opt.name, opt.type, opt.default_value, nsobj.namespace_prefix )) item = ConfigItem() item.namespace_prefix = nsobj.namespace_prefix item.key = opt.name item.value = opt.default_value item.type = opt.type item.description = opt.description nsobj.config_items.append(item) else: if item.description != opt.description: logger.info('Updating description of {} / {}'.format(item.namespace_prefix, item.key)) item.description = opt.description db.session.add(item)
0.003293
def _current_color(self, which=0): """Returns a color for the queue. Parameters ---------- which : int (optional, default: ``0``) Specifies the type of color to return. Returns ------- color : list Returns a RGBA color that is represented as a list with 4 entries where each entry can be any floating point number between 0 and 1. * If ``which`` is 1 then it returns the color of the edge as if it were a self loop. This is specified in ``colors['edge_loop_color']``. * If ``which`` is 2 then it returns the color of the vertex pen color (defined as color/vertex_color in :meth:`.QueueNetworkDiGraph.graph_draw`). This is specified in ``colors['vertex_color']``. * If ``which`` is anything else, then it returns the a shade of the edge that is proportional to the number of agents in the system -- which includes those being servered and those waiting to be served. More agents correspond to darker edge colors. Uses ``colors['vertex_fill_color']`` if the queue sits on a loop, and ``colors['edge_color']`` otherwise. """ if which == 1: color = self.colors['edge_loop_color'] elif which == 2: color = self.colors['vertex_color'] else: div = self.coloring_sensitivity * self.num_servers + 1. tmp = 1. - min(self.num_system / div, 1) if self.edge[0] == self.edge[1]: color = [i * tmp for i in self.colors['vertex_fill_color']] color[3] = 1.0 else: color = [i * tmp for i in self.colors['edge_color']] color[3] = 1 / 2. return color
0.001049
def make_tree(self): """ Generates the merkle tree. """ self.tree['is_ready'] = False leaf_count = len(self.tree['leaves']) if leaf_count > 0: # skip this whole process if there are no leaves added to the tree self._unshift(self.tree['levels'], self.tree['leaves']) while len(self.tree['levels'][0]) > 1: self._unshift(self.tree['levels'], self._calculate_next_level()) self.tree['is_ready'] = True
0.005929
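A minimal sketch of computing one parent level of a Merkle tree, which is what the `_calculate_next_level` loop above repeats until a single root remains. SHA-256 and the leaf values are assumptions here (the snippet doesn't show its hash choice), and an odd trailing node is simply promoted unchanged.

import hashlib

def parent_level(level):
    # Hash adjacent pairs; promote a leftover odd node as-is.
    parents = [hashlib.sha256(level[i] + level[i + 1]).digest()
               for i in range(0, len(level) - 1, 2)]
    if len(level) % 2 == 1:
        parents.append(level[-1])
    return parents

level = [hashlib.sha256(bytes([i])).digest() for i in range(5)]  # five dummy leaves
while len(level) > 1:
    level = parent_level(level)
print(level[0].hex())  # Merkle root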
def _get_music_services_data_xml(soco=None): """Fetch the music services data xml from a Sonos device. Args: soco (SoCo): a SoCo instance to query. If none is specified, a random device will be used. Defaults to `None`. Returns: str: a string containing the music services data xml """ device = soco or discovery.any_soco() log.debug("Fetching music services data from %s", device) available_services = device.musicServices.ListAvailableServices() descriptor_list_xml = available_services[ 'AvailableServiceDescriptorList'] log.debug("Services descriptor list: %s", descriptor_list_xml) return descriptor_list_xml
0.002692
def deploy(self, id_networkv6): """Deploy network in equipments and set column 'active = 1' in table redeipv6. :param id_networkv6: ID for NetworkIPv6 :return: Equipments configuration output """ data = dict() uri = 'api/networkv6/%s/equipments/' % id_networkv6 return super(ApiNetworkIPv6, self).post(uri, data=data)
0.007916
def _parse_banners(self): """Parses the global config and returns the value for both motd and login banners. Returns: dict: The configured value for motd and login banners. If the banner is not set it will return a value of None for that key. The returned dict object is intended to be merged into the resource dict """ motd_value = login_value = None matches = re.findall('^banner\s+(login|motd)\s?$\n(.*?)$\nEOF$\n', self.config, re.DOTALL | re.M) for match in matches: if match[0].strip() == "motd": motd_value = match[1] elif match[0].strip() == "login": login_value = match[1] return dict(banner_motd=motd_value, banner_login=login_value)
0.00464
def _collect_uncolored_outputs(unspent_outputs, amount): """ Returns a list of uncolored outputs for the specified amount. :param list[SpendableOutput] unspent_outputs: The list of available outputs. :param int amount: The amount to collect. :return: A list of outputs, and the total amount collected. :rtype: (list[SpendableOutput], int) """ total_amount = 0 result = [] for output in unspent_outputs: if output.output.asset_id is None: result.append(output) total_amount += output.output.value if total_amount >= amount: return result, total_amount raise InsufficientFundsError
0.004043
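The collection loop above is a plain greedy accumulation; a tiny standalone illustration with integers standing in for output values.

def collect(values, amount):
    total, picked = 0, []
    for value in values:
        picked.append(value)
        total += value
        if total >= amount:
            return picked, total
    raise ValueError("insufficient funds")

print(collect([5, 3, 9, 2], 10))  # ([5, 3, 9], 17)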
def describe(self, fields=None, **kwargs): """ :param fields: dict where the keys are field names that should be returned, and values should be set to True (by default, all fields are returned) :type fields: dict :returns: Description of the analysis :rtype: dict Returns a hash with key-value pairs containing information about the analysis """ describe_input = {} if fields is not None: describe_input['fields'] = fields self._desc = dxpy.api.analysis_describe(self._dxid, describe_input, **kwargs) return self._desc
0.004601
def private_ip(self): """ Private ip_address """ ip = None for eth in self.networks['v4']: if eth['type'] == 'private': ip = eth['ip_address'] break if ip is None: raise ValueError("No private IP found") return ip
0.006154
def actions(self): """Gets the list of allowed actions :rtype: list[str] """ r = self.session.query(models.Action).all() return [x.type_name for x in r]
0.010363
async def listTasks(self, *args, **kwargs): """ List Tasks List the tasks immediately under a given namespace. This endpoint lists up to 1000 tasks. If more tasks are present, a `continuationToken` will be returned, which can be given in the next request. For the initial request, the payload should be an empty JSON object. **Remark**, this end-point is designed for humans browsing for tasks, not services, as that makes little sense. This method gives output: ``v1/list-tasks-response.json#`` This method is ``stable`` """ return await self._makeApiCall(self.funcinfo["listTasks"], *args, **kwargs)
0.005563
def make_decoder(base_depth, activation, input_size, output_shape): """Creates the decoder function. Args: base_depth: Layer base depth in decoder net. activation: Activation function in hidden layers. input_size: The flattened latent input shape as an int. output_shape: The output image shape as a list. Returns: decoder: A `callable` mapping a `Tensor` of encodings to a `tfd.Distribution` instance over images. """ deconv = functools.partial( tf.keras.layers.Conv2DTranspose, padding="SAME", activation=activation) conv = functools.partial( tf.keras.layers.Conv2D, padding="SAME", activation=activation) decoder_net = tf.keras.Sequential([ tf.keras.layers.Reshape((1, 1, input_size)), deconv(2 * base_depth, 7, padding="VALID"), deconv(2 * base_depth, 5), deconv(2 * base_depth, 5, 2), deconv(base_depth, 5), deconv(base_depth, 5, 2), deconv(base_depth, 5), conv(output_shape[-1], 5, activation=None), tf.keras.layers.Reshape(output_shape), ]) def decoder(codes): """Builds a distribution over images given codes. Args: codes: A `Tensor` representing the inputs to be decoded, of shape `[..., code_size]`. Returns: decoder_distribution: A multivariate `Bernoulli` distribution. """ logits = decoder_net(codes) return tfd.Independent(tfd.Bernoulli(logits=logits), reinterpreted_batch_ndims=len(output_shape), name="decoder_distribution") return decoder
0.004456
def _get_user_groups(self, user): ''' Get user groups. :param user: :return: ''' return [g.gr_name for g in grp.getgrall() if user in g.gr_mem] + [grp.getgrgid(pwd.getpwnam(user).pw_gid).gr_name]
0.011538
def PopupNonBlocking(*args, button_type=POPUP_BUTTONS_OK, button_color=None, background_color=None, text_color=None, auto_close=False, auto_close_duration=None, non_blocking=True, icon=DEFAULT_WINDOW_ICON, line_width=None, font=None, no_titlebar=False, grab_anywhere=False, keep_on_top=False, location=(None, None)): """ Show Popup box and immediately return (does not block) :param args: :param button_type: :param button_color: :param background_color: :param text_color: :param auto_close: :param auto_close_duration: :param non_blocking: :param icon: :param line_width: :param font: :param no_titlebar: :param grab_anywhere: :param keep_on_top: :param location: :return: """ Popup(*args, button_color=button_color, background_color=background_color, text_color=text_color, button_type=button_type, auto_close=auto_close, auto_close_duration=auto_close_duration, non_blocking=non_blocking, icon=icon, line_width=line_width, font=font, no_titlebar=no_titlebar, grab_anywhere=grab_anywhere, keep_on_top=keep_on_top, location=location)
0.005747
def remove(self, processor_identity): """Removes all of the Processors for a particular transaction processor zeromq identity. Args: processor_identity (str): The zeromq identity of the transaction processor. """ with self._condition: processor_types = self._identities.get(processor_identity) if processor_types is None: LOGGER.warning("transaction processor with identity %s tried " "to unregister but was not registered", processor_identity) return for processor_type in processor_types: if processor_type not in self._processors: LOGGER.warning("processor type %s not a known processor " "type but is associated with identity %s", processor_type, processor_identity) continue self._processors[processor_type].remove_processor( processor_identity=processor_identity) if not self._processors[processor_type]: del self._processors[processor_type]
0.001564
def hash(self): "signatures are non deterministic" if self.sender is None: raise MissingSignatureError() class HashSerializable(rlp.Serializable): fields = [(field, sedes) for field, sedes in self.fields if field not in ('v', 'r', 's')] + [('_sender', binary)] _sedes = None return sha3(rlp.encode(self, HashSerializable))
0.004843
def sparse_cross_entropy(input_, labels, name=PROVIDED, loss_weight=None, per_example_weights=None): """Calculates the Cross Entropy of input_ vs labels. Args: input_: A rank 2 `Tensor` or a Pretty Tensor holding the logits. labels: A rank 1 integer `Tensor` with class ordinals name: The optional name. loss_weight: A weight to scale the loss. Used when there are multiple losses. per_example_weights: A weighting for each example. Returns: A loss. Raises: ValueError: if labels is None or the type is not float or double. """ if labels is None: raise ValueError('Labels must be set') if per_example_weights is not None: per_example_weights = _convert_and_assert_per_example_weights_compatible( input_, per_example_weights, dtype=input_.dtype) return apply_regression( input_, tf.contrib.nn.deprecated_flipped_sparse_softmax_cross_entropy_with_logits, labels, [], name='%s_loss' % name, loss_weight=loss_weight, per_example_weights=per_example_weights)
0.005085
def validate(input_schema=None, output_schema=None,
             input_example=None, output_example=None,
             validator_cls=None, format_checker=None,
             on_empty_404=False, use_defaults=False):
    """Parameterized decorator for schema validation

    :type validator_cls: IValidator class
    :type format_checker: jsonschema.FormatChecker or None
    :type on_empty_404: bool
    :param on_empty_404: If this is set, and the result from the decorated
        method is a falsy value, a 404 will be raised.
    :type use_defaults: bool
    :param use_defaults: If this is set, will put 'default' keys from the
        schema into self.body (if the schema type is object). Example:
            {
                'published': {'type': 'boolean', 'default': False}
            }
        self.body will contain the 'published' key with value False if none
        comes from the request; this also works with nested schemas.
    """
    @container
    def _validate(rh_method):
        """Decorator for RequestHandler schema validation

        This decorator:

            - Validates request body against input schema of the method
            - Calls the ``rh_method`` and gets output from it
            - Validates output against output schema of the method
            - Calls ``JSendMixin.success`` to write the validated output

        :type rh_method: function
        :param rh_method: The RequestHandler method to be decorated
        :returns: The decorated method
        :raises ValidationError: If input is invalid as per the schema
            or malformed
        :raises TypeError: If the output is invalid as per the schema
            or malformed
        :raises APIError: If the output is a falsy value and on_empty_404
            is True, an HTTP 404 error is returned
        """
        @wraps(rh_method)
        @tornado.gen.coroutine
        def _wrapper(self, *args, **kwargs):
            # In case the specified input_schema is ``None``, we
            # don't json.loads the input, but just set it to ``None``
            # instead.
            if input_schema is not None:
                # Attempt to json.loads the input
                try:
                    # TODO: Assuming UTF-8 encoding for all requests,
                    # find a nice way of determining this from charset
                    # in headers if provided
                    encoding = "UTF-8"
                    input_ = json.loads(self.request.body.decode(encoding))
                except ValueError:
                    raise jsonschema.ValidationError(
                        "Input is malformed; could not decode JSON object."
                    )

                if use_defaults:
                    input_ = input_schema_clean(input_, input_schema)

                # Validate the received input
                jsonschema.validate(
                    input_,
                    input_schema,
                    cls=validator_cls,
                    format_checker=format_checker
                )
            else:
                input_ = None

            # A json.loads'd version of self.request["body"] is now available
            # as self.body
            setattr(self, "body", input_)
            # Call the requesthandler method
            output = rh_method(self, *args, **kwargs)
            # If the rh_method returned a Future a la `raise Return(value)`
            # we grab the output.
            if is_future(output):
                output = yield output

            # if output is empty, auto return the error 404.
            if not output and on_empty_404:
                raise APIError(404, "Resource not found.")

            if output_schema is not None:
                # We wrap output in an object before validating in case
                # output is a string (and ergo not a validatable JSON object)
                try:
                    jsonschema.validate(
                        {"result": output},
                        {
                            "type": "object",
                            "properties": {
                                "result": output_schema
                            },
                            "required": ["result"]
                        }
                    )
                except jsonschema.ValidationError as e:
                    # We essentially re-raise this as a TypeError because
                    # we don't want this error data passed back to the client
                    # because it's a fault on our end. The client should
                    # only see a 500 - Internal Server Error.
                    raise TypeError(str(e))

            # If no ValidationError has been raised up until here, we write
            # back output
            self.success(output)

        setattr(_wrapper, "input_schema", input_schema)
        setattr(_wrapper, "output_schema", output_schema)
        setattr(_wrapper, "input_example", input_example)
        setattr(_wrapper, "output_example", output_example)

        return _wrapper
    return _validate
0.000196
def read_from_buffer(cls, buf, identifier_str=None):
    """Load the context from a buffer."""
    try:
        return cls._read_from_buffer(buf, identifier_str)
    except Exception as e:
        cls._load_error(e, identifier_str)
0.007968
def get_process_log(self, pid=None, start=0, limit=1000):
    '''
    get_process_log(self, pid=None, start=0, limit=1000)

    Get process logs

    :Parameters:
    * *pid* (`string`) -- Identifier of an existing process
    * *start* (`int`) -- start index to retrieve logs from
    * *limit* (`int`) -- maximum number of entities to retrieve

    :return: Process log entries
    '''
    pid = self._get_pid(pid)
    data = self._call_rest_api(
        'get',
        '/processes/' + pid + '/log?start={}&limit={}'.format(start, limit),
        error='Failed to fetch process log')
    return data['list']
0.006359
def cart_clear(self, CartId=None, HMAC=None, **kwargs):
    """CartClear.

    Removes all items from cart.

    :param CartId: Id of cart
    :param HMAC: HMAC of cart. Do not use url encoded
    :return: An :class:`~.AmazonCart`.
    """
    if not CartId or not HMAC:
        raise CartException('CartId and HMAC are required for the CartClear call')

    response = self.api.CartClear(CartId=CartId, HMAC=HMAC, **kwargs)
    root = objectify.fromstring(response)
    new_cart = AmazonCart(root)
    self._check_for_cart_error(new_cart)
    return new_cart
0.003425
def _check_groups(s, groups):
    """Ensures that all particles are included in exactly 1 group"""
    ans = []
    for g in groups:
        ans.extend(g)
    if np.unique(ans).size != np.size(ans):
        return False
    elif np.unique(ans).size != s.obj_get_positions().shape[0]:
        return False
    else:
        return (np.arange(s.obj_get_radii().size) == np.sort(ans)).all()
0.002584
def authenticate_heat_admin(self, keystone):
    """Authenticates the admin user with heat."""
    self.log.debug('Authenticating heat admin...')
    ep = keystone.service_catalog.url_for(service_type='orchestration',
                                          interface='publicURL')
    if keystone.session:
        return heat_client.Client(endpoint=ep, session=keystone.session)
    else:
        return heat_client.Client(endpoint=ep, token=keystone.auth_token)
0.004032
def contains_all(set1, set2, warn):
    """
    Checks if all elements from set2 are in set1.

    :param set1: a set of values
    :param set2: a set of values
    :param warn: the error message to raise when set2 is not contained in set1
    :return: returns True if all values of set2 are in set1
    """
    for elem in set2:
        if elem not in set1:
            raise ValueError(warn)
    return True
0.004695
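A minimal usage sketch for the contains_all snippet above, assuming the function is imported as-is; the sets and the error message are invented for illustration.

# Hypothetical usage; values are illustrative only.
allowed = {"red", "green", "blue"}
contains_all(allowed, {"red", "blue"}, "unknown colour requested")    # returns True
try:
    contains_all(allowed, {"purple"}, "unknown colour requested")
except ValueError as err:
    print(err)    # prints: unknown colour requested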
def body_as_str(self, encoding='UTF-8'):
    """
    The body of the event data as a string if the data is of a compatible type.

    :param encoding: The encoding to use for decoding message data.
        Default is 'UTF-8'
    :rtype: str or unicode
    """
    data = self.body
    try:
        return "".join(b.decode(encoding) for b in data)
    except TypeError:
        return six.text_type(data)
    except:  # pylint: disable=bare-except
        pass
    try:
        return data.decode(encoding)
    except Exception as e:
        raise TypeError("Message data is not compatible with string type: {}".format(e))
0.005764
def BHI(self, params):
    """
    BHI label

    Branch to the instruction at label if the C flag is set and the Z flag
    is not set.
    """
    label = self.get_one_parameter(self.ONE_PARAMETER, params)

    self.check_arguments(label_exists=(label,))

    # BHI label
    def BHI_func():
        if self.is_C_set() and not self.is_Z_set():
            self.register['PC'] = self.labels[label]

    return BHI_func
0.00655
def download(self, uuid, output_format='gzip'):
    """
    Download pre-prepared data by UUID

    :type uuid: str
    :param uuid: Data UUID
    :type output_format: str
    :param output_format: Output format of the data, either "gzip" or "text"

    :rtype: str
    :return: The downloaded content
    """
    if output_format.lower() not in ('gzip', 'text'):
        raise Exception("output_format must be one of gzip, text")

    data = {
        'format': output_format,
        'uuid': uuid,
    }

    return self.get('download', get_params=data, is_json=False)
0.004724
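A hedged usage sketch for the download method above; the client instance and the UUID are placeholders, not real identifiers.

# Hypothetical call on an already-constructed API client.
text = client.download("3f2c1d4e-0000-4000-8000-000000000000", output_format="text")
with open("export.txt", "w") as fh:
    fh.write(text)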
def service_define(self, service, ty):
    """
    Add a service variable of type ``ty`` to this model

    :param str service: variable name
    :param type ty: variable type
    :return: None
    """
    assert service not in self._data
    assert service not in self._algebs + self._states

    self._service.append(service)
    self._service_ty.append(ty)
0.005
def signals():
    """Show all signal types."""
    signalbus = current_app.extensions['signalbus']
    for signal_model in signalbus.get_signal_models():
        click.echo(signal_model.__name__)
0.005076
def create_from_array(self, blockname, array, Nfile=None,
                      memorylimit=1024 * 1024 * 256):
    """ create a block from array like objects

        The operation is well defined only if array is at most 2d.

        Parameters
        ----------
        array : array_like,
            array shall have a scalar dtype.
        blockname : string
            name of the block
        Nfile : int or None
            number of physical files. if None, 32M items per file is used.
        memorylimit : int
            number of bytes to use for the buffering. relevant only if
            indexing on array returns a copy (e.g. IO or dask array)

    """
    size = len(array)

    # sane value -- 32 million items per physical file
    sizeperfile = 32 * 1024 * 1024

    if Nfile is None:
        Nfile = (size + sizeperfile - 1) // sizeperfile

    dtype = numpy.dtype((array.dtype, array.shape[1:]))
    itemsize = dtype.itemsize

    # we will do some chunking
    # write memorylimit bytes at most (256M bytes)
    # round to 1024 items
    itemlimit = memorylimit // dtype.itemsize // 1024 * 1024

    with self.create(blockname, dtype, size, Nfile) as b:
        for i in range(0, len(array), itemlimit):
            b.write(i, numpy.array(array[i:i + itemlimit]))

    return self.open(blockname)
0.002791
def to_lst(self):
    """Cycle over all items and put them in a list

    :return: list representation
    """
    out = []
    node = self.head

    while node is not None:
        out.append(node.val)
        node = node.next_node
    return out
0.007143
def get(self):
    """API endpoint to get the related blocks for a transaction.

    Return:
        A ``list`` of ``block_id``s that contain the given transaction. The
        list may be filtered when provided a status query parameter:
        "valid", "invalid", "undecided".
    """
    parser = reqparse.RequestParser()
    parser.add_argument('transaction_id', type=str, required=True)

    args = parser.parse_args(strict=True)
    tx_id = args['transaction_id']

    pool = current_app.config['bigchain_pool']
    with pool() as bigchain:
        blocks = bigchain.get_block_containing_tx(tx_id)

    return blocks
0.002946
def check_filepath(self, path, filename):
    """
    Check and return the final filepath to settings

    Args:
        path (str): Directory path where to search for settings file.
        filename (str): Filename to use to search for settings file.

    Raises:
        boussole.exceptions.SettingsBackendError: If determined filepath
            does not exist or is a directory.

    Returns:
        string: Settings file path, joining given path and filename.
    """
    settings_path = os.path.join(path, filename)

    if not os.path.exists(settings_path) or \
       not os.path.isfile(settings_path):
        msg = "Unable to find settings file: {}"
        raise SettingsBackendError(msg.format(settings_path))

    return settings_path
0.002442
def proxy_upload(self, path, filename, content_type=None, content_encoding=None,
                 cb=None, num_cb=None):
    """
    This is the main function that uploads. We assume the bucket and key
    (== path) exist. What we do here is simple. Calculate the headers we
    will need (e.g. md5, content-type, etc.). Then we ask the
    self.get_proxy_config method to fill in the authentication information
    and tell us which remote host we should talk to for the upload. From
    there, the rest is ripped from boto.key.Key.send_file.
    """
    from boto.connection import AWSAuthConnection
    import mimetypes
    from hashlib import md5
    import base64

    # set to something very small to make sure chunking is working properly
    BufferSize = 65536

    fp = open(filename)

    headers = {'Content-Type': content_type}
    if content_type is None:
        content_type = mimetypes.guess_type(filename)[0] or "text/plain"
    headers['Content-Type'] = content_type
    if content_encoding is not None:
        headers['Content-Encoding'] = content_encoding

    m = md5()
    fp.seek(0)
    s = fp.read(BufferSize)
    while s:
        m.update(s)
        s = fp.read(BufferSize)
    self.size = fp.tell()
    fp.seek(0)

    self.md5 = m.hexdigest()
    headers['Content-MD5'] = base64.encodestring(m.digest()).rstrip('\n')
    headers['Content-Length'] = str(self.size)
    headers['Expect'] = '100-Continue'

    host, headers = self.get_proxy_config(headers, path)

    # how to do this same thing with curl instead...
    print("curl -i --trace-ascii foo.log -T %s -H %s https://%s%s" % (
        filename,
        " -H ".join("'%s: %s'" % (k, v) for k, v in headers.items()),
        host, path
    ))

    def sender(http_conn, method, path, data, headers):
        http_conn.putrequest(method, path)
        for key in headers:
            http_conn.putheader(key, headers[key])
        http_conn.endheaders()
        fp.seek(0)

        http_conn.set_debuglevel(0)  # XXX set to e.g. 4 to see what's going on
        if cb:
            if num_cb > 2:
                cb_count = self.size / BufferSize / (num_cb - 2)
            elif num_cb < 0:
                cb_count = -1
            else:
                cb_count = 0
            i = total_bytes = 0
            cb(total_bytes, self.size)

        l = fp.read(BufferSize)
        while len(l) > 0:
            http_conn.send(l)
            if cb:
                total_bytes += len(l)
                i += 1
                if i == cb_count or cb_count == -1:
                    cb(total_bytes, self.size)
                    i = 0
            l = fp.read(BufferSize)
        if cb:
            cb(total_bytes, self.size)

        response = http_conn.getresponse()
        body = response.read()

        fp.seek(0)
        if response.status == 500 or response.status == 503 or \
                response.getheader('location'):
            # we'll try again
            return response
        elif response.status >= 200 and response.status <= 299:
            self.etag = response.getheader('etag')
            if self.etag != '"%s"' % self.md5:
                raise Exception('ETag from S3 did not match computed MD5')
            return response
        else:
            # raise provider.storage_response_error(
            #     response.status, response.reason, body)
            raise Exception(response.status, response.reason, body)

    awsc = AWSAuthConnection(host,
                             aws_access_key_id="key_id",
                             aws_secret_access_key="secret")
    awsc._mexe('PUT', path, None, headers, sender=sender)
0.005215
def delete_saml_provider(name, region=None, key=None, keyid=None, profile=None):
    '''
    Delete SAML provider

    CLI Example:

    .. code-block:: bash

        salt myminion boto_iam.delete_saml_provider my_saml_provider_name
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        saml_provider_arn = get_saml_provider_arn(name, region=region, key=key,
                                                  keyid=keyid, profile=profile)
        if not saml_provider_arn:
            log.info('SAML provider %s not found.', name)
            return True
        conn.delete_saml_provider(saml_provider_arn)
        log.info('Successfully deleted SAML provider %s.', name)
        return True
    except boto.exception.BotoServerError as e:
        aws = __utils__['boto.get_error'](e)
        log.debug(aws)
        log.error('Failed to delete SAML provider %s.', name)
        return False
0.003382
def list_vdirs(site, app=_DEFAULT_APP):
    '''
    Get all configured IIS virtual directories for the specified site, or for
    the combination of site and application.

    Args:
        site (str): The IIS site name.
        app (str): The IIS application.

    Returns:
        dict: A dictionary of the virtual directory names and properties.

    CLI Example:

    .. code-block:: bash

        salt '*' win_iis.list_vdirs site
    '''
    ret = dict()

    ps_cmd = ['Get-WebVirtualDirectory',
              '-Site', r"'{0}'".format(site),
              '-Application', r"'{0}'".format(app),
              '|', "Select-Object PhysicalPath, @{ Name = 'name';",
              r"Expression = { $_.path.Split('/')[-1] } }"]

    cmd_ret = _srvmgr(cmd=ps_cmd, return_json=True)

    try:
        items = salt.utils.json.loads(cmd_ret['stdout'], strict=False)
    except ValueError:
        raise CommandExecutionError('Unable to parse return data as Json.')

    for item in items:
        ret[item['name']] = {'sourcepath': item['physicalPath']}

    if not ret:
        log.warning('No vdirs found in output: %s', cmd_ret)

    return ret
0.000875
def get_signature(self, content):
    """Get signature from inspect reply content"""
    data = content.get('data', {})
    text = data.get('text/plain', '')
    if text:
        text = ANSI_OR_SPECIAL_PATTERN.sub('', text)
        self._control.current_prompt_pos = self._prompt_pos
        line = self._control.get_current_line_to_cursor()
        name = line[:-1].split('(')[-1]   # Take last token after a (
        name = name.split('.')[-1]        # Then take last token after a .

        # Clean name from invalid chars
        try:
            name = self.clean_invalid_var_chars(name).split('_')[-1]
        except:
            pass

        argspec = getargspecfromtext(text)
        if argspec:
            # This covers cases like np.abs, whose docstring is
            # the same as np.absolute and because of that a proper
            # signature can't be obtained correctly
            signature = name + argspec
        else:
            signature = getsignaturefromtext(text, name)

        # Remove docstring for uniformity with editor
        signature = signature.split('Docstring:')[0]

        return signature
    else:
        return ''
0.002368
def delete_key(self, key_id):
    """
    ::

        DELETE /:login/keys/:key

    :param key_id: identifier for an individual key record for the account
    :type key_id: :py:class:`basestring`

    Deletes an SSH key from the server identified by `key_id`.
    """
    j, r = self.request('DELETE', '/keys/' + str(key_id))
    r.raise_for_status()
    return j
0.01171
def run_fusion_caller(job, star_bam, univ_options, fusion_options):
    """
    This module will run a fusion caller on DNA bams. This module will be
    implemented in the future. This module corresponds to node 10 on the tree.
    """
    job.fileStore.logToMaster('Running FUSION on %s' % univ_options['patient'])
    fusion_file = job.fileStore.getLocalTempFile()
    output_file = job.fileStore.writeGlobalFile(fusion_file)
    return output_file
0.002193
def get_abs_url(self, rel_url):
    """
    Create an absolute url from a relative one.

    >>> url_formatter = URLFormatter("example.com", 80)
    >>> url_formatter.get_abs_url("kml_master.kml")
    'http://example.com:80/kml_master.kml'
    """
    rel_url = rel_url.lstrip("/")
    return "{}://{}:{}/{}".format(self.url_scheme, self.host, self.port, rel_url)
0.007595
def stsci(hdulist):
    """For STScI GEIS files, need to do extra steps."""
    instrument = hdulist[0].header.get('INSTRUME', '')

    # Update extension header keywords
    if instrument in ("WFPC2", "FOC"):
        rootname = hdulist[0].header.get('ROOTNAME', '')
        filetype = hdulist[0].header.get('FILETYPE', '')
        for i in range(1, len(hdulist)):
            # Add name and extver attributes to match PyFITS data structure
            hdulist[i].name = filetype
            hdulist[i]._extver = i

            # Add extension keywords for this chip to extension
            hdulist[i].header['EXPNAME'] = (rootname, "9 character exposure identifier")
            hdulist[i].header['EXTVER'] = (i, "extension version number")
            hdulist[i].header['EXTNAME'] = (filetype, "extension name")
            hdulist[i].header['INHERIT'] = (True, "inherit the primary header")
            hdulist[i].header['ROOTNAME'] = (rootname, "rootname of the observation set")
0.004069
def qual_name(self) -> QualName:
    """Return the receiver's qualified name."""
    p, s, loc = self._key.partition(":")
    return (loc, p) if s else (p, self.namespace)
0.010929
def get_ruleset_dirs():
    """
    Get the directories with ruleset files.

    First directory to check:  ./rulesets
    Then, inside a virtualenv:  $VIRTUAL_ENV/share/colin/rulesets
    Next directory to check:   $HOME/.local/share/colin/rulesets
    Last directory to check:   /usr/local/share/colin/rulesets

    :return: list of str
    """
    ruleset_dirs = []

    cwd_rulesets = os.path.join(".", RULESET_DIRECTORY_NAME)
    if os.path.isdir(cwd_rulesets):
        logger.debug("Ruleset directory found in current directory ('{}').".format(cwd_rulesets))
        ruleset_dirs.append(cwd_rulesets)

    if "VIRTUAL_ENV" in os.environ:
        venv_local_share = os.path.join(os.environ["VIRTUAL_ENV"], RULESET_DIRECTORY)
        if os.path.isdir(venv_local_share):
            logger.debug("Virtual env ruleset directory found ('{}').".format(venv_local_share))
            ruleset_dirs.append(venv_local_share)

    local_share = os.path.join(os.path.expanduser("~"), ".local", RULESET_DIRECTORY)
    if os.path.isdir(local_share):
        logger.debug("Local ruleset directory found ('{}').".format(local_share))
        ruleset_dirs.append(local_share)

    usr_local_share = os.path.join("/usr/local", RULESET_DIRECTORY)
    if os.path.isdir(usr_local_share):
        logger.debug("Global ruleset directory found ('{}').".format(usr_local_share))
        ruleset_dirs.append(usr_local_share)

    if not ruleset_dirs:
        msg = "Ruleset directory cannot be found."
        logger.warning(msg)
        raise ColinRulesetException(msg)

    return ruleset_dirs
0.003127
def magnification(self, x, y, kwargs, diff=diff):
    """
    computes the magnification

    :return: magnification
    """
    f_xx, f_xy, f_yx, f_yy = self.hessian(x, y, kwargs, diff=diff)
    det_A = (1 - f_xx) * (1 - f_yy) - f_xy * f_yx
    return 1 / det_A
0.007117
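A quick numeric sanity check of the determinant formula used in the magnification method above; the Hessian components below are invented values, not output from the class.

# Invented second derivatives of the lensing potential, for illustration.
f_xx, f_xy, f_yx, f_yy = 0.2, 0.1, 0.1, 0.3
det_A = (1 - f_xx) * (1 - f_yy) - f_xy * f_yx    # 0.8 * 0.7 - 0.01 = 0.55
mu = 1 / det_A                                   # ~1.818
print(mu)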
def checkAndCreateClasses(self, classes):
    """ Function checkAndCreateClasses
    Check and add puppet class

    @param classes: The classes ids list
    @return RETURN: boolean
    """
    actual_classes = self['puppetclasses'].keys()
    for i in classes:
        if i not in actual_classes:
            self['puppetclasses'].append(i)
    self.reload()
    return set(classes).issubset(set(self['puppetclasses'].keys()))
0.004228
def write(self, face, data, viewport=None, *, alignment=1) -> None:
    '''
        Update the content of the texture.

        Args:
            face (int): The face to update.
            data (bytes): The pixel data.
            viewport (tuple): The viewport.

        Keyword Args:
            alignment (int): The byte alignment of the pixels.
    '''
    if type(data) is Buffer:
        data = data.mglo

    self.mglo.write(face, data, viewport, alignment)
0.003891
def scrub_py2_sys_modules():
    """
    Removes any Python 2 standard library modules from ``sys.modules`` that
    would interfere with Py3-style imports using import hooks. Examples are
    modules with the same names (like urllib or email).

    (Note that currently import hooks are disabled for modules like these
    with ambiguous names anyway ...)
    """
    if PY3:
        return {}
    scrubbed = {}
    for modulename in REPLACED_MODULES & set(RENAMES.keys()):
        if modulename not in sys.modules:
            continue

        module = sys.modules[modulename]

        if is_py2_stdlib_module(module):
            flog.debug('Deleting (Py2) {} from sys.modules'.format(modulename))
            scrubbed[modulename] = sys.modules[modulename]
            del sys.modules[modulename]
    return scrubbed
0.002439
def slice_sequence(sequence, length, pad_last=False, pad_val=C.PAD_TOKEN, overlap=0):
    """Slice a flat sequence of tokens into sequences of tokens, with each
    inner sequence's length equal to the specified `length`, taking into
    account the requested sequence overlap.

    Parameters
    ----------
    sequence : list of object
        A flat list of tokens.
    length : int
        The length of each of the samples.
    pad_last : bool, default False
        Whether to pad the last sequence when its length doesn't align. If the
        last sequence's length doesn't align and ``pad_last`` is False, it will
        be dropped.
    pad_val : object, default
        The padding value to use when the padding of the last sequence is
        enabled. In general, the type of ``pad_val`` should be the same as the
        tokens.
    overlap : int, default 0
        The extra number of items in current sample that should overlap with the
        next sample.

    Returns
    -------
    List of list of tokens, with the length of each inner list equal to `length`.
    """
    if length <= overlap:
        raise ValueError('length needs to be larger than overlap')

    if pad_last:
        pad_len = _slice_pad_length(len(sequence), length, overlap)
        sequence = sequence + [pad_val] * pad_len
    num_samples = (len(sequence) - length) // (length - overlap) + 1

    return [sequence[i * (length - overlap): ((i + 1) * length - i * overlap)]
            for i in range(num_samples)]
0.004701
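A small worked example of the slicing arithmetic in slice_sequence above, assuming the function is importable; the token values are illustrative.

# With length=4 and overlap=1, each slice shares one token with the next.
tokens = list(range(10))    # [0, 1, ..., 9]
samples = slice_sequence(tokens, length=4, overlap=1)
# num_samples = (10 - 4) // (4 - 1) + 1 = 3
# samples == [[0, 1, 2, 3], [3, 4, 5, 6], [6, 7, 8, 9]]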
def _set_log_format(color, include_caller):
    """
    Set log format

    :param color: Log message is colored
    :param include_caller: At the end, put a [caller:line-of-code], e.g. [script:123]
    :return: string of log format
    """
    level_name = '* %(levelname)1s'
    time = '%(asctime)s,%(msecs)03d'
    message = '%(message)s'
    color_start = '%(color)s'
    color_end = '%(end_color)s'
    caller = '[%(module)s:%(lineno)d]'

    if color:
        if include_caller:
            return '{}{}{} {} {} {}'.format(color_start, level_name, color_end, time, message, caller)
        else:
            return '{}{}{} {} {}'.format(color_start, level_name, color_end, time, message)
    else:
        if include_caller:
            return '{} {} {} {}'.format(level_name, time, message, caller)
        else:
            return '{} {} {}'.format(level_name, time, message)
0.004494
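For reference, a sketch of the format string produced by _set_log_format above for the non-colored branch with the caller included, assuming the function is in scope.

fmt = _set_log_format(color=False, include_caller=True)
# fmt == '* %(levelname)1s %(asctime)s,%(msecs)03d %(message)s [%(module)s:%(lineno)d]'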
def get_cfws(value):
    """CFWS = (1*([FWS] comment) [FWS]) / FWS

    """
    cfws = CFWSList()
    while value and value[0] in CFWS_LEADER:
        if value[0] in WSP:
            token, value = get_fws(value)
        else:
            token, value = get_comment(value)
        cfws.append(token)
    return cfws, value
0.003106