text: string (length 78 to 104k) | score: float64 (0 to 0.18)
def _retrieve_output_mode(self):
    """Save the state of the output mode so it can be reset at the end of the session."""
    reg_mode = re.compile(r"output\s+:\s+(?P<mode>.*)\s+\n")
    output = self.send_command("get system console")
    result_mode_re = reg_mode.search(output)
    if result_mode_re:
        result_mode = result_mode_re.group("mode").strip()
        if result_mode in ["more", "standard"]:
            self._output_mode = result_mode
0.00616
def numericallySortFilenames(names):
    """
    Sort (ascending) a list of file names by their numerical prefixes.
    The number sorted on is the numeric prefix of the basename of the
    given filename. E.g., '../output/1.json.bz2' will sort before
    '../output/10.json.bz2'.

    @param names: A C{list} of file names, each of whose basename starts
        with a string of digits.
    @return: The sorted C{list} of full file names.
    """

    def numericPrefix(name):
        """
        Find any numeric prefix of C{name} and return it as an C{int}.

        @param name: A C{str} file name, possibly starting with digits.
        @return: The C{int} number at the start of the name, else 0 if
            there are no leading digits in the name.
        """
        count = 0
        for ch in name:
            if ch in string.digits:
                count += 1
            else:
                break
        return 0 if count == 0 else int(name[0:count])

    return sorted(names, key=lambda name: numericPrefix(basename(name)))
0.000961
def _ensure_filepath(filename):
    """
    Ensure that the directory exists before trying to write to the file.
    """
    filepath = os.path.dirname(filename)
    # A bare filename has no directory component; nothing to create then.
    if filepath and not os.path.exists(filepath):
        os.makedirs(filepath)
0.007905
def create_actor_delaunay(pts, color, **kwargs):
    """ Creates a VTK actor for rendering triangulated plots using Delaunay triangulation.

    Keyword Arguments:
        * ``d3d``: flag to choose between Delaunay2D (``False``) and Delaunay3D (``True``). *Default: False*

    :param pts: points
    :type pts: vtkFloatArray
    :param color: actor color
    :type color: list
    :return: a VTK actor
    :rtype: vtkActor
    """
    # Keyword arguments
    array_name = kwargs.get('name', "")
    array_index = kwargs.get('index', 0)
    use_delaunay3d = kwargs.get("d3d", False)

    # Create points
    points = vtk.vtkPoints()
    points.SetData(pts)

    # Create a PolyData object and add points
    polydata = vtk.vtkPolyData()
    polydata.SetPoints(points)

    # Apply Delaunay triangulation on the poly data object
    triangulation = vtk.vtkDelaunay3D() if use_delaunay3d else vtk.vtkDelaunay2D()
    triangulation.SetInputData(polydata)

    # Map triangulated surface to the graphics primitives
    mapper = vtk.vtkDataSetMapper()
    mapper.SetInputConnection(triangulation.GetOutputPort())
    mapper.SetArrayName(array_name)
    mapper.SetArrayId(array_index)

    # Create an actor and set its properties
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    actor.GetProperty().SetColor(*color)

    # Return the actor
    return actor
0.002939
def units(self):
    """
    Tuple of the units for length, time and mass. Can be set in any order,
    and strings are not case-sensitive. See ipython_examples/Units.ipynb
    for more information. You can check the units' exact values and add
    additional units in rebound/rebound/units.py. Units should be set
    before adding particles to the simulation (will give error otherwise).

    Currently supported Units
    -------------------------

    Times:
    Hr          : Hours
    Yr          : Julian years
    Jyr         : Julian years
    Sidereal_yr : Sidereal year
    Yr2pi       : Year divided by 2pi, with year defined as orbital period of planet at 1AU around 1Msun star
    Kyr         : Kiloyears (Julian)
    Myr         : Megayears (Julian)
    Gyr         : Gigayears (Julian)

    Lengths:
    M           : Meters
    Cm          : Centimeters
    Km          : Kilometers
    AU          : Astronomical Units

    Masses:
    Kg          : Kilograms
    Msun        : Solar masses
    Mmercury    : Mercury masses
    Mvenus      : Venus masses
    Mearth      : Earth masses
    Mmars       : Mars masses
    Mjupiter    : Jupiter masses
    Msaturn     : Saturn masses
    Muranus     : Uranus masses
    Mneptune    : Neptune masses
    Mpluto      : Pluto masses

    Examples
    --------

    >>> sim = rebound.Simulation()
    >>> sim.units = ('yr', 'AU', 'Msun')
    """
    return {'length': hash_to_unit(self.python_unit_l),
            'mass': hash_to_unit(self.python_unit_m),
            'time': hash_to_unit(self.python_unit_t)}
0.007203
def dispatch_request(self, *args, **kwargs):
    """Dispatch the request.

    It's the actual ``view`` flask will use.
    """
    if request.method in ('POST', 'PUT'):
        return_url, context = self.post(*args, **kwargs)
        if return_url is not None:
            return redirect(return_url)
    elif request.method in ('GET', 'HEAD'):
        context = self.get(*args, **kwargs)
    return self.render_response(self.context(context))
0.004149
def _download(url, dest):
    """
    Downloads a URL to a directory

    :param url:
        The URL to download

    :param dest:
        The path to the directory to save the file in

    :return:
        The filesystem path to the saved file
    """
    print('Downloading %s' % url)
    filename = os.path.basename(url)
    dest_path = os.path.join(dest, filename)

    if sys.platform == 'win32':
        powershell_exe = os.path.join('system32\\WindowsPowerShell\\v1.0\\powershell.exe')
        code = "[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12;"
        code += "(New-Object Net.WebClient).DownloadFile('%s', '%s');" % (url, dest_path)
        _execute([powershell_exe, '-Command', code], dest)
    else:
        _execute(['curl', '-L', '--silent', '--show-error', '-O', url], dest)
    return dest_path
0.004619
def new_filename(prefix, extension=None, flag_minimal=True):
    """returns a file name that does not exist yet, e.g. prefix.0001.extension

    Args:
        prefix:
        extension: examples: "dat", ".dat" (leading dot will be detected, does not repeat dot in name)
        flag_minimal:
          - True: will try to be as "clean" as possible
          - False: will generate filenames in a simple, same-length pattern

    Example: ``new_filename("molecules-", "dat", True)``

    In the example above, the first attempt will be "molecules.dat", then "molecules-0000.dat".
    If flag_minimal were False, it would skip the first attempt.
    """
    if extension is None:
        extension = ""
    if len(extension) > 0 and extension[0] == '.':
        extension = extension[1:]

    # extension-sensitive format for filename
    fmt = '{0!s}-{1:04d}.{2!s}' if extension else '{0!s}-{1:04d}'

    # Removes trailing dash because it would look funny (but it is re-added in the format string)
    prefix_ = prefix[:-1] if prefix.endswith("-") else prefix

    i = -1
    while True:
        if i == -1 and flag_minimal:
            ret = "{}.{}".format(prefix_, extension) if extension else prefix_
        else:
            ret = fmt.format(prefix_, i, extension)
        if not os.path.exists(ret):
            break
        i += 1
        if i > 9999:
            raise RuntimeError("Could not make a new file name for (prefix='{0!s}', extension='{1!s}')".format(prefix, extension))
    return ret
0.003839
def finish(self):
    """
    Respond to nsqd that you’ve processed this message successfully
    (or would like to silently discard it).
    """
    if self._has_responded:
        raise NSQException('already responded')
    self._has_responded = True
    self.on_finish.send(self)
0.006369
def format(self, format_str):
    """Returns a formatted version of format_str.

    The only named replacement fields supported by this method and
    their corresponding API calls are:

    * {num} group_num
    * {name} group_name
    * {symbol} group_symbol
    * {variant} group_variant
    * {current_data} group_data
    * {count} groups_count
    * {names} groups_names
    * {symbols} groups_symbols
    * {variants} groups_variants
    * {all_data} groups_data

    Passing other replacement fields will result in raising exceptions.

    :param format_str: a new style format string
    :rtype: str
    """
    return format_str.format(**{
        "num": self.group_num,
        "name": self.group_name,
        "symbol": self.group_symbol,
        "variant": self.group_variant,
        "current_data": self.group_data,
        "count": self.groups_count,
        "names": self.groups_names,
        "symbols": self.groups_symbols,
        "variants": self.groups_variants,
        "all_data": self.groups_data})
0.001668
def replace_launch_config(self, scaling_group, launch_config_type,
        server_name, image, flavor, disk_config=None, metadata=None,
        personality=None, networks=None, load_balancers=None,
        key_name=None):
    """
    Replace an existing launch configuration. All of the attributes must be
    specified. If you wish to delete any of the optional attributes, pass
    them in as None.
    """
    return self._manager.replace_launch_config(scaling_group,
            launch_config_type, server_name, image, flavor,
            disk_config=disk_config, metadata=metadata,
            personality=personality, networks=networks,
            load_balancers=load_balancers, key_name=key_name)
0.011905
def write_otu_file(otu_ids, fp):
    """
    Write out a file containing only the list of OTU IDs from the kraken data.
    One line per ID.

    :type otu_ids: list or iterable
    :param otu_ids: The OTU identifiers that will be written to file.
    :type fp: str
    :param fp: The path to the output file.
    """
    fpdir = osp.split(fp)[0]

    if not fpdir == "" and not osp.isdir(fpdir):
        raise RuntimeError("Specified path does not exist: {}".format(fpdir))

    with open(fp, 'wt') as outf:
        outf.write('\n'.join(otu_ids))
0.001828
def update_lipd_v1_1(d):
    """
    Update LiPD v1.0 to v1.1
    - chronData entry is a list that allows multiple tables
    - paleoData entry is a list that allows multiple tables
    - chronData now allows measurement, model, summary, modelTable, ensemble, calibratedAges tables
    - Added 'lipdVersion' key

    :param dict d: Metadata v1.0
    :return dict d: Metadata v1.1
    """
    logger_versions.info("enter update_lipd_v1_1")
    tmp_all = []

    try:
        # ChronData is the only structure update
        if "chronData" in d:
            # As of v1.1, ChronData should have an extra level of abstraction.
            # No longer shares the same structure of paleoData

            # If no measurement table, then make a measurement table list with the table as the entry
            for table in d["chronData"]:
                if "chronMeasurementTable" not in table:
                    tmp_all.append({"chronMeasurementTable": [table]})

                # If the table exists, but it is a dictionary, then turn it into a list with one entry
                elif "chronMeasurementTable" in table:
                    if isinstance(table["chronMeasurementTable"], dict):
                        tmp_all.append({"chronMeasurementTable": [table["chronMeasurementTable"]]})
            if tmp_all:
                d["chronData"] = tmp_all

        # Log that this is now a v1.1 structured file
        d["lipdVersion"] = 1.1
    except Exception as e:
        logger_versions.error("update_lipd_v1_1: Exception: {}".format(e))

    logger_versions.info("exit update_lipd_v1_1")
    return d
0.003115
def json_error_response(error, response, status_code=400):
    """ Formats an error as a response containing a JSON body. """
    msg = {"error": error.error, "error_description": error.explanation}

    response.status_code = status_code
    response.add_header("Content-Type", "application/json")
    response.body = json.dumps(msg)

    return response
0.002755
def init_app(self, app=None, blueprint=None, additional_blueprints=None):
    """Update flask application with our api

    :param Application app: a flask application
    """
    if app is not None:
        self.app = app

    if blueprint is not None:
        self.blueprint = blueprint

    for resource in self.resources:
        self.route(resource['resource'],
                   resource['view'],
                   *resource['urls'],
                   url_rule_options=resource['url_rule_options'])

    if self.blueprint is not None:
        self.app.register_blueprint(self.blueprint)

    if additional_blueprints is not None:
        for blueprint in additional_blueprints:
            self.app.register_blueprint(blueprint)

    self.app.config.setdefault('PAGE_SIZE', 30)
0.002331
def fetchone(table, cols="*", where=(), group="", order=(), limit=(), **kwargs):
    """Convenience wrapper for database SELECT and fetch one."""
    return select(table, cols, where, group, order, limit, **kwargs).fetchone()
0.008811
def pair(self):
    """ Return tuple (address, port), where address is a string (empty string if self.address()
    is None) and port is an integer (zero if self.port() is None). Mainly, this tuple is used
    with python socket module (like in bind method)

    :return: 2 value tuple of str and int.
    """
    address = str(self.__address) if self.__address is not None else ''
    port = int(self.__port) if self.__port is not None else 0
    return address, port
0.026549
def dbRestore(self, db_value, context=None):
    """
    Converts a stored database value to Python.

    :param db_value: <variant>
    :param context: <orb.Context>

    :return: <variant>
    """
    if db_value is None:
        return None
    elif isinstance(db_value, (str, unicode)):
        return self.valueFromString(db_value, context=context)
    else:
        return super(AbstractDatetimeColumn, self).dbRestore(db_value, context=context)
0.006012
def validate(self):
    """Iterate over all triples in the graph and validate each one appropriately"""
    log.info("{}\nValidating against {}"
             .format("-" * 100, self.schema_def.__class__.__name__))

    if not self.schema_def:
        raise ValueError("No schema definition supplied.")

    self.checked_attributes = []

    # TODO - this should maybe choose the actually used namespace, not just
    # the first one in the list
    result = ValidationResult(self.allowed_namespaces[0],
                              self.schema_def.__class__.__name__)

    for subject, predicate, object_ in self.graph:
        log.info("\nsubj: {subj}\npred: {pred}\n obj: {obj}"
                 .format(subj=subject, pred=predicate,
                         obj=object_.encode('utf-8')))
        result.add_error(self._check_triple((subject, predicate, object_)))
    return result
0.002064
def remove_flow_controller(cls, name):
    """
    Removes a flow controller.

    Args:
        name (string): Name of the controller to remove.

    Raises:
        KeyError: If the controller to remove was not registered.
    """
    if name not in cls.registered_controllers:
        raise KeyError("Flow controller not found: %s" % name)
    del cls.registered_controllers[name]
0.004751
def det_to_src(self, angle, dparam):
    """Direction from a detector location to the source.

    The direction vector is computed as follows::

        dir = rotation_matrix(angle).dot(detector.surface_normal(dparam))

    Note that for flat detectors, ``surface_normal`` does not depend
    on the parameter ``dparam``, hence this function is constant in
    that variable.

    Parameters
    ----------
    angle : `array-like` or sequence
        One or several (Euler) angles in radians at which to
        evaluate. If ``motion_params.ndim >= 2``, a sequence of that
        length must be provided.
    dparam : `array-like` or sequence
        Detector parameter(s) at which to evaluate. If
        ``det_params.ndim >= 2``, a sequence of that length must be
        provided.

    Returns
    -------
    det_to_src : `numpy.ndarray`
        Vector(s) pointing from a detector point to the source (at
        infinity).
        The shape of the returned array is obtained from the
        (broadcast) shapes of ``angle`` and ``dparam``, and
        broadcasting is supported within both parameters and between
        them. The precise definition of the shape is
        ``broadcast(bcast_angle, bcast_dparam).shape + (ndim,)``,
        where ``bcast_angle`` is

        - ``angle`` if `motion_params` is 1D,
        - ``broadcast(*angle)`` otherwise,

        and ``bcast_dparam`` defined analogously.

    Examples
    --------
    The method works with single parameter values, in which case
    a single vector is returned:

    >>> apart = odl.uniform_partition(0, np.pi, 10)
    >>> dpart = odl.uniform_partition(-1, 1, 20)
    >>> geom = odl.tomo.Parallel2dGeometry(apart, dpart)
    >>> geom.det_to_src(0, 0)
    array([ 0., -1.])
    >>> geom.det_to_src(0, 1)
    array([ 0., -1.])
    >>> dir = geom.det_to_src(np.pi / 2, 0)
    >>> np.allclose(dir, [1, 0])
    True
    >>> dir = geom.det_to_src(np.pi / 2, 1)
    >>> np.allclose(dir, [1, 0])
    True

    Both variables support vectorized calls, i.e., stacks of
    parameters can be provided. The order of axes in the output (left
    of the ``ndim`` axis for the vector dimension) corresponds to the
    order of arguments:

    >>> dirs = geom.det_to_src(0, [-1, 0, 0.5, 1])
    >>> dirs
    array([[ 0., -1.],
           [ 0., -1.],
           [ 0., -1.],
           [ 0., -1.]])
    >>> dirs.shape  # (num_dparams, ndim)
    (4, 2)
    >>> dirs = geom.det_to_src([0, np.pi / 2, np.pi], 0)
    >>> np.allclose(dirs, [[0, -1],
    ...                    [1, 0],
    ...                    [0, 1]])
    True
    >>> dirs.shape  # (num_angles, ndim)
    (3, 2)
    >>> # Providing 3 pairs of parameters, resulting in 3 vectors
    >>> dirs = geom.det_to_src([0, np.pi / 2, np.pi], [-1, 0, 1])
    >>> dirs[0]  # Corresponds to angle = 0, dparam = -1
    array([ 0., -1.])
    >>> dirs.shape
    (3, 2)
    >>> # Pairs of parameters arranged in arrays of same size
    >>> geom.det_to_src(np.zeros((4, 5)), np.zeros((4, 5))).shape
    (4, 5, 2)
    >>> # "Outer product" type evaluation using broadcasting
    >>> geom.det_to_src(np.zeros((4, 1)), np.zeros((1, 5))).shape
    (4, 5, 2)
    """
    # Always call the downstream methods with vectorized arguments
    # to be able to reliably manipulate the final axes of the result
    if self.motion_params.ndim == 1:
        squeeze_angle = (np.shape(angle) == ())
        angle = np.array(angle, dtype=float, copy=False, ndmin=1)
        matrix = self.rotation_matrix(angle)  # shape (m, ndim, ndim)
    else:
        squeeze_angle = (np.broadcast(*angle).shape == ())
        angle = tuple(np.array(a, dtype=float, copy=False, ndmin=1)
                      for a in angle)
        matrix = self.rotation_matrix(angle)  # shape (m, ndim, ndim)

    if self.det_params.ndim == 1:
        squeeze_dparam = (np.shape(dparam) == ())
        dparam = np.array(dparam, dtype=float, copy=False, ndmin=1)
    else:
        squeeze_dparam = (np.broadcast(*dparam).shape == ())
        dparam = tuple(np.array(p, dtype=float, copy=False, ndmin=1)
                       for p in dparam)

    normal = self.detector.surface_normal(dparam)  # shape (d, ndim)

    # Perform matrix-vector multiplication along the last axis of both
    # `matrix` and `normal` while "zipping" all axes that do not
    # participate in the matrix-vector product. In other words, the axes
    # are labelled
    # [0, 1, ..., r-1, r, r+1] for `matrix` and
    # [0, 1, ..., r-1, r+1] for `normal`, and the output axes are set to
    # [0, 1, ..., r-1, r]. This automatically supports broadcasting
    # along the axes 0, ..., r-1.
    matrix_axes = list(range(matrix.ndim))
    normal_axes = list(range(matrix.ndim - 2)) + [matrix_axes[-1]]
    out_axes = list(range(matrix.ndim - 1))
    det_to_src = np.einsum(matrix, matrix_axes, normal, normal_axes,
                           out_axes)
    if squeeze_angle and squeeze_dparam:
        det_to_src = det_to_src.squeeze()

    return det_to_src
0.000367
def fit(self, X):
    """Fit the PyNNDescent transformer to build KNN graphs with
    neighbors given by the dataset X.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)
        Sample data

    Returns
    -------
    transformer : PyNNDescentTransformer
        The trained transformer
    """
    self.n_samples_fit = X.shape[0]
    if self.metric_kwds is None:
        metric_kwds = {}
    else:
        metric_kwds = self.metric_kwds

    self.pynndescent_ = NNDescent(
        X,
        self.metric,
        metric_kwds,
        self.n_neighbors,
        self.n_trees,
        self.leaf_size,
        self.pruning_level,
        self.tree_init,
        self.random_state,
        self.algorithm,
        self.max_candidates,
        self.n_iters,
        self.early_termination_value,
        self.sampling_rate,
    )
    return self
0.001988
def market_open(self, session, mins) -> Session:
    """
    Time intervals for market open

    Args:
        session: [allday, day, am, pm, night]
        mins: minutes after open

    Returns:
        Session of start_time and end_time
    """
    if session not in self.exch:
        return SessNA
    start_time = self.exch[session][0]
    return Session(start_time, shift_time(start_time, int(mins)))
0.006787
def put(self, local_path, destination_s3_path, **kwargs):
    """
    Put an object stored locally to an S3 path.

    :param local_path: Path to source local file
    :param destination_s3_path: URL for target S3 location
    :param kwargs: Keyword arguments are passed to the boto function `put_object`
    """
    self._check_deprecated_argument(**kwargs)

    # put the file
    self.put_multipart(local_path, destination_s3_path, **kwargs)
0.006263
def scrub(value, scrub_text=_keep_whitespace, scrub_number=_scrub_number):
    """
    REMOVE/REPLACE VALUES THAT CAN NOT BE JSON-IZED
    """
    return _scrub(value, set(), [], scrub_text=scrub_text, scrub_number=scrub_number)
0.008772
def run(self):
    """Start the recurring task."""
    if self.init_sec:
        sleep(self.init_sec)
    self._functime = time()
    while self._running:
        start = time()
        self._func()
        self._functime += self.interval_sec
        if self._functime - start > 0:
            sleep(self._functime - start)
0.00551
def api_endpoints(obj):
    """Iterator over all API endpoint names and callbacks."""
    for name in dir(obj):
        attr = getattr(obj, name)
        api_path = getattr(attr, 'api_path', None)
        if api_path:
            yield (
                '%s%s' % (obj.api_path_prefix, api_path),
                attr,
            )
    for api_provider in obj.api_providers:
        for api_path, attr in api_endpoints(api_provider):
            yield (api_path, attr)
0.002137
def at(self, for_time, counter_offset=0):
    """
    Accepts either a Unix timestamp integer or a Time object.

    Time objects will be adjusted to UTC automatically

    @param [Time/Integer] for_time: the time to generate an OTP for
    @param [Integer] counter_offset: an amount of ticks to add to the time counter
    """
    if not isinstance(for_time, datetime.datetime):
        for_time = datetime.datetime.fromtimestamp(int(for_time))
    return self.generate_otp(self.timecode(for_time) + counter_offset)
0.005515
def _get_git_tracked_files(rootdir='.'):
    """Parsing .gitignore rules is hard. However, we can get around this
    problem by listing all currently tracked git files and starting our
    search from there. After all, if it isn't in the git repo, we're not
    concerned about it, because secrets aren't being entered in a shared
    place.

    :type rootdir: str
    :param rootdir: root directory of where you want to list files from

    :rtype: set|None
    :returns: filepaths to files which git currently tracks (locally)
    """
    try:
        with open(os.devnull, 'w') as fnull:
            git_files = subprocess.check_output(
                [
                    'git',
                    'ls-files',
                    rootdir,
                ],
                stderr=fnull,
            )
        return set(git_files.decode('utf-8').split())
    except subprocess.CalledProcessError:
        return None
0.001067
def grandparent_path(self):
    """ return grandparent's path string """
    # normpath collapses the '..' components; without it, basename of a
    # path ending in '..' would always be '..'
    return os.path.basename(os.path.normpath(os.path.join(self.path, '../..')))
0.014085
def __insert_represented_points(self, cluster):
    """!
    @brief Insert representation points to the k-d tree.

    @param[in] cluster (cure_cluster): Cluster whose representation points should be inserted.

    """
    for point in cluster.rep:
        self.__tree.insert(point, cluster)
0.017192
def _get_dirint_coeffs():
    """
    Here be a large multi-dimensional matrix of dirint coefficients.

    Returns:
        Array with shape ``(6, 6, 7, 5)``.
        Ordering is ``[kt_prime_bin, zenith_bin, delta_kt_prime_bin, w_bin]``
    """
    coeffs = [[0 for i in range(6)] for j in range(6)]

    coeffs[0][0] = [
        [0.385230, 0.385230, 0.385230, 0.462880, 0.317440],
        [0.338390, 0.338390, 0.221270, 0.316730, 0.503650],
        [0.235680, 0.235680, 0.241280, 0.157830, 0.269440],
        [0.830130, 0.830130, 0.171970, 0.841070, 0.457370],
        [0.548010, 0.548010, 0.478000, 0.966880, 1.036370],
        [0.548010, 0.548010, 1.000000, 3.012370, 1.976540],
        [0.582690, 0.582690, 0.229720, 0.892710, 0.569950]]

    coeffs[0][1] = [
        [0.131280, 0.131280, 0.385460, 0.511070, 0.127940],
        [0.223710, 0.223710, 0.193560, 0.304560, 0.193940],
        [0.229970, 0.229970, 0.275020, 0.312730, 0.244610],
        [0.090100, 0.184580, 0.260500, 0.687480, 0.579440],
        [0.131530, 0.131530, 0.370190, 1.380350, 1.052270],
        [1.116250, 1.116250, 0.928030, 3.525490, 2.316920],
        [0.090100, 0.237000, 0.300040, 0.812470, 0.664970]]

    coeffs[0][2] = [
        [0.587510, 0.130000, 0.400000, 0.537210, 0.832490],
        [0.306210, 0.129830, 0.204460, 0.500000, 0.681640],
        [0.224020, 0.260620, 0.334080, 0.501040, 0.350470],
        [0.421540, 0.753970, 0.750660, 3.706840, 0.983790],
        [0.706680, 0.373530, 1.245670, 0.864860, 1.992630],
        [4.864400, 0.117390, 0.265180, 0.359180, 3.310820],
        [0.392080, 0.493290, 0.651560, 1.932780, 0.898730]]

    coeffs[0][3] = [
        [0.126970, 0.126970, 0.126970, 0.126970, 0.126970],
        [0.810820, 0.810820, 0.810820, 0.810820, 0.810820],
        [3.241680, 2.500000, 2.291440, 2.291440, 2.291440],
        [4.000000, 3.000000, 2.000000, 0.975430, 1.965570],
        [12.494170, 12.494170, 8.000000, 5.083520, 8.792390],
        [21.744240, 21.744240, 21.744240, 21.744240, 21.744240],
        [3.241680, 12.494170, 1.620760, 1.375250, 2.331620]]

    coeffs[0][4] = [
        [0.126970, 0.126970, 0.126970, 0.126970, 0.126970],
        [0.810820, 0.810820, 0.810820, 0.810820, 0.810820],
        [3.241680, 2.500000, 2.291440, 2.291440, 2.291440],
        [4.000000, 3.000000, 2.000000, 0.975430, 1.965570],
        [12.494170, 12.494170, 8.000000, 5.083520, 8.792390],
        [21.744240, 21.744240, 21.744240, 21.744240, 21.744240],
        [3.241680, 12.494170, 1.620760, 1.375250, 2.331620]]

    coeffs[0][5] = [
        [0.126970, 0.126970, 0.126970, 0.126970, 0.126970],
        [0.810820, 0.810820, 0.810820, 0.810820, 0.810820],
        [3.241680, 2.500000, 2.291440, 2.291440, 2.291440],
        [4.000000, 3.000000, 2.000000, 0.975430, 1.965570],
        [12.494170, 12.494170, 8.000000, 5.083520, 8.792390],
        [21.744240, 21.744240, 21.744240, 21.744240, 21.744240],
        [3.241680, 12.494170, 1.620760, 1.375250, 2.331620]]

    coeffs[1][0] = [
        [0.337440, 0.337440, 0.969110, 1.097190, 1.116080],
        [0.337440, 0.337440, 0.969110, 1.116030, 0.623900],
        [0.337440, 0.337440, 1.530590, 1.024420, 0.908480],
        [0.584040, 0.584040, 0.847250, 0.914940, 1.289300],
        [0.337440, 0.337440, 0.310240, 1.435020, 1.852830],
        [0.337440, 0.337440, 1.015010, 1.097190, 2.117230],
        [0.337440, 0.337440, 0.969110, 1.145730, 1.476400]]

    coeffs[1][1] = [
        [0.300000, 0.300000, 0.700000, 1.100000, 0.796940],
        [0.219870, 0.219870, 0.526530, 0.809610, 0.649300],
        [0.386650, 0.386650, 0.119320, 0.576120, 0.685460],
        [0.746730, 0.399830, 0.470970, 0.986530, 0.785370],
        [0.575420, 0.936700, 1.649200, 1.495840, 1.335590],
        [1.319670, 4.002570, 1.276390, 2.644550, 2.518670],
        [0.665190, 0.678910, 1.012360, 1.199940, 0.986580]]

    coeffs[1][2] = [
        [0.378870, 0.974060, 0.500000, 0.491880, 0.665290],
        [0.105210, 0.263470, 0.407040, 0.553460, 0.582590],
        [0.312900, 0.345240, 1.144180, 0.854790, 0.612280],
        [0.119070, 0.365120, 0.560520, 0.793720, 0.802600],
        [0.781610, 0.837390, 1.270420, 1.537980, 1.292950],
        [1.152290, 1.152290, 1.492080, 1.245370, 2.177100],
        [0.424660, 0.529550, 0.966910, 1.033460, 0.958730]]

    coeffs[1][3] = [
        [0.310590, 0.714410, 0.252450, 0.500000, 0.607600],
        [0.975190, 0.363420, 0.500000, 0.400000, 0.502800],
        [0.175580, 0.196250, 0.476360, 1.072470, 0.490510],
        [0.719280, 0.698620, 0.657770, 1.190840, 0.681110],
        [0.426240, 1.464840, 0.678550, 1.157730, 0.978430],
        [2.501120, 1.789130, 1.387090, 2.394180, 2.394180],
        [0.491640, 0.677610, 0.685610, 1.082400, 0.735410]]

    coeffs[1][4] = [
        [0.597000, 0.500000, 0.300000, 0.310050, 0.413510],
        [0.314790, 0.336310, 0.400000, 0.400000, 0.442460],
        [0.166510, 0.460440, 0.552570, 1.000000, 0.461610],
        [0.401020, 0.559110, 0.403630, 1.016710, 0.671490],
        [0.400360, 0.750830, 0.842640, 1.802600, 1.023830],
        [3.315300, 1.510380, 2.443650, 1.638820, 2.133990],
        [0.530790, 0.745850, 0.693050, 1.458040, 0.804500]]

    coeffs[1][5] = [
        [0.597000, 0.500000, 0.300000, 0.310050, 0.800920],
        [0.314790, 0.336310, 0.400000, 0.400000, 0.237040],
        [0.166510, 0.460440, 0.552570, 1.000000, 0.581990],
        [0.401020, 0.559110, 0.403630, 1.016710, 0.898570],
        [0.400360, 0.750830, 0.842640, 1.802600, 3.400390],
        [3.315300, 1.510380, 2.443650, 1.638820, 2.508780],
        [0.204340, 1.157740, 2.003080, 2.622080, 1.409380]]

    coeffs[2][0] = [
        [1.242210, 1.242210, 1.242210, 1.242210, 1.242210],
        [0.056980, 0.056980, 0.656990, 0.656990, 0.925160],
        [0.089090, 0.089090, 1.040430, 1.232480, 1.205300],
        [1.053850, 1.053850, 1.399690, 1.084640, 1.233340],
        [1.151540, 1.151540, 1.118290, 1.531640, 1.411840],
        [1.494980, 1.494980, 1.700000, 1.800810, 1.671600],
        [1.018450, 1.018450, 1.153600, 1.321890, 1.294670]]

    coeffs[2][1] = [
        [0.700000, 0.700000, 1.023460, 0.700000, 0.945830],
        [0.886300, 0.886300, 1.333620, 0.800000, 1.066620],
        [0.902180, 0.902180, 0.954330, 1.126690, 1.097310],
        [1.095300, 1.075060, 1.176490, 1.139470, 1.096110],
        [1.201660, 1.201660, 1.438200, 1.256280, 1.198060],
        [1.525850, 1.525850, 1.869160, 1.985410, 1.911590],
        [1.288220, 1.082810, 1.286370, 1.166170, 1.119330]]

    coeffs[2][2] = [
        [0.600000, 1.029910, 0.859890, 0.550000, 0.813600],
        [0.604450, 1.029910, 0.859890, 0.656700, 0.928840],
        [0.455850, 0.750580, 0.804930, 0.823000, 0.911000],
        [0.526580, 0.932310, 0.908620, 0.983520, 0.988090],
        [1.036110, 1.100690, 0.848380, 1.035270, 1.042380],
        [1.048440, 1.652720, 0.900000, 2.350410, 1.082950],
        [0.817410, 0.976160, 0.861300, 0.974780, 1.004580]]

    coeffs[2][3] = [
        [0.782110, 0.564280, 0.600000, 0.600000, 0.665740],
        [0.894480, 0.680730, 0.541990, 0.800000, 0.669140],
        [0.487460, 0.818950, 0.841830, 0.872540, 0.709040],
        [0.709310, 0.872780, 0.908480, 0.953290, 0.844350],
        [0.863920, 0.947770, 0.876220, 1.078750, 0.936910],
        [1.280350, 0.866720, 0.769790, 1.078750, 0.975130],
        [0.725420, 0.869970, 0.868810, 0.951190, 0.829220]]

    coeffs[2][4] = [
        [0.791750, 0.654040, 0.483170, 0.409000, 0.597180],
        [0.566140, 0.948990, 0.971820, 0.653570, 0.718550],
        [0.648710, 0.637730, 0.870510, 0.860600, 0.694300],
        [0.637630, 0.767610, 0.925670, 0.990310, 0.847670],
        [0.736380, 0.946060, 1.117590, 1.029340, 0.947020],
        [1.180970, 0.850000, 1.050000, 0.950000, 0.888580],
        [0.700560, 0.801440, 0.961970, 0.906140, 0.823880]]

    coeffs[2][5] = [
        [0.500000, 0.500000, 0.586770, 0.470550, 0.629790],
        [0.500000, 0.500000, 1.056220, 1.260140, 0.658140],
        [0.500000, 0.500000, 0.631830, 0.842620, 0.582780],
        [0.554710, 0.734730, 0.985820, 0.915640, 0.898260],
        [0.712510, 1.205990, 0.909510, 1.078260, 0.885610],
        [1.899260, 1.559710, 1.000000, 1.150000, 1.120390],
        [0.653880, 0.793120, 0.903320, 0.944070, 0.796130]]

    coeffs[3][0] = [
        [1.000000, 1.000000, 1.050000, 1.170380, 1.178090],
        [0.960580, 0.960580, 1.059530, 1.179030, 1.131690],
        [0.871470, 0.871470, 0.995860, 1.141910, 1.114600],
        [1.201590, 1.201590, 0.993610, 1.109380, 1.126320],
        [1.065010, 1.065010, 0.828660, 0.939970, 1.017930],
        [1.065010, 1.065010, 0.623690, 1.119620, 1.132260],
        [1.071570, 1.071570, 0.958070, 1.114130, 1.127110]]

    coeffs[3][1] = [
        [0.950000, 0.973390, 0.852520, 1.092200, 1.096590],
        [0.804120, 0.913870, 0.980990, 1.094580, 1.042420],
        [0.737540, 0.935970, 0.999940, 1.056490, 1.050060],
        [1.032980, 1.034540, 0.968460, 1.032080, 1.015780],
        [0.900000, 0.977210, 0.945960, 1.008840, 0.969960],
        [0.600000, 0.750000, 0.750000, 0.844710, 0.899100],
        [0.926800, 0.965030, 0.968520, 1.044910, 1.032310]]

    coeffs[3][2] = [
        [0.850000, 1.029710, 0.961100, 1.055670, 1.009700],
        [0.818530, 0.960010, 0.996450, 1.081970, 1.036470],
        [0.765380, 0.953500, 0.948260, 1.052110, 1.000140],
        [0.775610, 0.909610, 0.927800, 0.987800, 0.952100],
        [1.000990, 0.881880, 0.875950, 0.949100, 0.893690],
        [0.902370, 0.875960, 0.807990, 0.942410, 0.917920],
        [0.856580, 0.928270, 0.946820, 1.032260, 0.972990]]

    coeffs[3][3] = [
        [0.750000, 0.857930, 0.983800, 1.056540, 0.980240],
        [0.750000, 0.987010, 1.013730, 1.133780, 1.038250],
        [0.800000, 0.947380, 1.012380, 1.091270, 0.999840],
        [0.800000, 0.914550, 0.908570, 0.999190, 0.915230],
        [0.778540, 0.800590, 0.799070, 0.902180, 0.851560],
        [0.680190, 0.317410, 0.507680, 0.388910, 0.646710],
        [0.794920, 0.912780, 0.960830, 1.057110, 0.947950]]

    coeffs[3][4] = [
        [0.750000, 0.833890, 0.867530, 1.059890, 0.932840],
        [0.979700, 0.971470, 0.995510, 1.068490, 1.030150],
        [0.858850, 0.987920, 1.043220, 1.108700, 1.044900],
        [0.802400, 0.955110, 0.911660, 1.045070, 0.944470],
        [0.884890, 0.766210, 0.885390, 0.859070, 0.818190],
        [0.615680, 0.700000, 0.850000, 0.624620, 0.669300],
        [0.835570, 0.946150, 0.977090, 1.049350, 0.979970]]

    coeffs[3][5] = [
        [0.689220, 0.809600, 0.900000, 0.789500, 0.853990],
        [0.854660, 0.852840, 0.938200, 0.923110, 0.955010],
        [0.938600, 0.932980, 1.010390, 1.043950, 1.041640],
        [0.843620, 0.981300, 0.951590, 0.946100, 0.966330],
        [0.694740, 0.814690, 0.572650, 0.400000, 0.726830],
        [0.211370, 0.671780, 0.416340, 0.297290, 0.498050],
        [0.843540, 0.882330, 0.911760, 0.898420, 0.960210]]

    coeffs[4][0] = [
        [1.054880, 1.075210, 1.068460, 1.153370, 1.069220],
        [1.000000, 1.062220, 1.013470, 1.088170, 1.046200],
        [0.885090, 0.993530, 0.942590, 1.054990, 1.012740],
        [0.920000, 0.950000, 0.978720, 1.020280, 0.984440],
        [0.850000, 0.908500, 0.839940, 0.985570, 0.962180],
        [0.800000, 0.800000, 0.810080, 0.950000, 0.961550],
        [1.038590, 1.063200, 1.034440, 1.112780, 1.037800]]

    coeffs[4][1] = [
        [1.017610, 1.028360, 1.058960, 1.133180, 1.045620],
        [0.920000, 0.998970, 1.033590, 1.089030, 1.022060],
        [0.912370, 0.949930, 0.979770, 1.020420, 0.981770],
        [0.847160, 0.935300, 0.930540, 0.955050, 0.946560],
        [0.880260, 0.867110, 0.874130, 0.972650, 0.883420],
        [0.627150, 0.627150, 0.700000, 0.774070, 0.845130],
        [0.973700, 1.006240, 1.026190, 1.071960, 1.017240]]

    coeffs[4][2] = [
        [1.028710, 1.017570, 1.025900, 1.081790, 1.024240],
        [0.924980, 0.985500, 1.014100, 1.092210, 0.999610],
        [0.828570, 0.934920, 0.994950, 1.024590, 0.949710],
        [0.900810, 0.901330, 0.928830, 0.979570, 0.913100],
        [0.761030, 0.845150, 0.805360, 0.936790, 0.853460],
        [0.626400, 0.546750, 0.730500, 0.850000, 0.689050],
        [0.957630, 0.985480, 0.991790, 1.050220, 0.987900]]

    coeffs[4][3] = [
        [0.992730, 0.993880, 1.017150, 1.059120, 1.017450],
        [0.975610, 0.987160, 1.026820, 1.075440, 1.007250],
        [0.871090, 0.933190, 0.974690, 0.979840, 0.952730],
        [0.828750, 0.868090, 0.834920, 0.905510, 0.871530],
        [0.781540, 0.782470, 0.767910, 0.764140, 0.795890],
        [0.743460, 0.693390, 0.514870, 0.630150, 0.715660],
        [0.934760, 0.957870, 0.959640, 0.972510, 0.981640]]

    coeffs[4][4] = [
        [0.965840, 0.941240, 0.987100, 1.022540, 1.011160],
        [0.988630, 0.994770, 0.976590, 0.950000, 1.034840],
        [0.958200, 1.018080, 0.974480, 0.920000, 0.989870],
        [0.811720, 0.869090, 0.812020, 0.850000, 0.821050],
        [0.682030, 0.679480, 0.632450, 0.746580, 0.738550],
        [0.668290, 0.445860, 0.500000, 0.678920, 0.696510],
        [0.926940, 0.953350, 0.959050, 0.876210, 0.991490]]

    coeffs[4][5] = [
        [0.948940, 0.997760, 0.850000, 0.826520, 0.998470],
        [1.017860, 0.970000, 0.850000, 0.700000, 0.988560],
        [1.000000, 0.950000, 0.850000, 0.606240, 0.947260],
        [1.000000, 0.746140, 0.751740, 0.598390, 0.725230],
        [0.922210, 0.500000, 0.376800, 0.517110, 0.548630],
        [0.500000, 0.450000, 0.429970, 0.404490, 0.539940],
        [0.960430, 0.881630, 0.775640, 0.596350, 0.937680]]

    coeffs[5][0] = [
        [1.030000, 1.040000, 1.000000, 1.000000, 1.049510],
        [1.050000, 0.990000, 0.990000, 0.950000, 0.996530],
        [1.050000, 0.990000, 0.990000, 0.820000, 0.971940],
        [1.050000, 0.790000, 0.880000, 0.820000, 0.951840],
        [1.000000, 0.530000, 0.440000, 0.710000, 0.928730],
        [0.540000, 0.470000, 0.500000, 0.550000, 0.773950],
        [1.038270, 0.920180, 0.910930, 0.821140, 1.034560]]

    coeffs[5][1] = [
        [1.041020, 0.997520, 0.961600, 1.000000, 1.035780],
        [0.948030, 0.980000, 0.900000, 0.950360, 0.977460],
        [0.950000, 0.977250, 0.869270, 0.800000, 0.951680],
        [0.951870, 0.850000, 0.748770, 0.700000, 0.883850],
        [0.900000, 0.823190, 0.727450, 0.600000, 0.839870],
        [0.850000, 0.805020, 0.692310, 0.500000, 0.788410],
        [1.010090, 0.895270, 0.773030, 0.816280, 1.011680]]

    coeffs[5][2] = [
        [1.022450, 1.004600, 0.983650, 1.000000, 1.032940],
        [0.943960, 0.999240, 0.983920, 0.905990, 0.978150],
        [0.936240, 0.946480, 0.850000, 0.850000, 0.930320],
        [0.816420, 0.885000, 0.644950, 0.817650, 0.865310],
        [0.742960, 0.765690, 0.561520, 0.700000, 0.827140],
        [0.643870, 0.596710, 0.474460, 0.600000, 0.651200],
        [0.971740, 0.940560, 0.714880, 0.864380, 1.001650]]

    coeffs[5][3] = [
        [0.995260, 0.977010, 1.000000, 1.000000, 1.035250],
        [0.939810, 0.975250, 0.939980, 0.950000, 0.982550],
        [0.876870, 0.879440, 0.850000, 0.900000, 0.917810],
        [0.873480, 0.873450, 0.751470, 0.850000, 0.863040],
        [0.761470, 0.702360, 0.638770, 0.750000, 0.783120],
        [0.734080, 0.650000, 0.600000, 0.650000, 0.715660],
        [0.942160, 0.919100, 0.770340, 0.731170, 0.995180]]

    coeffs[5][4] = [
        [0.952560, 0.916780, 0.920000, 0.900000, 1.005880],
        [0.928620, 0.994420, 0.900000, 0.900000, 0.983720],
        [0.913070, 0.850000, 0.850000, 0.800000, 0.924280],
        [0.868090, 0.807170, 0.823550, 0.600000, 0.844520],
        [0.769570, 0.719870, 0.650000, 0.550000, 0.733500],
        [0.580250, 0.650000, 0.600000, 0.500000, 0.628850],
        [0.904770, 0.852650, 0.708370, 0.493730, 0.949030]]

    coeffs[5][5] = [
        [0.911970, 0.800000, 0.800000, 0.800000, 0.956320],
        [0.912620, 0.682610, 0.750000, 0.700000, 0.950110],
        [0.653450, 0.659330, 0.700000, 0.600000, 0.856110],
        [0.648440, 0.600000, 0.641120, 0.500000, 0.695780],
        [0.570000, 0.550000, 0.598800, 0.400000, 0.560150],
        [0.475230, 0.500000, 0.518640, 0.339970, 0.520230],
        [0.743440, 0.592190, 0.603060, 0.316930, 0.794390]]

    return coeffs
0.000062
def _get(url, profile):
    '''Get a specific dashboard.'''
    request_url = "{0}/api/dashboards/{1}".format(profile.get('grafana_url'), url)
    response = requests.get(
        request_url,
        headers={
            "Accept": "application/json",
            "Authorization": "Bearer {0}".format(profile.get('grafana_token'))
        },
        timeout=profile.get('grafana_timeout', 3),
    )
    data = response.json()
    if data.get('message') == 'Not found':
        return None
    if 'dashboard' not in data:
        return None
    return data['dashboard']
0.001613
def remove_unreachable_symbols(grammar, inplace=False):
    # type: (Grammar, bool) -> Grammar
    """
    Remove unreachable symbols from the grammar.

    :param grammar: Grammar from which to remove the symbols.
    :param inplace: True if the transformation should be performed in place. False by default.
    :return: Grammar without unreachable symbols.
    """
    # copy if required
    if inplace is False:
        grammar = copy(grammar)
    # check if start symbol is set
    if grammar.start is None:
        raise StartSymbolNotSetException()
    # create process sets
    reachable = {grammar.start}
    rules = grammar.rules.copy()
    # begin iterations
    while True:
        # create sets for current iteration
        active = reachable.copy()
        # loop the working rules
        for rule in rules.copy():
            # if the left part of the rule is already in reachable symbols
            if rule.fromSymbol in reachable:
                # set symbols on the right as reachable
                for symbol in rule.right:
                    active.add(symbol)
                # remove rule from the next iteration
                rules.remove(rule)
        # end of rules loop
        # if current and previous iterations are same, we are done
        if active == reachable:
            break
        # otherwise swap the sets
        reachable = active
    # remove the symbols
    nonterminals_to_remove = grammar.nonterminals.difference(reachable)
    terminals_to_remove = grammar.terminals.difference(reachable)
    grammar.nonterminals.remove(*nonterminals_to_remove)
    grammar.terminals.remove(*terminals_to_remove)
    # return grammar
    return grammar
0.001198
def get_encoded_query_params(self):
    """Return encoded query params to be used in proxied request"""
    get_data = encode_items(self.request.GET.lists())
    return urlencode(get_data)
0.01
def on_evaluate_request(self, py_db, request):
    '''
    :param EvaluateRequest request:
    '''
    # : :type arguments: EvaluateArguments
    arguments = request.arguments

    thread_id = py_db.suspended_frames_manager.get_thread_id_for_variable_reference(
        arguments.frameId)

    self.api.request_exec_or_evaluate_json(py_db, request, thread_id)
0.007426
def from_PyCMDS(filepath, name=None, parent=None, verbose=True) -> Data:
    """Create a data object from a single PyCMDS output file.

    Parameters
    ----------
    filepath : path-like
        Path to the .data file
        Can be either a local or remote file (http/ftp).
        Can be compressed with gz/bz2, decompression based on file name.
    name : str or None (optional)
        The name to be applied to the new data object. If None, name is read
        from file.
    parent : WrightTools.Collection (optional)
        Collection to place new data object within. Default is None.
    verbose : bool (optional)
        Toggle talkback. Default is True.

    Returns
    -------
    data
        A Data instance.
    """
    filestr = os.fspath(filepath)
    filepath = pathlib.Path(filepath)

    # header
    ds = np.DataSource(None)
    file_ = ds.open(filestr, "rt")
    headers = tidy_headers.read(file_)
    file_.seek(0)
    # name
    if name is None:  # name not given in method arguments
        data_name = headers["data name"]
    else:
        data_name = name
    if data_name == "":  # name not given in PyCMDS
        data_name = headers["data origin"]
    # create data object
    kwargs = {
        "name": data_name,
        "kind": "PyCMDS",
        "source": filestr,
        "created": headers["file created"],
    }
    if parent is not None:
        data = parent.create_data(**kwargs)
    else:
        data = Data(**kwargs)
    # array
    arr = np.genfromtxt(file_).T
    file_.close()
    # get axes and scanned variables
    axes = []
    for name, identity, units in zip(
        headers["axis names"], headers["axis identities"], headers["axis units"]
    ):
        # points and centers
        points = np.array(headers[name + " points"])
        if name + " centers" in headers.keys():
            centers = headers[name + " centers"]
        else:
            centers = None
        # create
        axis = {
            "points": points,
            "units": units,
            "name": name,
            "identity": identity,
            "centers": centers,
        }
        axes.append(axis)
    shape = tuple([a["points"].size for a in axes])
    for i, ax in enumerate(axes):
        sh = [1] * len(shape)
        sh[i] = len(ax["points"])
        data.create_variable(
            name=ax["name"] + "_points", values=np.array(ax["points"]).reshape(sh)
        )
        if ax["centers"] is not None:
            sh = list(shape)
            sh[i] = 1
            data.create_variable(
                name=ax["name"] + "_centers", values=np.array(ax["centers"]).reshape(sh)
            )
    # get assorted remaining things
    # variables and channels
    try:
        signed = iter(headers["channel signed"])
    except KeyError:
        signed = itertools.repeat(False)
    for index, kind, name in zip(range(len(arr)), headers["kind"], headers["name"]):
        values = np.full(np.prod(shape), np.nan)
        values[: len(arr[index])] = arr[index]
        values.shape = shape
        if name == "time":
            data.create_variable(name="labtime", values=values)
        if kind == "hardware":
            # sadly, recorded tolerances are not reliable
            # so a bit of hard-coded hacking is needed
            # if this ends up being too fragile, we might have to use the points arrays
            # ---Blaise 2018-01-09
            units = headers["units"][index]
            label = headers["label"][index]
            if (
                "w" in name
                and name.startswith(tuple(data.variable_names))
                and name not in headers["axis names"]
            ):
                inherited_shape = data[name.split("_")[0]].shape
                for i, s in enumerate(inherited_shape):
                    if s == 1:
                        values = np.mean(values, axis=i)
                        values = np.expand_dims(values, i)
            else:
                tolerance = headers["tolerance"][index]
                units = headers["units"][index]
                for i in range(len(shape)):
                    if tolerance is None:
                        break
                    if "d" in name:
                        # This is a hack because delay is particularly
                        # unreliable in tolerance. And 3 fs vs 3 ps is a huge
                        # difference... KFS 2019-2-27
                        if units == "fs":
                            tolerance = 3.
                        else:
                            tolerance = 0.1
                    if "zero" in name:
                        tolerance = 1e-10
                    try:
                        assert i == headers["axis names"].index(name)
                        tolerance = 0
                    except (ValueError, AssertionError):
                        if (
                            name in headers["axis names"]
                            and "%s_centers" % name not in data.variable_names
                        ):
                            tolerance = np.inf
                    mean = np.nanmean(values, axis=i)
                    mean = np.expand_dims(mean, i)
                    values, meanexp = wt_kit.share_nans(values, mean)
                    if np.allclose(meanexp, values, atol=tolerance, equal_nan=True):
                        values = mean
            if name in headers["axis names"]:
                points = np.array(headers[name + " points"])
                pointsshape = [1] * values.ndim
                for i, ax in enumerate(axes):
                    if ax["name"] == name:
                        pointsshape[i] = len(points)
                        break
                points.shape = pointsshape
                points = wt_units.converter(points, headers["axis units"][i], units)
                for i in range(points.ndim):
                    if points.shape[i] == 1:
                        points = np.repeat(points, values.shape[i], axis=i)
                if points.size <= values.size:
                    values[np.isnan(values)] = points[np.isnan(values)]
            data.create_variable(name, values=values, units=units, label=label)
        if kind == "channel":
            data.create_channel(name=name, values=values, shape=values.shape, signed=next(signed))
    # axes
    for a in axes:
        expression = a["identity"]
        if expression.startswith("D"):
            expression = expression[1:]
        expression = expression.replace("=D", "=")
        a["expression"] = expression
    data.transform(*[a["expression"] for a in axes])
    for a, u in zip(data.axes, headers["axis units"]):
        if u is not None:
            a.convert(u)
    if (
        headers["system name"] == "fs"
        and int(headers["PyCMDS version"].split(".")[0]) == 0
        and int(headers["PyCMDS version"].split(".")[1]) < 10
    ):
        # in versions of PyCMDS up to (and including) 0.9.0
        # there was an incorrect hard-coded conversion factor between mm and fs
        # this ONLY applied to Newport MFA stages
        # we apply this correction knowing that Newport MFAs were only used on the "fs" system
        # and knowing that the Newport MFAs were always assigned as "d1", "d2" and "d3"
        # ---Blaise 2019-04-09
        for delay in ("d1", "d2", "d3", "d1_points", "d2_points", "d3_points"):
            if delay not in data.variable_names:
                continue
            data[delay][:] *= 6000.671281903963041 / 6671.281903963041
            if verbose:
                print(f"Correction factor applied to {delay}")
    # return
    if verbose:
        print("data created at {0}".format(data.fullpath))
        print("  axes: {0}".format(data.axis_names))
        print("  shape: {0}".format(data.shape))
    return data
0.00141
def get_credential_cache():
    '''if the user has specified settings to provide a cache for credentials
    files, initialize it. The root for the folder is created if it doesn't
    exist. The path for the specific client is returned, and it's not assumed
    to be either a folder or a file (this is up to the developer of the
    client).
    '''
    from sregistry.defaults import (
        CREDENTIAL_CACHE,
        SREGISTRY_CLIENT
    )

    client_credential_cache = None

    # Check 1: user can disable a credential cache on the client level
    if CREDENTIAL_CACHE is not None:
        env = 'SREGISTRY_DISABLE_CREDENTIAL_%s' % SREGISTRY_CLIENT.upper()
        if os.environ.get(env) is not None:
            bot.debug('[%s] cache disabled' % SREGISTRY_CLIENT)
            CREDENTIAL_CACHE = None

    # Check 2: create the cache folder for the client, if it doesn't exist
    if CREDENTIAL_CACHE is not None:
        if not os.path.exists(CREDENTIAL_CACHE):
            mkdir_p(CREDENTIAL_CACHE)
        client_credential_cache = '%s/%s' % (CREDENTIAL_CACHE, SREGISTRY_CLIENT)

    if client_credential_cache is not None:
        bot.debug('credentials cache')

    return client_credential_cache
0.005868
def validate_event_type(sender, event, created):
    """Verify that the Event's code is a valid one."""
    if event.code not in sender.event_codes():
        raise ValueError("The Event.code '{}' is not a valid Event "
                         "code.".format(event.code))
0.003676
def set_property_value(self, name, value, dry_run=False):
    """Set a property value or remove a property.

    value == None means 'remove property'.
    Raise HTTP_FORBIDDEN if property is read-only, or not supported.

    When dry_run is True, this function should raise errors, as in a real
    run, but MUST NOT change any data.

    This default implementation

    - raises HTTP_FORBIDDEN, if trying to modify a locking property
    - raises HTTP_FORBIDDEN, if trying to modify an immutable {DAV:} property
    - handles Windows' Win32LastModifiedTime to set the getlastmodified
      property, if enabled
    - stores everything else as dead property, if a property manager is
      present.
    - raises HTTP_FORBIDDEN, else

    Removing a non-existing prop is NOT an error.

    Note: RFC 4918 states that {DAV:}displayname 'SHOULD NOT be protected'

    A resource provider may override this method, to update supported custom
    live properties.
    """
    assert value is None or xml_tools.is_etree_element(value)

    if name in _lockPropertyNames:
        # Locking properties are always read-only
        raise DAVError(
            HTTP_FORBIDDEN, err_condition=PRECONDITION_CODE_ProtectedProperty
        )

    # Live property
    config = self.environ["wsgidav.config"]
    # hotfixes = config.get("hotfixes", {})
    mutableLiveProps = config.get("mutable_live_props", [])
    # Accept custom live property updates on resources if configured.
    if (
        name.startswith("{DAV:}")
        and name in _standardLivePropNames
        and name in mutableLiveProps
    ):
        # Please note that some properties should not be mutable according
        # to RFC4918. This includes the 'getlastmodified' property, which
        # it may still make sense to make mutable in order to support time
        # stamp changes from e.g. utime calls or the touch or rsync -a
        # commands.
        if name in ("{DAV:}getlastmodified", "{DAV:}last_modified"):
            try:
                return self.set_last_modified(self.path, value.text, dry_run)
            except Exception:
                _logger.warning(
                    "Provider does not support set_last_modified on {}.".format(
                        self.path
                    )
                )

        # Unsupported or not allowed
        raise DAVError(HTTP_FORBIDDEN)

    # Handle MS Windows Win32LastModifiedTime, if enabled.
    # Note that the WebDAV client in Win7 and earlier has issues and can't be
    # used with this, so we ignore older clients. Others pre-Win10 should be
    # tested.
    if name.startswith("{urn:schemas-microsoft-com:}"):
        agent = self.environ.get("HTTP_USER_AGENT", "None")
        win32_emu = config.get("hotfixes", {}).get("emulate_win32_lastmod", False)
        if win32_emu and "MiniRedir/6.1" not in agent:
            if "Win32LastModifiedTime" in name:
                return self.set_last_modified(self.path, value.text, dry_run)
            elif "Win32FileAttributes" in name:
                return True
            elif "Win32CreationTime" in name:
                return True
            elif "Win32LastAccessTime" in name:
                return True

    # Dead property
    pm = self.provider.prop_manager
    if pm and not name.startswith("{DAV:}"):
        refUrl = self.get_ref_url()
        if value is None:
            return pm.remove_property(refUrl, name, dry_run, self.environ)
        else:
            value = etree.tostring(value)
            return pm.write_property(refUrl, name, value, dry_run, self.environ)

    raise DAVError(HTTP_FORBIDDEN)
0.002806
def __clean_and_tokenize(self, doc_list):
    """Method to clean and tokenize the document list.

    :param doc_list: Document list to clean and tokenize.
    :return: Cleaned and tokenized document list.
    """
    # Some repositories fill entire documentation in description. We ignore
    # such repositories for cleaner tokens.
    doc_list = filter(
        lambda x: x is not None and len(x) <= GitSuggest.MAX_DESC_LEN,
        doc_list,
    )

    cleaned_doc_list = list()

    # Regular expression to filter out all punctuation, numbers and other
    # unnecessary text substrings like emojis etc.
    tokenizer = RegexpTokenizer(r"[a-zA-Z]+")

    # Get stop words.
    stopwords = self.__get_words_to_ignore()

    # Get english words.
    dict_words = self.__get_words_to_consider()

    for doc in doc_list:
        # Lowercase doc.
        lower = doc.lower()

        # Tokenize removing numbers and punctuation.
        tokens = tokenizer.tokenize(lower)

        # Include meaningful words.
        tokens = [tok for tok in tokens if tok in dict_words]

        # Remove stopwords.
        tokens = [tok for tok in tokens if tok not in stopwords]

        # Filter Nones if any are introduced.
        tokens = [tok for tok in tokens if tok is not None]

        cleaned_doc_list.append(tokens)

    return cleaned_doc_list
0.001362
def get_attachment_model():
    """
    Returns the Attachment model that is active in this project.
    """
    try:
        from .models import AbstractAttachment
        klass = apps.get_model(config["attachment_model"])
        if not issubclass(klass, AbstractAttachment):
            raise ImproperlyConfigured(
                "SUMMERNOTE_CONFIG['attachment_model'] refers to model '%s' that is not "
                "inherited from 'django_summernote.models.AbstractAttachment'"
                % config["attachment_model"]
            )
        return klass
    except ValueError:
        raise ImproperlyConfigured(
            "SUMMERNOTE_CONFIG['attachment_model'] must be of the form 'app_label.model_name'")
    except LookupError:
        raise ImproperlyConfigured(
            "SUMMERNOTE_CONFIG['attachment_model'] refers to model '%s' that has not been installed"
            % config["attachment_model"]
        )
0.005599
def save_features(paths: List[str], datas: List[np.ndarray], compressed: bool = False) -> List:
    """
    Save features specified with absolute paths.

    :param paths: List of files specified with paths.
    :param datas: List of numpy ndarrays to save into the respective files
    :param compressed: Use numpy compression
    :return: A list of file names.
    """
    fnames = []  # type: List[str]
    for path, data in zip(paths, datas):
        fnames.append(save_feature(path, data, compressed))
    return fnames
0.001845
def _check_peptide_inputs(self, peptides):
    """
    Check peptide sequences to make sure they are valid for this predictor.
    """
    require_iterable_of(peptides, string_types)
    check_X = not self.allow_X_in_peptides
    check_lower = not self.allow_lowercase_in_peptides
    check_min_length = self.min_peptide_length is not None
    min_length = self.min_peptide_length
    check_max_length = self.max_peptide_length is not None
    max_length = self.max_peptide_length
    for p in peptides:
        if not p.isalpha():
            raise ValueError("Invalid characters in peptide '%s'" % p)
        elif check_X and "X" in p:
            raise ValueError("Invalid character 'X' in peptide '%s'" % p)
        elif check_lower and not p.isupper():
            raise ValueError("Invalid lowercase letters in peptide '%s'" % p)
        elif check_min_length and len(p) < min_length:
            raise ValueError(
                "Peptide '%s' too short (%d chars), must be at least %d" % (
                    p, len(p), min_length))
        elif check_max_length and len(p) > max_length:
            raise ValueError(
                "Peptide '%s' too long (%d chars), must be at most %d" % (
                    p, len(p), max_length))
0.002972
def receiveData(self, connection, data):
    """
    Receives some data for the given protocol.
    """
    try:
        protocol = self._protocols[connection]
    except KeyError:
        raise NoSuchConnection()
    protocol.dataReceived(data)
    return {}
0.006757
def import_lv_stations(self, session):
    """
    Import lv_stations within the given load_area

    Parameters
    ----------
    session : sqlalchemy.orm.session.Session
        Database session

    Returns
    -------
    lv_stations: :pandas:`pandas.DataFrame<dataframe>`
        Table of lv_stations
    """
    # get ding0s' standard CRS (SRID)
    srid = str(int(cfg_ding0.get('geo', 'srid')))
    # get list of mv grid districts
    mv_grid_districts = list(self.get_mvgd_lvla_lvgd_obj_from_id()[0])

    lv_stations_sqla = session.query(self.orm['orm_lv_stations'].mvlv_subst_id,
                                     self.orm['orm_lv_stations'].la_id,
                                     func.ST_AsText(func.ST_Transform(
                                         self.orm['orm_lv_stations'].geom,
                                         srid)).label('geom')). \
        filter(self.orm['orm_lv_stations'].subst_id.in_(mv_grid_districts)). \
        filter(self.orm['version_condition_mvlvst'])

    # read data from db
    lv_grid_stations = pd.read_sql_query(lv_stations_sqla.statement,
                                         session.bind,
                                         index_col='mvlv_subst_id')
    return lv_grid_stations
0.004425
def is_metal(self, efermi_tol=1e-4):
    """
    Check if the band structure indicates a metal by looking if the fermi
    level crosses a band.

    Returns:
        True if a metal, False if not
    """
    for spin, values in self.bands.items():
        for i in range(self.nb_bands):
            if np.any(values[i, :] - self.efermi < -efermi_tol) and \
                    np.any(values[i, :] - self.efermi > efermi_tol):
                return True
    return False
0.003854
@contextmanager
def batch_watch(parameterized, run=True):
    """
    Context manager to batch watcher events on a parameterized object.

    The context manager will queue any events triggered by setting a
    parameter on the supplied parameterized object and dispatch them
    all at once when the context manager exits. If run=False the
    queued events are not dispatched and should be processed manually.
    """
    BATCH_WATCH = parameterized.param._BATCH_WATCH
    parameterized.param._BATCH_WATCH = True
    try:
        yield
    finally:
        parameterized.param._BATCH_WATCH = BATCH_WATCH
        if run and not BATCH_WATCH:
            parameterized.param._batch_call_watchers()
0.001473
def docstring_section_lines(docstring, section_name):
    """
    Return a section of a numpydoc string

    Parameters
    ----------
    docstring : str
        Docstring
    section_name : str
        Name of section to return

    Returns
    -------
    section : str
        Section minus the header
    """
    lines = []
    inside_section = False
    underline = '-' * len(section_name)
    expect_underline = False
    for line in docstring.splitlines():
        _line = line.strip().lower()

        if expect_underline:
            expect_underline = False
            if _line == underline:
                inside_section = True
                continue

        if _line == section_name:
            expect_underline = True
        elif _line in DOCSTRING_SECTIONS:
            # next section
            break
        elif inside_section:
            lines.append(line)
    return '\n'.join(lines)
0.001099
def set_active_scheme(self, scheme, case_sensitive=0):
    """Set the currently active scheme.

    Names are by default compared in a case-insensitive way, but this can
    be changed by setting the parameter case_sensitive to true."""
    scheme_names = self.keys()
    if case_sensitive:
        valid_schemes = scheme_names
        scheme_test = scheme
    else:
        valid_schemes = [s.lower() for s in scheme_names]
        scheme_test = scheme.lower()
    try:
        scheme_idx = valid_schemes.index(scheme_test)
    except ValueError:
        raise ValueError('Unrecognized color scheme: ' + scheme +
                         '\nValid schemes: ' + str(scheme_names).replace("'', ", ''))
    else:
        active = scheme_names[scheme_idx]
        self.active_scheme_name = active
        self.active_colors = self[active].colors
        # Now allow using '' as an index for the current active scheme
        self[''] = self[active]
0.006903
def list_leases(self, uuid=None):
    """
    List current subnet leases

    Args:
        uuid(str): Filter the leases by uuid

    Returns:
        list of :class:~Lease: current leases
    """
    try:
        lease_files = os.listdir(self.path)
    except OSError as e:
        raise_from(
            LagoSubnetLeaseBadPermissionsException(self.path, e.strerror), e
        )

    leases = [
        self.create_lease_object_from_idx(lease_file.split('.')[0])
        for lease_file in lease_files if lease_file != LOCK_NAME
    ]
    if not uuid:
        return leases
    else:
        return [lease for lease in leases if lease.uuid == uuid]
0.002663
def calculate_bearing(
        place_geometry, latitude, longitude,
        earthquake_hazard=None, place_exposure=None):
    """Simple postprocessor where we compute the bearing angle between two
    points.

    :param place_geometry: Geometry of place.
    :type place_geometry: QgsGeometry

    :param latitude: The latitude to use.
    :type latitude: float

    :param longitude: The longitude to use.
    :type longitude: float

    :param earthquake_hazard: The hazard to use.
    :type earthquake_hazard: str

    :param place_exposure: The exposure to use.
    :type place_exposure: str

    :return: Bearing angle
    :rtype: float
    """
    _ = earthquake_hazard, place_exposure  # NOQA

    epicenter = QgsPointXY(longitude, latitude)
    place_point = place_geometry.asPoint()
    bearing = place_point.azimuth(epicenter)
    return bearing
0.001135
def _set_relative_pythonpath(self, value):
    """Set PYTHONPATH list relative paths"""
    self.pythonpath = [osp.abspath(osp.join(self.root_path, path))
                       for path in value]
0.009479
def traverse(self, root="ROOT", indent="", transform=None, stream=sys.stdout):
    '''
    Traverses the C{View} tree and prints its nodes.

    The nodes are printed converting them to string but other transformations can be specified
    by providing a method name as the C{transform} parameter.

    @type root: L{View}
    @param root: the root node from where the traverse starts
    @type indent: str
    @param indent: the indentation string to use to print the nodes
    @type transform: method
    @param transform: a method to use to transform the node before is printed
    '''
    if transform is None:
        # this cannot be a default value, otherwise
        # TypeError: 'staticmethod' object is not callable
        # is raised
        transform = ViewClient.TRAVERSE_CIT

    if type(root) == types.StringType and root == "ROOT":
        root = self.root

    return ViewClient.__traverse(root, indent, transform, stream)
0.004916
def remove_graphic(self, graphic: Graphics.Graphic, *, safe: bool=False) -> typing.Optional[typing.Sequence]: """Remove a graphic, but do it through the container, so dependencies can be tracked.""" return self.remove_model_item(self, "graphics", graphic, safe=safe)
0.021277
def _compute_anom_score_between_two_windows(self, i): """ Compute distance difference between two windows' chunk frequencies, which is then marked as the anomaly score of the data point on the window boundary in the middle. :param int i: index of the data point between two windows. :return float: the anomaly score. """ lag_window_chunk_dict = self.lag_dicts[i] future_window_chunk_dict = self.fut_dicts[i] score = 0 for chunk in lag_window_chunk_dict: if chunk in future_window_chunk_dict: score += math.pow(future_window_chunk_dict[chunk] - lag_window_chunk_dict[chunk], 2) else: score += math.pow(lag_window_chunk_dict[chunk], 2) for chunk in future_window_chunk_dict: if chunk not in lag_window_chunk_dict: score += math.pow(future_window_chunk_dict[chunk], 2) return score
0.004175
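# A minimal standalone sketch of the distance used above: the anomaly score is
# the squared Euclidean distance between two chunk-frequency dictionaries,
# treating a chunk missing from one window as having frequency 0. The window
# dicts below are hypothetical stand-ins for self.lag_dicts[i] / self.fut_dicts[i].
def chunk_dict_distance(lag, fut):
    score = 0.0
    for chunk in set(lag) | set(fut):
        score += (fut.get(chunk, 0) - lag.get(chunk, 0)) ** 2
    return score

lag_window = {'ab': 3, 'bc': 1}
fut_window = {'ab': 1, 'cd': 2}
# (1 - 3)^2 + (0 - 1)^2 + (2 - 0)^2 = 9
assert chunk_dict_distance(lag_window, fut_window) == 9.0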
def get_broadcast(cast_name, onto_name):
    """
    Get a single broadcast.

    Broadcasts are stored data about how to do a Pandas join.
    A Broadcast object is a namedtuple with these attributes:

        - cast: the name of the table being broadcast
        - onto: the name of the table onto which "cast" is broadcast
        - cast_on: The optional name of a column on which to join.
          None if the table index will be used instead.
        - onto_on: The optional name of a column on which to join.
          None if the table index will be used instead.
        - cast_index: True if the table index should be used for the join.
        - onto_index: True if the table index should be used for the join.

    Parameters
    ----------
    cast_name : str
        The name of the table being broadcast.
    onto_name : str
        The name of the table onto which `cast_name` is broadcast.

    Returns
    -------
    broadcast : Broadcast

    """
    if is_broadcast(cast_name, onto_name):
        return _BROADCASTS[(cast_name, onto_name)]
    else:
        raise KeyError(
            'no rule found for broadcasting {!r} onto {!r}'.format(
                cast_name, onto_name))
0.000833
def makeRequests(callable_, args_list, callback=None, exc_callback=_handle_thread_exception): """Create several work requests for same callable with different arguments. Convenience function for creating several work requests for the same callable where each invocation of the callable receives different values for its arguments. ``args_list`` contains the parameters for each invocation of callable. Each item in ``args_list`` should be either a 2-item tuple of the list of positional arguments and a dictionary of keyword arguments or a single, non-tuple argument. See docstring for ``WorkRequest`` for info on ``callback`` and ``exc_callback``. """ requests = [] for item in args_list: if isinstance(item, tuple): requests.append( WorkRequest(callable_, item[0], item[1], callback=callback, exc_callback=exc_callback) ) else: requests.append( WorkRequest(callable_, [item], None, callback=callback, exc_callback=exc_callback) ) return requests
0.00085
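# A hypothetical usage sketch for makeRequests(), assuming the classic
# `threadpool` package whose ThreadPool exposes putRequest()/wait(). The worker
# and callback names here are illustrative, not part of the library.
import threadpool

def work(x):
    return x * x

def on_result(request, result):
    # called on the main thread with each finished WorkRequest
    print(request.requestID, '->', result)

pool = threadpool.ThreadPool(4)
for req in threadpool.makeRequests(work, [1, 2, 3, 4], callback=on_result):
    pool.putRequest(req)
pool.wait()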
def encode(hex): '''Encode hexadecimal string as base58 (ex: encoding a Monero address).''' data = _hexToBin(hex) l_data = len(data) if l_data == 0: return "" full_block_count = l_data // __fullBlockSize last_block_size = l_data % __fullBlockSize res_size = full_block_count * __fullEncodedBlockSize + __encodedBlockSizes[last_block_size] res = bytearray([__alphabet[0]] * res_size) for i in range(full_block_count): res = encode_block(data[(i*__fullBlockSize):(i*__fullBlockSize+__fullBlockSize)], res, i * __fullEncodedBlockSize) if last_block_size > 0: res = encode_block(data[(full_block_count*__fullBlockSize):(full_block_count*__fullBlockSize+last_block_size)], res, full_block_count * __fullEncodedBlockSize) return bytes(res).decode('ascii')
0.00486
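# Why the __encodedBlockSizes table used by encode() maps an 8-byte block to 11
# base58 digits: an n-byte block holds 8n bits and each base58 digit carries
# log2(58) ~ 5.86 bits, so the encoded length is ceil(8n * log 2 / log 58).
# This reproduces the usual Monero table.
import math

encoded_block_sizes = [int(math.ceil(8 * n * math.log(2) / math.log(58)))
                       for n in range(9)]
assert encoded_block_sizes == [0, 2, 3, 5, 6, 7, 9, 10, 11]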
def alias_ip_address(ip_address, interface, aws=False):
    """Adds an IP alias to a specific interface

    Adds an ip address as an alias to the specified interface on
    Linux systems.

    :param ip_address: (str) IP address to set as an alias
    :param interface: (str) The interface number or full device name, if
        an int is provided assumes the device name is eth<i>
    :param aws: (bool) True to perform additional AWS config
    :return: None
    """
    log = logging.getLogger(mod_logger + '.alias_ip_address')

    # Validate args
    if not isinstance(ip_address, str):
        msg = 'ip_address argument is not a string'
        log.error(msg)
        raise TypeError(msg)

    # Validate the IP address
    if not validate_ip_address(ip_address):
        msg = 'The provided IP address arg is invalid: {i}'.format(i=ip_address)
        log.error(msg)
        raise ValueError(msg)

    # Determine if the interface provided is a full device name
    try:
        int(interface)
    except ValueError:
        if isinstance(interface, str):
            device_name = str(interface)
            log.info('Full device name provided, will attempt to alias: {d}'.format(d=device_name))
        else:
            raise TypeError('Provided interface arg must be an int or str')
    else:
        device_name = 'eth{i}'.format(i=interface)
        log.info('Integer provided as interface, using device name: {d}'.format(d=device_name))

    # Add alias
    command = ['ifconfig', '{d}:0'.format(d=device_name), ip_address, 'up']
    log.info('Running command to bring up the alias: {c}'.format(c=' '.join(command)))
    try:
        result = run_command(command)
    except CommandError as ex:
        log.warning('CommandError: There was a problem running command: {c}\n{e}'.format(
            c=' '.join(command), e=str(ex)))
    else:
        log.info('Command produced output:\n{o}'.format(o=result['output']))
        if int(result['code']) != 0:
            log.warning('ifconfig up command produced exit code: {c} and output:\n{o}'.format(
                c=result['code'], o=result['output']))
        else:
            log.info('ifconfig up exited successfully')

    # Create interface file from the existing file
    base_ifcfg = os.path.abspath(os.path.join(os.sep, 'etc', 'sysconfig', 'network-scripts', 'ifcfg-{d}'.format(
        d=device_name)))
    alias_ifcfg = base_ifcfg + ':0'
    log.info('Creating interface config file: {f}'.format(f=alias_ifcfg))

    # Ensure the base config file exists
    if not os.path.isfile(base_ifcfg):
        raise OSError('Required interface config file not found: {f}'.format(f=base_ifcfg))
    else:
        log.info('Found base interface config file: {f}'.format(f=base_ifcfg))

    # Delete the existing interface file if it exists
    if os.path.isfile(alias_ifcfg):
        log.info('Alias interface configuration file already exists, removing: {f}'.format(f=alias_ifcfg))
        try:
            os.remove(alias_ifcfg)
        except OSError as ex:
            msg = 'OSError: There was a problem removing existing alias config file: {f}\n{e}'.format(
                f=alias_ifcfg, e=str(ex))
            raise OSError(msg) from ex
    else:
        log.info('No existing alias interface configuration exists yet: {f}'.format(f=alias_ifcfg))

    # Create the interface file
    log.info('Gathering entries from file: {f}...'.format(f=base_ifcfg))
    ifcfg_entries = {}
    try:
        with open(base_ifcfg, 'r') as f:
            for line in f:
                if '=' in line:
                    parts = line.split('=')
                    if len(parts) == 2:
                        parts[0] = parts[0].strip()
                        parts[1] = parts[1].replace('"', '').strip()
                        ifcfg_entries[parts[0]] = parts[1]
    except (IOError, OSError) as ex:
        msg = 'Unable to read file: {f}\n{e}'.format(f=base_ifcfg, e=str(ex))
        raise OSError(msg) from ex

    # Define the ifcfg file entries for the alias
    ifcfg_entries['IPADDR'] = '{d}:0'.format(d=device_name) if False else ip_address
    ifcfg_entries['IPADDR'] = ip_address
    ifcfg_entries['NETMASK'] = '255.255.255.0'
    ifcfg_entries['DEVICE'] = '{d}:0'.format(d=device_name)
    ifcfg_entries['NAME'] = '{d}:0'.format(d=device_name)

    log.info('Creating file: {f}'.format(f=alias_ifcfg))
    try:
        with open(alias_ifcfg, 'a') as f:
            for var, val in ifcfg_entries.items():
                out_str = str(var) + '="' + str(val) + '"\n'
                log.info('Adding entry to %s: %s', alias_ifcfg, out_str)
                f.write(out_str)
    except (IOError, OSError) as ex:
        msg = 'Unable to write to file: {f}\n{e}'.format(f=alias_ifcfg, e=str(ex))
        raise OSError(msg) from ex

    # Performing additional configuration for AWS
    if aws:
        log.info('Checking if this host is actually on AWS...')
        if is_aws():
            log.info('Performing additional configuration for AWS...')
            try:
                ec2 = EC2Util()
                ec2.add_secondary_ip(ip_address, interface)
            except EC2UtilError as ex:
                msg = 'Unable to instruct AWS to add a secondary IP address <{ip}> on interface <{d}>\n{e}'.format(
                    ip=ip_address, d=device_name, e=str(ex))
                raise OSError(msg) from ex
            else:
                log.info('AWS added the secondary IP address <{ip}> on interface <{d}>'.format(
                    ip=ip_address, d=device_name))
        else:
            log.warning('This system is not on AWS, not performing additional configuration')

    log.info('Restarting networking to ensure the changes take effect...')
    try:
        service_network_restart()
    except CommandError as ex:
        msg = 'CommandError: There was a problem restarting network services\n{e}'.format(e=str(ex))
        raise NetworkRestartError(msg) from ex

    # Verify the alias was created
    log.info('Verifying the alias was successfully created...')
    command = ['/sbin/ifconfig']
    try:
        result = run_command(command)
    except CommandError as ex:
        log.warning('CommandError: Unable to run ifconfig to verify the IP alias was created\n{e}'.format(e=str(ex)))
        return

    # Check for the alias
    if '{d}:0'.format(d=device_name) not in result['output']:
        log.warning('The alias was not created yet, system reboot may be required: {d}:0'.format(d=device_name))
    else:
        log.info('Alias created successfully!')
0.00283
def pprint(arr, columns=('temperature', 'luminosity'),
           names=('Temperature (Kelvin)', 'Luminosity (solar units)'),
           max_rows=32, precision=2):
    """
    Create a pandas DataFrame from a numpy ndarray.

    By default use temperature and luminosity with max rows of 32 and
    precision of 2.

    arr - A numpy.ndarray.
    columns - The columns to include in the pandas DataFrame. Defaults to
    temperature and luminosity.
    names - The column names for the pandas DataFrame. Defaults to
    Temperature and Luminosity.
    max_rows - If max_rows is an integer then set the pandas
    display.max_rows option to that value. If max_rows
    is True then set display.max_rows option to 1000.
    precision - An integer to set the pandas display.precision option.
    """
    if max_rows is True:
        pd.set_option('display.max_rows', 1000)
    elif type(max_rows) is int:
        pd.set_option('display.max_rows', max_rows)
    pd.set_option('display.precision', precision)
    df = pd.DataFrame(arr.flatten(), index=arr['id'].flatten(), columns=columns)
    df.columns = names
    return df.style.format({names[0]: '{:.0f}', names[1]: '{:.2f}'})
0.00081
def count_scts_in_sct_extension(certificate: cryptography.x509.Certificate) -> Optional[int]: """Return the number of Signed Certificate Timestamps (SCTs) embedded in the certificate. """ scts_count = 0 try: # Look for the x509 extension sct_ext = certificate.extensions.get_extension_for_oid( ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS ) if isinstance(sct_ext.value, cryptography.x509.UnrecognizedExtension): # The version of OpenSSL on the system is too old and can't parse the SCT extension return None # Count the number of entries in the extension scts_count = len(sct_ext.value) except ExtensionNotFound: pass return scts_count
0.007282
def on_client_connect(self, client_conn): """Inform client of build state and version on connect. Parameters ---------- client_conn : ClientConnection object The client connection that has been successfully established. Returns ------- Future that resolves when the device is ready to accept messages. """ assert get_thread_ident() == self._server.ioloop_thread_id self._client_conns.add(client_conn) self._strategies[client_conn] = {} # map sensors -> sampling strategies katcp_version = self.PROTOCOL_INFO.major if katcp_version >= VERSION_CONNECT_KATCP_MAJOR: client_conn.inform(Message.inform( "version-connect", "katcp-protocol", self.PROTOCOL_INFO)) client_conn.inform(Message.inform( "version-connect", "katcp-library", "katcp-python-%s" % katcp.__version__)) client_conn.inform(Message.inform( "version-connect", "katcp-device", self.version(), self.build_state())) else: client_conn.inform(Message.inform("version", self.version())) client_conn.inform(Message.inform("build-state", self.build_state()))
0.003125
def left_sections(self): """ The number of sections that touch the left side. During merging, the cell's text will grow to include other cells. This property keeps track of the number of sections that are touching the left side. For example:: +-----+-----+ section --> | foo | dog | <-- section +-----+-----+ section --> | cat | +-----+ Has 2 sections on the left, but 1 on the right Returns ------- sections : int The number of sections on the left """ lines = self.text.split('\n') sections = 0 for i in range(len(lines)): if lines[i].startswith('+'): sections += 1 sections -= 1 return sections
0.002328
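# A minimal standalone sketch of the counting rule implemented by the property
# above: sections touching the left side equal the number of lines starting
# with '+' minus one. The helper and sample cell text are illustrative only.
def count_left_sections(text):
    return sum(1 for line in text.split('\n') if line.startswith('+')) - 1

cell_text = ('+-----+-----+\n'
             '| foo | dog |\n'
             '+-----+-----+\n'
             '| cat |\n'
             '+-----+')
assert count_left_sections(cell_text) == 2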
def ping(daemon, channel, data=None): """ Process the 'ping' control message. :param daemon: The control daemon; used to get at the configuration and the database. :param channel: The publish channel to which to send the response. :param data: Optional extra data. Will be returned as the second argument of the response. Responds to the named channel with a command of 'pong' and with the node_name (if configured) and provided data as arguments. """ if not channel: # No place to reply to return # Get our configured node name node_name = daemon.config['control'].get('node_name') # Format the response reply = ['pong'] if node_name or data: reply.append(node_name or '') if data: reply.append(data) # And send it with utils.ignore_except(): daemon.db.publish(channel, ':'.join(reply))
0.00104
def get_time_from_rfc3339(rfc3339): """ return time tuple from an RFC 3339-formatted time string :param rfc3339: str, time in RFC 3339 format :return: float, seconds since the Epoch """ try: # py 3 dt = dateutil.parser.parse(rfc3339, ignoretz=False) return dt.timestamp() except NameError: # py 2 # Decode the RFC 3339 date with no fractional seconds (the # format Origin provides). Note that this will fail to parse # valid ISO8601 timestamps not in this exact format. time_tuple = strptime(rfc3339, '%Y-%m-%dT%H:%M:%SZ') return timegm(time_tuple)
0.001534
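# Usage sketch for get_time_from_rfc3339(), assuming the module-level imports
# (dateutil.parser, strptime, timegm) are present: 2020-01-01T00:00:00Z is
# exactly 1577836800 seconds after the Epoch on both the py3 and py2 paths.
assert get_time_from_rfc3339('2020-01-01T00:00:00Z') == 1577836800.0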
def file_exists(fname): """Check if a file exists and is non-empty. """ try: return fname and os.path.exists(fname) and os.path.getsize(fname) > 0 except OSError: return False
0.004831
def excepthook(exc_type, exc_value, tracebackobj):
    """
    Global function to catch unhandled exceptions.

    Parameters
    ----------
    exc_type : str
        exception type
    exc_value : int
        exception value
    tracebackobj : traceback
        traceback object
    """
    separator = "-" * 80
    notice = "The following error was triggered:"
    now = datetime.now().strftime("%Y-%m-%d, %H:%M:%S")

    info = StringIO()
    traceback.print_tb(tracebackobj, None, info)
    info.seek(0)
    info = info.read()

    errmsg = f"{exc_type}\t \n{exc_value}"
    sections = [now, separator, errmsg, separator, info]
    msg = "\n".join(sections)

    print(''.join(traceback.format_tb(tracebackobj)))
    print('{0}: {1}'.format(exc_type, exc_value))

    # traceback.format_exc() would be empty inside the hook (no exception is
    # being handled here), so show the message assembled above instead
    QtWidgets.QMessageBox.warning(None, notice, msg)
0.001189
def vertex_defects(mesh):
    """
    Return the vertex defects, or (2*pi) minus the sum of the angles
    of every face that includes that vertex.

    If a vertex is only included by coplanar triangles, this
    will be zero. For convex regions this is positive, and
    concave negative.

    Returns
    --------
    vertex_defect : (len(mesh.vertices), ) float
        Vertex defect at every vertex
    """
    angle_sum = np.asarray(mesh.face_angles_sparse.sum(axis=1)).flatten()
    defect = (2 * np.pi) - angle_sum
    return defect
0.001795
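# Usage sketch, assuming trimesh is installed (its Trimesh objects expose the
# face_angles_sparse matrix used above): by the Gauss-Bonnet theorem the
# defects of a closed genus-0 mesh sum to 4*pi, a handy sanity check.
import numpy as np
import trimesh

mesh = trimesh.creation.icosphere()
defects = vertex_defects(mesh)
assert np.allclose(defects.sum(), 4 * np.pi)
assert (defects > 0).all()  # a convex sphere has positive defect everywhere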
def register(app, uri, file_or_directory, pattern,
             use_modified_since, use_content_range):
    # TODO: Though mach9 is not a file server, I feel like we should at least
    #       make a good effort here. Modified-since is nice, but we could
    #       also look into etags, expires, and caching
    """
    Register a static directory handler with Mach9 by adding a route to the
    router and registering a handler.

    :param app: Mach9
    :param uri: URL to serve from
    :param file_or_directory: File or directory path to serve from
    :param pattern: regular expression used to match files in the URL
    :param use_modified_since: If true, send file modified time, and return
                               not modified if the browser's matches the
                               server's
    :param use_content_range: If true, process header for range requests
                              and sends the file part that is requested
    """
    # If we're not trying to match a file directly,
    # serve from the folder
    if not path.isfile(file_or_directory):
        uri += '<file_uri:' + pattern + '>'

    async def _handler(request, file_uri=None):
        # Using this to determine if the URL is trying to break out of the path
        # served. os.path.realpath seems to be very slow
        if file_uri and '../' in file_uri:
            raise InvalidUsage("Invalid URL")
        # Merge served directory and requested file if provided.
        # Strip all leading slashes from the URL so path.join does not
        # treat the uri as an absolute path
        root_path = file_path = file_or_directory
        if file_uri:
            file_path = path.join(
                file_or_directory, sub('^[/]*', '', file_uri))

        # URL decode the path sent by the browser otherwise we won't be able to
        # match filenames which got encoded (filenames with spaces etc)
        file_path = path.abspath(unquote(file_path))
        if not file_path.startswith(path.abspath(unquote(root_path))):
            raise FileNotFound('File not found',
                               path=file_or_directory,
                               relative_url=file_uri)
        try:
            headers = {}
            # Check if the client has been sent this file before
            # and it has not been modified since
            stats = None
            if use_modified_since:
                stats = await stat(file_path)
                modified_since = strftime(
                    '%a, %d %b %Y %H:%M:%S GMT', gmtime(stats.st_mtime))
                if request.headers.get('If-Modified-Since') == modified_since:
                    return HTTPResponse(status=304)
                headers['Last-Modified'] = modified_since
            _range = None
            if use_content_range:
                _range = None
                if not stats:
                    stats = await stat(file_path)
                headers['Accept-Ranges'] = 'bytes'
                headers['Content-Length'] = str(stats.st_size)
                if request.method != 'HEAD':
                    try:
                        _range = ContentRangeHandler(request, stats)
                    except HeaderNotFound:
                        pass
                    else:
                        del headers['Content-Length']
                        for key, value in _range.headers.items():
                            headers[key] = value
            if request.method == 'HEAD':
                return HTTPResponse(
                    headers=headers,
                    content_type=guess_type(file_path)[0] or 'text/plain')
            else:
                return await file(file_path, headers=headers, _range=_range)
        except ContentRangeError:
            raise
        except Exception:
            raise FileNotFound('File not found',
                               path=file_or_directory,
                               relative_url=file_uri)

    app.route(uri, methods=['GET', 'HEAD'])(_handler)
0.000246
def setup_address(self, name, address=default, transact={}): """ Set up the name to point to the supplied address. The sender of the transaction must own the name, or its parent name. Example: If the caller owns ``parentname.eth`` with no subdomains and calls this method with ``sub.parentname.eth``, then ``sub`` will be created as part of this call. :param str name: ENS name to set up :param str address: name will point to this address, in checksum format. If ``None``, erase the record. If not specified, name will point to the owner's address. :param dict transact: the transaction configuration, like in :meth:`~web3.eth.Eth.sendTransaction` :raises InvalidName: if ``name`` has invalid syntax :raises UnauthorizedError: if ``'from'`` in `transact` does not own `name` """ owner = self.setup_owner(name, transact=transact) self._assert_control(owner, name) if is_none_or_zero_address(address): address = None elif address is default: address = owner elif is_binary_address(address): address = to_checksum_address(address) elif not is_checksum_address(address): raise ValueError("You must supply the address in checksum format") if self.address(name) == address: return None if address is None: address = EMPTY_ADDR_HEX transact['from'] = owner resolver = self._set_resolver(name, transact=transact) return resolver.functions.setAddr(raw_name_to_hash(name), address).transact(transact)
0.003569
def _get_input_args(bam_file, data, out_base, background): """Retrieve input args, depending on genome build. VerifyBamID2 only handles GRCh37 (1, 2, 3) not hg19, so need to generate a pileup for hg19 and fix chromosome naming. """ if dd.get_genome_build(data) in ["hg19"]: return ["--PileupFile", _create_pileup(bam_file, data, out_base, background)] else: return ["--BamFile", bam_file]
0.004662
def make_cloud_mlengine_request_fn(credentials, model_name, version): """Wraps function to make CloudML Engine requests with runtime args.""" def _make_cloud_mlengine_request(examples): """Builds and sends requests to Cloud ML Engine.""" api = discovery.build("ml", "v1", credentials=credentials) parent = "projects/%s/models/%s/versions/%s" % (cloud.default_project(), model_name, version) input_data = { "instances": [{ # pylint: disable=g-complex-comprehension "input": { "b64": base64.b64encode(ex.SerializeToString()) } } for ex in examples] } prediction = api.projects().predict(body=input_data, name=parent).execute() return prediction["predictions"] return _make_cloud_mlengine_request
0.004773
def _fits_inside_predicate(self): """ Return a function taking an integer point size argument that returns |True| if the text in this fitter can be wrapped to fit entirely within its extents when rendered at that point size. """ def predicate(point_size): """ Return |True| if the text in *line_source* can be wrapped to fit entirely within *extents* when rendered at *point_size* using the font defined in *font_file*. """ text_lines = self._wrap_lines(self._line_source, point_size) cy = _rendered_size('Ty', point_size, self._font_file)[1] return (cy * len(text_lines)) <= self._height return predicate
0.002639
def _getAuthHeaders(self):
    """
    Get authentication headers.

    If valid header data is already available, it is returned
    immediately. If not, then get new authentication data. If we are
    currently in the process of getting the header data, put this
    request into a queue to be handled when the data are received.

    @returns: A deferred that will eventually be called back with the
        header data
    """
    def _handleAuthBody(body):
        self.msg("_handleAuthBody: %(body)s", body=body)
        try:
            body_parsed = json.loads(body)
            access_token = body_parsed['access']['token']

            tenant_id = access_token['tenant']['id'].encode('ascii')
            auth_token = access_token['id'].encode('ascii')

            self.auth_headers["X-Tenant-Id"] = tenant_id
            self.auth_headers["X-Auth-Token"] = auth_token

            self._state = self.AUTHENTICATED

            self.msg("_handleAuthHeaders: found token %(token)s"
                     " tenant id %(tenant_id)s",
                     token=self.auth_headers["X-Auth-Token"],
                     tenant_id=self.auth_headers["X-Tenant-Id"])

            # Callback all queued auth headers requests
            while not self._headers_requests.empty():
                self._headers_requests.get().callback(self.auth_headers)
        except ValueError:
            # We received a bad response
            return fail(MalformedJSONError("Malformed keystone"
                                           " response received."))

    def _handleAuthResponse(response):
        if response.code == httplib.OK:
            self.msg("_handleAuthResponse: %(response)s accepted",
                     response=response)
            body = Deferred()
            response.deliverBody(StringIOReceiver(body))
            body.addCallback(_handleAuthBody)
            return body
        else:
            self.msg("_handleAuthResponse: %(response)s rejected",
                     response=response)
            return fail(
                KeystoneAuthenticationError("Keystone"
                                            " authentication credentials"
                                            " rejected"))

    self.msg("_getAuthHeaders: state is %(state)s", state=self._state)
    if self._state == self.AUTHENTICATED:
        # We are authenticated, immediately succeed with the current
        # auth headers
        self.msg("_getAuthHeaders: succeed with %(headers)s",
                 headers=self.auth_headers)
        return succeed(self.auth_headers)
    elif (self._state == self.NOT_AUTHENTICATED or
          self._state == self.AUTHENTICATING):
        # We cannot satisfy the auth header request immediately,
        # put it in a queue
        self.msg("_getAuthHeaders: defer, place in queue")
        auth_headers_deferred = Deferred()
        self._headers_requests.put(auth_headers_deferred)

        if self._state == self.NOT_AUTHENTICATED:
            self.msg("_getAuthHeaders: not authenticated, start"
                     " authentication process")
            # We are not authenticated, and not in the process of
            # authenticating.
            # Set our state to AUTHENTICATING and begin the
            # authentication process
            self._state = self.AUTHENTICATING

            d = self.agent.request('POST', self.auth_url,
                                   Headers({
                                       "Content-type": ["application/json"]
                                   }),
                                   self._getAuthRequestBodyProducer())
            d.addCallback(_handleAuthResponse)
            d.addErrback(auth_headers_deferred.errback)

        return auth_headers_deferred
    else:
        # Bad state, fail
        return fail(RuntimeError("Invalid state encountered."))
0.000473
def load(path): """Helper function that tries to load a filepath (or python module notation) as a python module and on failure `exec` it. Args: path (str): Path or module to load The function tries to import `example.module` when either `example.module`, `example/module` or `example/module.py` is given. """ importpath = path.replace("/", ".").replace("\\", ".") if importpath[-3:] == ".py": importpath = importpath[:-3] try: importlib.import_module(importpath) except (ModuleNotFoundError, TypeError): exec(open(path).read())
0.003317
def _get_rho(self, v): """ convert unit-cell volume in A^3 to density in g/cm^3 :param v: unit cell volume in A^3 :return: density in g/cm^3 :note: internal function """ v_mol = vol_uc2mol(v, self.z) # in m^3 rho = self.mass / v_mol * 1.e-6 # in g/cm^3 return rho
0.0059
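# A standalone restatement of the conversion above (vol_uc2mol is not shown),
# under the usual assumption rho = Z * M / (N_A * V), converting V from A^3 to
# cm^3 (1 A^3 = 1e-24 cm^3). The helper name and rock-salt numbers (Z = 4,
# M = 58.44 g/mol, a = 5.64 A) are illustrative.
AVOGADRO = 6.02214076e23  # 1/mol

def density_from_cell(volume_A3, molar_mass, z):
    v_cm3 = volume_A3 * 1.e-24
    return z * molar_mass / (AVOGADRO * v_cm3)  # g/cm^3

rho_nacl = density_from_cell(5.64 ** 3, 58.44, 4)
assert abs(rho_nacl - 2.16) < 0.02  # literature value ~2.165 g/cm^3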
def purge_items(self):
    """Remove purged and overlimit items from the cache.

    TODO: optimize somehow.

    Leave no more than 75% of `self.max_items` items in the cache."""
    self._lock.acquire()
    try:
        il = self._items_list
        num_items = len(il)
        need_remove = num_items - int(0.75 * self.max_items)
        for _unused in range(need_remove):
            item = il.pop(0)
            try:
                del self._items[item.address]
            except KeyError:
                pass

        while il and il[0].update_state() == "purged":
            item = il.pop(0)
            try:
                del self._items[item.address]
            except KeyError:
                pass
    finally:
        self._lock.release()
0.00716
def _get_stm_with_branches(stm_it): """ :return: first statement with rank > 0 or None if iterator empty """ last = None while last is None or last.rank == 0: try: last = next(stm_it) except StopIteration: last = None break return last
0.003205
def flatten(l):
    """Flatten a nested list or tuple into a flat list."""
    return sum(map(flatten, l), []) \
        if isinstance(l, (list, tuple)) else [l]
0.013333
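# Usage sketch: flatten() above recurses into both lists and tuples and wraps
# scalars, so sum(..., []) concatenates the pieces back into one list.
assert flatten([1, [2, [3, 4]], (5,)]) == [1, 2, 3, 4, 5]
assert flatten(7) == [7]  # a bare scalar comes back wrapped in a list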
def server_hardware(self): """ Gets the ServerHardware API client. Returns: ServerHardware: """ if not self.__server_hardware: self.__server_hardware = ServerHardware(self.__connection) return self.__server_hardware
0.006944
def unflatten(flat_dict, separator='_'): """ Creates a hierarchical dictionary from a flattened dictionary Assumes no lists are present :param flat_dict: a dictionary with no hierarchy :param separator: a string that separates keys :return: a dictionary with hierarchy """ _unflatten_asserts(flat_dict, separator) # This global dictionary is mutated and returned unflattened_dict = dict() def _unflatten(dic, keys, value): for key in keys[:-1]: dic = dic.setdefault(key, {}) dic[keys[-1]] = value for item in flat_dict: _unflatten(unflattened_dict, item.split(separator), flat_dict[item]) return unflattened_dict
0.001418
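# Usage sketch for unflatten(): keys are split on the separator and rebuilt
# into nested dicts. This assumes the module's _unflatten_asserts helper (not
# shown above) only validates the input.
nested = unflatten({'a_b': 1, 'a_c': 2, 'd': 3})
assert nested == {'a': {'b': 1, 'c': 2}, 'd': 3}

# A custom separator works the same way
assert unflatten({'x.y': 0}, separator='.') == {'x': {'y': 0}}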
def actionAngleStaeckel_c(pot,delta,R,vR,vT,z,vz,u0=None,order=10):
    """
    NAME:
       actionAngleStaeckel_c
    PURPOSE:
       Use C to calculate actions using the Staeckel approximation
    INPUT:
       pot - Potential or list of such instances
       delta - focal length of prolate spheroidal coordinates
       R, vR, vT, z, vz - coordinates (arrays)
       u0= (None) if set, u0 to use
       order= (10) order of Gauss-Legendre integration of the relevant integrals
    OUTPUT:
       (jr,jz,err)
       jr,jz : array, shape (len(R))
       err - non-zero if error occurred
    HISTORY:
       2012-12-01 - Written - Bovy (IAS)
    """
    if u0 is None:
        u0, dummy= bovy_coords.Rz_to_uv(R,z,delta=numpy.atleast_1d(delta))
    #Parse the potential
    from galpy.orbit.integrateFullOrbit import _parse_pot
    npot, pot_type, pot_args= _parse_pot(pot,potforactions=True)
    #Parse delta
    delta= numpy.atleast_1d(delta)
    ndelta= len(delta)
    #Set up result arrays
    jr= numpy.empty(len(R))
    jz= numpy.empty(len(R))
    err= ctypes.c_int(0)
    #Set up the C code
    ndarrayFlags= ('C_CONTIGUOUS','WRITEABLE')
    actionAngleStaeckel_actionsFunc= _lib.actionAngleStaeckel_actions
    actionAngleStaeckel_actionsFunc.argtypes= [ctypes.c_int,
                                               ndpointer(dtype=numpy.float64,flags=ndarrayFlags),
                                               ndpointer(dtype=numpy.float64,flags=ndarrayFlags),
                                               ndpointer(dtype=numpy.float64,flags=ndarrayFlags),
                                               ndpointer(dtype=numpy.float64,flags=ndarrayFlags),
                                               ndpointer(dtype=numpy.float64,flags=ndarrayFlags),
                                               ndpointer(dtype=numpy.float64,flags=ndarrayFlags),
                                               ctypes.c_int,
                                               ndpointer(dtype=numpy.int32,flags=ndarrayFlags),
                                               ndpointer(dtype=numpy.float64,flags=ndarrayFlags),
                                               ctypes.c_int,
                                               ndpointer(dtype=numpy.float64,flags=ndarrayFlags),
                                               ctypes.c_int,
                                               ndpointer(dtype=numpy.float64,flags=ndarrayFlags),
                                               ndpointer(dtype=numpy.float64,flags=ndarrayFlags),
                                               ctypes.POINTER(ctypes.c_int)]
    #Array requirements, first store old order
    f_cont= [R.flags['F_CONTIGUOUS'],
             vR.flags['F_CONTIGUOUS'],
             vT.flags['F_CONTIGUOUS'],
             z.flags['F_CONTIGUOUS'],
             vz.flags['F_CONTIGUOUS'],
             u0.flags['F_CONTIGUOUS'],
             delta.flags['F_CONTIGUOUS']]
    R= numpy.require(R,dtype=numpy.float64,requirements=['C','W'])
    vR= numpy.require(vR,dtype=numpy.float64,requirements=['C','W'])
    vT= numpy.require(vT,dtype=numpy.float64,requirements=['C','W'])
    z= numpy.require(z,dtype=numpy.float64,requirements=['C','W'])
    vz= numpy.require(vz,dtype=numpy.float64,requirements=['C','W'])
    u0= numpy.require(u0,dtype=numpy.float64,requirements=['C','W'])
    delta= numpy.require(delta,dtype=numpy.float64,requirements=['C','W'])
    jr= numpy.require(jr,dtype=numpy.float64,requirements=['C','W'])
    jz= numpy.require(jz,dtype=numpy.float64,requirements=['C','W'])
    #Run the C code
    actionAngleStaeckel_actionsFunc(len(R),
                                    R,
                                    vR,
                                    vT,
                                    z,
                                    vz,
                                    u0,
                                    ctypes.c_int(npot),
                                    pot_type,
                                    pot_args,
                                    ctypes.c_int(ndelta),
                                    delta,
                                    ctypes.c_int(order),
                                    jr,
                                    jz,
                                    ctypes.byref(err))
    #Reset input arrays
    if f_cont[0]: R= numpy.asfortranarray(R)
    if f_cont[1]: vR= numpy.asfortranarray(vR)
    if f_cont[2]: vT= numpy.asfortranarray(vT)
    if f_cont[3]: z= numpy.asfortranarray(z)
    if f_cont[4]: vz= numpy.asfortranarray(vz)
    if f_cont[5]: u0= numpy.asfortranarray(u0)
    if f_cont[6]: delta= numpy.asfortranarray(delta)
    return (jr,jz,err.value)
0.023831
def per_section(it, is_delimiter=lambda x: x.isspace()): """ From http://stackoverflow.com/a/25226944/610569 """ ret = [] for line in it: if is_delimiter(line): if ret: yield ret # OR ''.join(ret) ret = [] else: ret.append(line.rstrip()) # OR ret.append(line) if ret: yield ret
0.002597
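# Usage sketch: per_section() groups an iterable of lines into sections split
# on whitespace-only lines. Note the default delimiter uses str.isspace(), so
# an empty string '' does not delimit; keep the trailing newline on each line,
# as when iterating over a file object.
lines = ['alpha\n', 'beta\n', '\n', 'gamma\n']
assert list(per_section(lines)) == [['alpha', 'beta'], ['gamma']]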
def state(self, value): """Set device state. :type value: str :param value: Future state (either ON or OFF) """ if value.upper() == ON: return self.SOAPAction('SetSocketSettings', 'SetSocketSettingsResult', self.controlParameters("1", "true")) elif value.upper() == OFF: return self.SOAPAction('SetSocketSettings', 'SetSocketSettingsResult', self.controlParameters("1", "false")) else: raise TypeError("State %s is not valid." % str(value))
0.007505
def create_data_and_metadata_from_data(self, data: numpy.ndarray, intensity_calibration: CalibrationModule.Calibration=None, dimensional_calibrations: typing.List[CalibrationModule.Calibration]=None, metadata: dict=None, timestamp: str=None) -> DataAndMetadata.DataAndMetadata: """Create a data_and_metadata object from data. .. versionadded:: 1.0 .. deprecated:: 1.1 Use :py:meth:`~nion.swift.Facade.DataItem.create_data_and_metadata` instead. Scriptable: No """ return self.create_data_and_metadata(numpy.copy(data), intensity_calibration, dimensional_calibrations, metadata, timestamp)
0.020031
def _check_configs(self): """ Reloads the configuration files. """ configs = set(self._find_configs()) known_configs = set(self.configs.keys()) new_configs = configs - known_configs for cfg in (known_configs - configs): self.log.debug("Compass configuration has been removed: " + cfg) del self.configs[cfg] for cfg in new_configs: self.log.debug("Found new compass configuration: " + cfg) self.configs[cfg] = CompassConfig(cfg)
0.003711
def main(*argv):
    """ main driver of program """
    try:
        adminUsername = str(argv[0])
        adminPassword = str(argv[1])
        baseURL = str(argv[2])  # "https://www.arcgis.com/sharing/rest"
        inviteSubject = str(argv[3])
        inviteEmail = str(argv[4])
        newUserName = argv[5]
        firstName = argv[6]
        lastName = argv[7]
        password = argv[8]
        email = argv[9]
        account_role = argv[10]
        #   Local Variables
        #
        isAdded = True
        #   Logic
        #
        #  Access AGOL
        #
        sh = arcrest.AGOLTokenSecurityHandler(adminUsername, adminPassword)
        userInvite = arcrest.manageorg.UserInvite(username=newUserName,
                                                  password=password,
                                                  firstName=firstName,
                                                  lastName=lastName,
                                                  email=email,
                                                  role=account_role)
        admin = arcrest.manageorg.Administration(securityHandler=sh,
                                                 initialize=True)
        #  Get the Org ID
        #
        community = admin.community
        user = community.user
        userDetails = user.user(username=adminUsername)
        orgID = userDetails['orgId']
        #  Access the Admin's Portal to Add User
        #
        portal = admin.portals(portalId=orgID)
        #  Validate the username to ensure it's free
        #  If it is not, raise error, else continue
        res = community.checkUserName(newUserName)
        if res['usernames'][0]['suggested'] != res['usernames'][0]['requested']:
            arcpy.AddError("Username %s is already taken" % newUserName)
            del community
        #  Add the User
        #
        res = portal.inviteUser(invitationList=userInvite,
                                html="welcome to the group",
                                subject="user invite to AGOL")
        for msg in res['notInvited']:
            arcpy.AddWarning("%s was not invited" % msg)
            isAdded = False

        del sh
        del portal
        del res
        arcpy.SetParameterAsText(11, isAdded)
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    except FunctionError as f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    except:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
0.003158
def __prepare_histogram(h1, h2): """Convert the histograms to scipy.ndarrays if required.""" h1 = h1 if scipy.ndarray == type(h1) else scipy.asarray(h1) h2 = h2 if scipy.ndarray == type(h2) else scipy.asarray(h2) if h1.shape != h2.shape or h1.size != h2.size: raise ValueError('h1 and h2 must be of same shape and size') return h1, h2
0.002762
def is_static_etcd(self): '''Determine if we are on a node running etcd''' return os.path.exists(os.path.join(self.static_pod_dir, "etcd.yaml"))
0.0125
def top3_full(votes):
    """
    Description:
        Compute the q = m(m - 1) + 2m moment condition values for the
        top m - 1 alternatives
    Parameters:
        votes: ordinal preference data (numpy ndarray of integers)
    """
    # create array of zeros, length = q
    res = np.zeros(2 * len(votes[0]) + (len(votes[0]) * (len(votes[0]) - 1)))

    # iterate through each vote
    for vote in votes:
        # set verification boolean to true
        ver = True

        # check if vote belongs to a c1 < c2 < c3, c2 < c3 < c1... moment
        for i in range(0, len(votes[0])):
            if vote[i][0] != vote[i - 1][0] + 1 and vote[i][0] != 0:
                ver = False
                break
        if ver:
            res[len(votes[0]) + (len(votes[0]) * (len(votes[0]) - 1)) + vote[0][0]] += 1

        # increment moment of top ranked choice ranked at the top
        res[vote[0][0]] += 1

        # top two moment
        add = 0
        if vote[0][0] > vote[1][0]:
            add = 1
        res[(vote[0][0] + 1) * (len(votes[0]) - 1) + add + vote[1][0]] += 1

    res /= len(votes)  # normalize moments

    return res
0.010686
def hit_count(self, request, hitcount):
    """
    Called with a HttpRequest and HitCount object it will return a
    namedtuple:

    UpdateHitCountResponse(hit_counted=Boolean, hit_message='Message').

    `hit_counted` will be True if the hit was counted and False if it was
    not.  `hit_message` will indicate by what means the Hit was either
    counted or ignored.
    """
    UpdateHitCountResponse = namedtuple(
        'UpdateHitCountResponse', 'hit_counted hit_message')

    # as of Django 1.8.4 empty sessions are not being saved
    # https://code.djangoproject.com/ticket/25489
    if request.session.session_key is None:
        request.session.save()

    user = request.user
    try:
        is_authenticated_user = user.is_authenticated()
    except TypeError:
        # newer Django versions expose is_authenticated as a property
        is_authenticated_user = user.is_authenticated

    session_key = request.session.session_key
    ip = get_ip(request)
    user_agent = request.META.get('HTTP_USER_AGENT', '')[:255]
    hits_per_ip_limit = getattr(settings, 'HITCOUNT_HITS_PER_IP_LIMIT', 0)
    exclude_user_group = getattr(settings, 'HITCOUNT_EXCLUDE_USER_GROUP', None)

    # first, check our request against the IP blacklist
    if BlacklistIP.objects.filter(ip__exact=ip):
        return UpdateHitCountResponse(
            False, 'Not counted: user IP has been blacklisted')

    # second, check our request against the user agent blacklist
    if BlacklistUserAgent.objects.filter(user_agent__exact=user_agent):
        return UpdateHitCountResponse(
            False, 'Not counted: user agent has been blacklisted')

    # third, see if we are excluding a specific user group or not
    if exclude_user_group and is_authenticated_user:
        if user.groups.filter(name__in=exclude_user_group):
            return UpdateHitCountResponse(
                False, 'Not counted: user excluded by group')

    # eliminated first three possible exclusions, now on to checking our
    # database of active hits to see if we should count another one

    # start with a fresh active query set (HITCOUNT_KEEP_HIT_ACTIVE)
    qs = Hit.objects.filter_active()

    # check limit on hits from a unique ip address (HITCOUNT_HITS_PER_IP_LIMIT)
    if hits_per_ip_limit:
        if qs.filter(ip__exact=ip).count() >= hits_per_ip_limit:
            return UpdateHitCountResponse(
                False, 'Not counted: hits per IP address limit reached')

    # create a generic Hit object with request data
    hit = Hit(session=session_key, hitcount=hitcount, ip=get_ip(request),
              user_agent=request.META.get('HTTP_USER_AGENT', '')[:255],)

    # first, use a user's authentication to see if they made an earlier hit
    if is_authenticated_user:
        if not qs.filter(user=user, hitcount=hitcount):
            hit.user = user  # associate this hit with a user
            hit.save()

            response = UpdateHitCountResponse(
                True, 'Hit counted: user authentication')
        else:
            response = UpdateHitCountResponse(
                False, 'Not counted: authenticated user has active hit')

    # if not authenticated, see if we have a repeat session
    else:
        if not qs.filter(session=session_key, hitcount=hitcount):
            hit.save()
            response = UpdateHitCountResponse(
                True, 'Hit counted: session key')
        else:
            response = UpdateHitCountResponse(
                False, 'Not counted: session key has active hit')

    return response
0.001596
def update(self, obj, data):
    """Helper function to update an already existing document
    instead of creating a new one.
    :param obj: Mongoengine Document to update
    :param data: incoming payload to deserialize
    :return: an :class:`UnmarshalResult`

    Example: ::

        from marshmallow_mongoengine import ModelSchema
        from mymodels import User

        class UserSchema(ModelSchema):
            class Meta:
                model = User

        def update_obj(id, payload):
            user = User.objects(id=id).first()
            result = UserSchema().update(user, payload)
            result.data is user # True

    Note: Given the update is done on an existing object, the required
    param on the fields is ignored
    """
    # TODO: find a cleaner way to skip required validation on update
    required_fields = [k for k, f in self.fields.items() if f.required]
    for field in required_fields:
        self.fields[field].required = False
    loaded_data, errors = self._do_load(data, postprocess=False)
    for field in required_fields:
        self.fields[field].required = True
    if not errors:
        # Update the given obj fields
        for k, v in loaded_data.items():
            # Skip default values that have been automatically
            # added during unserialization
            if k in data:
                setattr(obj, k, v)
    return ma.UnmarshalResult(data=obj, errors=errors)
0.001316
def subsample(self, factor): """ Downsample images by an integer factor. Parameters ---------- factor : positive int or tuple of positive ints Stride to use in subsampling. If a single int is passed, each dimension of the image will be downsampled by this factor. If a tuple is passed, each dimension will be downsampled by the given factor. """ value_shape = self.value_shape ndims = len(value_shape) if not hasattr(factor, '__len__'): factor = [factor] * ndims factor = [int(sf) for sf in factor] if any((sf <= 0 for sf in factor)): raise ValueError('All sampling factors must be positive; got ' + str(factor)) def roundup(a, b): return (a + b - 1) // b slices = [slice(0, value_shape[i], factor[i]) for i in range(ndims)] new_value_shape = tuple([roundup(value_shape[i], factor[i]) for i in range(ndims)]) return self.map(lambda v: v[slices], value_shape=new_value_shape)
0.004669
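# A worked sketch of the stride/shape logic above: slicing with step f keeps
# ceil(n / f) entries per axis, which is exactly what roundup() computes.
import numpy as np

img = np.arange(30).reshape(5, 6)
factor = (2, 3)
sub = img[tuple(slice(0, s, f) for s, f in zip(img.shape, factor))]
assert sub.shape == ((5 + 2 - 1) // 2, (6 + 3 - 1) // 3)  # (3, 2)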
def get_album_by_mbid(self, mbid): """Looks up an album by its MusicBrainz ID""" params = {"mbid": mbid} doc = _Request(self, "album.getInfo", params).execute(True) return Album(_extract(doc, "artist"), _extract(doc, "name"), self)
0.007519
def response_from_prediction(self, y_pred, single=True): """Turns a model's prediction in *y_pred* into a JSON response. """ result = y_pred.tolist() if single: result = result[0] response = { 'metadata': get_metadata(), 'result': result, } return make_ujson_response(response, status_code=200)
0.005025
def locate_bar_r(icut, epos): """Fine position of the right CSU bar""" sm = len(icut) def swap_coor(x): return sm - 1 - x def swap_line(tab): return tab[::-1] return _locate_bar_gen(icut, epos, transform1=swap_coor, transform2=swap_line)
0.0033
def writeto(self, filename, **kwargs):
    """
    Write the header to a fits file.

    :param filename: path of the fits file to write
    :return: None
    """
    fits.PrimaryHDU(header=self).writeto(filename, output_verify='ignore', **kwargs)
0.012821
def version(self, value): """ Save the Site's version from a string or version tuple @type value: tuple or str """ if isinstance(value, tuple): value = unparse_version(value) self._version = value
0.007692
def initialize(self,*args,**kwargs): """ Only try to parse as JSON if the JSON content type header is set. """ super(JSONHandler,self).initialize(*args,**kwargs) content_type = self.request.headers.get('Content-Type', '') if 'application/json' in content_type.lower(): self._parse_json_body_arguments()
0.016216