code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class SolarSchedule(models.Model): <NEW_LINE> <INDENT> EVENTS = { 'dawn_astronomical': { 'method': 'next_rising', 'horizon': '-18', 'use_center': True}, 'dawn_nautical': { 'method': 'next_rising', 'horizon': '-12', 'use_center': True}, 'dawn_civil': { 'method': 'next_rising', 'horizon': '-6', 'use_center': True}, 'sunrise': { 'method': 'next_rising', 'horizon': '-0:34', 'use_center': False}, 'solar_noon': { 'method': 'next_transit', 'horizon': '0', 'use_center': False}, 'sunset': { 'method': 'next_setting', 'horizon': '-0:34', 'use_center': False}, 'dusk_civil': { 'method': 'next_setting', 'horizon': '-6', 'use_center': True}, 'dusk_nautical': { 'method': 'next_setting', 'horizon': '-12', 'use_center': True}, 'dusk_astronomical': { 'method': 'next_setting', 'horizon': '18', 'use_center': True}, } <NEW_LINE> EVENT_CHOICES = [(key, ' '.join(key.split('_')).title()) for key in EVENTS.keys()] <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ('event',) <NEW_LINE> <DEDENT> event = models.CharField( max_length=18, unique=True, choices=EVENT_CHOICES, help_text='Solar event type') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.get_event_display() <NEW_LINE> <DEDENT> def calendar(self): <NEW_LINE> <INDENT> cal = ephem.Observer() <NEW_LINE> cal.lat = str(settings.X10_LATITUDE) <NEW_LINE> cal.lon = str(settings.X10_LONGITUDE) <NEW_LINE> cal.elev = 0 <NEW_LINE> cal.horizon = SolarSchedule.EVENTS[self.event]['horizon'] <NEW_LINE> cal.pressure = 0 <NEW_LINE> return cal <NEW_LINE> <DEDENT> def next_time(self, current_time: datetime = now()): <NEW_LINE> <INDENT> event = SolarSchedule.EVENTS[self.event] <NEW_LINE> cal = self.calendar() <NEW_LINE> current_time = current_time.astimezone(pytz.utc) <NEW_LINE> func = getattr(cal, event['method']) <NEW_LINE> if event['use_center']: <NEW_LINE> <INDENT> next_time = func(ephem.Sun(), start=current_time, use_center=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> next_time = func(ephem.Sun(), start=current_time) 
<NEW_LINE> <DEDENT> next_utc = next_time.datetime().replace(tzinfo=pytz.utc) <NEW_LINE> return next_utc
Model to represent and calculate times for solar events.
62599089283ffb24f3cf547b
@dataclass <NEW_LINE> class CE_Person(Person, CE_BaseModel): <NEW_LINE> <INDENT> def __init__(self, identifier: str, name: str, url: str, contributor: str, creator: str, title: str, source: str): <NEW_LINE> <INDENT> CE_BaseModel.__init__(self, identifier, name, url, contributor, creator) <NEW_LINE> self.title = title <NEW_LINE> self.source = source <NEW_LINE> <DEDENT> def as_dict(self): <NEW_LINE> <INDENT> d = {"title": self.title, "contributor": self.contributor, "creator": self.creator, "format_": self.format, "name": self.name, "family_name": self.familyName, "given_name": self.givenName, "description": self.description, "image": self.image, "publisher": self.publisher, "honorific_prefix": self.honorificPrefix, "honorific_suffix": self.honorificSuffix, "gender": self.gender, "job_title": self.jobTitle, "language": self.language, "birth_date": self.birthDate, "death_date": self.deathDate, "source": self.source, } <NEW_LINE> if self.identifier is not None: <NEW_LINE> <INDENT> d['identifier'] = self.identifier <NEW_LINE> <DEDENT> return d
Trompa Person model Inherits from schema.org Person
62599089656771135c48ae1e
class XmlSmartyLexer(DelegatingLexer): <NEW_LINE> <INDENT> name = 'XML+Smarty' <NEW_LINE> aliases = ['xml+smarty'] <NEW_LINE> alias_filenames = ['*.xml', '*.tpl'] <NEW_LINE> mimetypes = ['application/xml+smarty'] <NEW_LINE> def __init__(self, **options): <NEW_LINE> <INDENT> super().__init__(XmlLexer, SmartyLexer, **options) <NEW_LINE> <DEDENT> def analyse_text(text): <NEW_LINE> <INDENT> rv = SmartyLexer.analyse_text(text) - 0.01 <NEW_LINE> if looks_like_xml(text): <NEW_LINE> <INDENT> rv += 0.4 <NEW_LINE> <DEDENT> return rv
Subclass of the `SmartyLexer` that highlights unlexed data with the `XmlLexer`.
6259908950812a4eaa6219b3
class WebMaster(RSSSingleElement): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> RSSSingleElement.__init__(self, value, 'webMaster')
Email address for person responsible for technical issues relating to channel Example: WebMaster('[email protected] (John Smith)')
62599089283ffb24f3cf547c
@ejit <NEW_LINE> class OverflowError(ArithmeticError): <NEW_LINE> <INDENT> pass
Result too large to be represented.
6259908992d797404e38994a
class _CommandFemMeshNetgenFromShape(CommandManager): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(_CommandFemMeshNetgenFromShape, self).__init__() <NEW_LINE> self.resources = {'Pixmap': 'fem-femmesh-netgen-from-shape', 'MenuText': QtCore.QT_TRANSLATE_NOOP("FEM_MeshNetgenFromShape", "FEM mesh from shape by Netgen"), 'ToolTip': QtCore.QT_TRANSLATE_NOOP("FEM_MeshNetgenFromShape", "Create a FEM volume mesh from a solid or face shape by Netgen internal mesher")} <NEW_LINE> self.is_active = 'with_part_feature' <NEW_LINE> <DEDENT> def Activated(self): <NEW_LINE> <INDENT> FreeCAD.ActiveDocument.openTransaction("Create FEM mesh Netgen") <NEW_LINE> mesh_obj_name = 'FEMMeshNetgen' <NEW_LINE> FreeCADGui.addModule("ObjectsFem") <NEW_LINE> FreeCADGui.doCommand("ObjectsFem.makeMeshNetgen(FreeCAD.ActiveDocument, '" + mesh_obj_name + "')") <NEW_LINE> FreeCADGui.doCommand("FreeCAD.ActiveDocument.ActiveObject.Shape = FreeCAD.ActiveDocument." + self.selobj.Name) <NEW_LINE> import FemGui <NEW_LINE> if FemGui.getActiveAnalysis(): <NEW_LINE> <INDENT> FreeCADGui.addModule("FemGui") <NEW_LINE> FreeCADGui.doCommand("FemGui.getActiveAnalysis().addObject(FreeCAD.ActiveDocument.ActiveObject)") <NEW_LINE> <DEDENT> FreeCADGui.doCommand("FreeCADGui.ActiveDocument.setEdit(FreeCAD.ActiveDocument.ActiveObject.Name)") <NEW_LINE> FreeCADGui.Selection.clearSelection()
The FEM_MeshNetgenFromShape command definition
625990893617ad0b5ee07d2e
class UnsafeContentSource(BaseCSPIssue): <NEW_LINE> <INDENT> def getIssueName(self): <NEW_LINE> <INDENT> return "Unsafe Content Source: %s" % self._directive <NEW_LINE> <DEDENT> def getIssueDetail(self): <NEW_LINE> <INDENT> return "This content security policy allows for unsafe content sources" <NEW_LINE> <DEDENT> def getRemediationDetail(self): <NEW_LINE> <INDENT> return "Refactor the website to remove inline JavaScript and CSS"
Any directive that allows unsafe content (e.g. 'unsafe-eval')
62599089aad79263cf430397
class LBAL(chainer.Chain): <NEW_LINE> <INDENT> def __init__(self, n_class: int, med_dim: int=1024): <NEW_LINE> <INDENT> super(LBAL, self).__init__() <NEW_LINE> with self.init_scope(): <NEW_LINE> <INDENT> self.med_fc = links.Linear( None, med_dim, initialW=initializers.Normal(scale=0.01)) <NEW_LINE> self.bn = links.BatchNormalization(med_dim) <NEW_LINE> self.tail_fc = links.Linear( med_dim, n_class, initialW=initializers.Normal(scale=0.01)) <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, x: chainer.Variable) -> chainer.Variable: <NEW_LINE> <INDENT> h = functions.relu(self.bn(self.med_fc(x))) <NEW_LINE> return self.tail_fc(h)
Chain to feed output of Extractor to FC => BN => ReLU => FC.
6259908955399d3f056280f1
class YARNModel(object): <NEW_LINE> <INDENT> def __init__(self, key): <NEW_LINE> <INDENT> self.key = key <NEW_LINE> <DEDENT> @property <NEW_LINE> @cache.cached(timeout=5) <NEW_LINE> def state(self): <NEW_LINE> <INDENT> state = redis_store.get(self.key) <NEW_LINE> return json.loads(state) if state is not None else {} <NEW_LINE> <DEDENT> def exists(self): <NEW_LINE> <INDENT> return redis_store.get(self.key) is not None <NEW_LINE> <DEDENT> def refresh_datetime(self): <NEW_LINE> <INDENT> return self.state.get("refresh-datetime", '') <NEW_LINE> <DEDENT> def current_rm(self): <NEW_LINE> <INDENT> return self.state.get("current-rm", '') <NEW_LINE> <DEDENT> def applications(self): <NEW_LINE> <INDENT> return self.state.get("application-metrics", {}) <NEW_LINE> <DEDENT> def application_info(self, application_id): <NEW_LINE> <INDENT> return self.state.get("application-metrics", {}).get(application_id, {}) <NEW_LINE> <DEDENT> def cluster_metrics(self): <NEW_LINE> <INDENT> metrics = self.state.get("cluster-metrics", {}) <NEW_LINE> return metrics.get('clusterMetrics', {})
Model class that exposes YARN metrics stored in redis by a separate worker process.
625990897b180e01f3e49e53
class SomeString(SomeStringOrUnicode): <NEW_LINE> <INDENT> knowntype = str <NEW_LINE> def noneify(self): <NEW_LINE> <INDENT> return SomeString(can_be_None=True, no_nul=self.no_nul)
Stands for an object which is known to be a string.
625990894527f215b58eb78e
class Stage(object): <NEW_LINE> <INDENT> def __init__(self, size, layers, default_layer): <NEW_LINE> <INDENT> self.__size = size <NEW_LINE> self.__cam_at = None <NEW_LINE> self.__layers = {} <NEW_LINE> self.__spawns = {} <NEW_LINE> self.__layer_names = layers <NEW_LINE> self.__default_layer = default_layer <NEW_LINE> for layer in layers: <NEW_LINE> <INDENT> self.__layers[layer] = [] <NEW_LINE> self.__spawns[layer] = [] <NEW_LINE> <DEDENT> self.__dirty = set() <NEW_LINE> self.__death_observers = [] <NEW_LINE> logger.info('%dx%d Stage created', size[0], size[1]) <NEW_LINE> logger.info('%d layers created: %s', len(layers), layers) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for name in self.__layer_names: <NEW_LINE> <INDENT> layer = self.__layers[name] <NEW_LINE> for entity in layer: <NEW_LINE> <INDENT> yield entity <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return self.__size <NEW_LINE> <DEDENT> def harvest_dead(self): <NEW_LINE> <INDENT> dead = {} <NEW_LINE> for name in self.__layers: <NEW_LINE> <INDENT> dead[name] = [] <NEW_LINE> for i, entity in enumerate(self.__layers[name]): <NEW_LINE> <INDENT> if entity.present().dead(): <NEW_LINE> <INDENT> dead[name].insert(0, i) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for name in dead: <NEW_LINE> <INDENT> for i in dead[name]: <NEW_LINE> <INDENT> corpse = self.__layers[name].pop(i) <NEW_LINE> self.__dirty.add(corpse) <NEW_LINE> for death_observer in self.__death_observers: <NEW_LINE> <INDENT> death_observer.tell_is_dead(corpse) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> logger.info('Dead harvested') <NEW_LINE> <DEDENT> def add_spawn(self, entity, layer=None): <NEW_LINE> <INDENT> if layer is None: <NEW_LINE> <INDENT> layer = self.__default_layer <NEW_LINE> <DEDENT> if layer not in self.__layer_names: <NEW_LINE> <INDENT> raise ValueError('Non-existent layer name') <NEW_LINE> <DEDENT> self.__spawns[layer].append(entity) <NEW_LINE> logging.debug( '%s will be spawned in 
%s the next physics step', entity, layer) <NEW_LINE> <DEDENT> def spawn(self): <NEW_LINE> <INDENT> for name in self.__layer_names: <NEW_LINE> <INDENT> for spawn in self.__spawns[name]: <NEW_LINE> <INDENT> logger.debug('Spawning %s in %s', spawn, name) <NEW_LINE> <DEDENT> self.__layers[name].extend(self.__spawns[name]) <NEW_LINE> self.__spawns[name] = [] <NEW_LINE> <DEDENT> <DEDENT> def add_death_observer(self, observer): <NEW_LINE> <INDENT> self.__death_observers.append(observer) <NEW_LINE> logger.info('%s now observes the deaths in %s', observer, self)
Stage((width, height), layers, default_layer) -> a new Stage Stages store Entities in layers. Layers are all named. Layers and the Entities in them have a set order.
62599089283ffb24f3cf547d
class Arg: <NEW_LINE> <INDENT> NO_DEFAULT = object() <NEW_LINE> ARG_TYPE_FIXTURE = "F" <NEW_LINE> ARG_TYPE_OPTION = "O" <NEW_LINE> ARG_TYPE_POSITIONAL = 'P' <NEW_LINE> ARG_TYPE_TRAIL = "T" <NEW_LINE> def __init__(self, name, arg_type, default=NO_DEFAULT): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.arg_type = arg_type <NEW_LINE> self.default = default <NEW_LINE> self.description = '(no description)' <NEW_LINE> self.short_description = '(no description)' <NEW_LINE> if arg_type == self.ARG_TYPE_OPTION: <NEW_LINE> <INDENT> self.argparse_name = name.replace('_', '-') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.argparse_name = name <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.arg_type == self.ARG_TYPE_TRAIL: <NEW_LINE> <INDENT> return '*' + self.argparse_name <NEW_LINE> <DEDENT> elif any(map(lambda x: self.default is x, (Command.Arg.NO_DEFAULT, True, False))): <NEW_LINE> <INDENT> return self.argparse_name <NEW_LINE> <DEDENT> elif self.default is None: <NEW_LINE> <INDENT> return '%s=VALUE' % self.argparse_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '%s=%s' % (self.argparse_name, self.default) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Arg %s>' % self.__str__() <NEW_LINE> <DEDENT> def ConfigureArgumentParser(self, parser): <NEW_LINE> <INDENT> if self.arg_type == self.ARG_TYPE_FIXTURE: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self.arg_type == self.ARG_TYPE_TRAIL: <NEW_LINE> <INDENT> parser.add_argument(self.argparse_name, nargs='*') <NEW_LINE> return <NEW_LINE> <DEDENT> if self.arg_type == self.ARG_TYPE_OPTION: <NEW_LINE> <INDENT> if isinstance(self.default, bool): <NEW_LINE> <INDENT> assert self.default is not True, 'Can\'t handle boolean options with default=True' <NEW_LINE> parser.add_argument('--%s' % self.argparse_name, action='store_true', default=self.default) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parser.add_argument('--%s' % self.argparse_name, 
default=self.default) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> if self.arg_type is self.ARG_TYPE_POSITIONAL: <NEW_LINE> <INDENT> if self.default is self.NO_DEFAULT: <NEW_LINE> <INDENT> parser.add_argument(self.argparse_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parser.add_argument(self.argparse_name, nargs='?', default=self.default) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> raise TypeError('Unknown arg_type==%r' % self.arg_type)
Holds meta-information about the associated *function parameter*.
6259908963b5f9789fe86d47
class OrderFilter(django_filters.FilterSet): <NEW_LINE> <INDENT> start_date = DateFilter(field_name='date_created', lookup_expr='gte') <NEW_LINE> end_date = DateFilter(field_name='date_created', lookup_expr='lte') <NEW_LINE> description = CharFilter(field_name='description', lookup_expr='icontains') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Order <NEW_LINE> fields = '__all__' <NEW_LINE> exclude = ['date_created', 'customer']
create a class to filter the orders
625990894a966d76dd5f0ac4
class KvcacheSession(DictSession): <NEW_LINE> <INDENT> def _mark_dirty(self): <NEW_LINE> <INDENT> super()._mark_dirty() <NEW_LINE> if self._livedata: <NEW_LINE> <INDENT> self._store() <NEW_LINE> self._is_dirty = False <NEW_LINE> <DEDENT> <DEDENT> def _create_dict(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._container[self.id] <NEW_LINE> <DEDENT> except kvcache.NotFound: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> <DEDENT> def _id_is_valid(self, id): <NEW_LINE> <INDENT> return id in self._container <NEW_LINE> <DEDENT> def _store(self): <NEW_LINE> <INDENT> self._container[self.id] = self._cached_dict <NEW_LINE> <DEDENT> def _revert(self): <NEW_LINE> <INDENT> self._cached_dict = None
Session backend that makes use of a configured :mod:`score.kvcache`.
62599089d486a94d0ba2db94
class YapfLinter(base.LinterBase): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(YapfLinter, self).__init__("yapf", "yapf 0.26.0") <NEW_LINE> <DEDENT> def get_lint_version_cmd_args(self): <NEW_LINE> <INDENT> return ["--version"] <NEW_LINE> <DEDENT> def needs_file_diff(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def get_lint_cmd_args(self, file_name): <NEW_LINE> <INDENT> return [file_name] <NEW_LINE> <DEDENT> def get_fix_cmd_args(self, file_name): <NEW_LINE> <INDENT> return ["-i", file_name]
Yapf linter.
62599089d8ef3951e32c8c4d
class JSON: <NEW_LINE> <INDENT> def __init__(self, serializer=json.dumps, adapters=(), **kw): <NEW_LINE> <INDENT> self.serializer = serializer <NEW_LINE> self.kw = kw <NEW_LINE> self.components = Components() <NEW_LINE> for type, adapter in adapters: <NEW_LINE> <INDENT> self.add_adapter(type, adapter) <NEW_LINE> <DEDENT> <DEDENT> def add_adapter(self, type_or_iface, adapter): <NEW_LINE> <INDENT> self.components.registerAdapter( adapter, (type_or_iface,), IJSONAdapter ) <NEW_LINE> <DEDENT> def __call__(self, info): <NEW_LINE> <INDENT> def _render(value, system): <NEW_LINE> <INDENT> request = system.get('request') <NEW_LINE> if request is not None: <NEW_LINE> <INDENT> response = request.response <NEW_LINE> ct = response.content_type <NEW_LINE> if ct == response.default_content_type: <NEW_LINE> <INDENT> response.content_type = 'application/json' <NEW_LINE> <DEDENT> <DEDENT> default = self._make_default(request) <NEW_LINE> return self.serializer(value, default=default, **self.kw) <NEW_LINE> <DEDENT> return _render <NEW_LINE> <DEDENT> def _make_default(self, request): <NEW_LINE> <INDENT> def default(obj): <NEW_LINE> <INDENT> if hasattr(obj, '__json__'): <NEW_LINE> <INDENT> return obj.__json__(request) <NEW_LINE> <DEDENT> obj_iface = providedBy(obj) <NEW_LINE> adapters = self.components.adapters <NEW_LINE> result = adapters.lookup( (obj_iface,), IJSONAdapter, default=_marker ) <NEW_LINE> if result is _marker: <NEW_LINE> <INDENT> raise TypeError('%r is not JSON serializable' % (obj,)) <NEW_LINE> <DEDENT> return result(obj, request) <NEW_LINE> <DEDENT> return default
Renderer that returns a JSON-encoded string. Configure a custom JSON renderer using the :meth:`~pyramid.config.Configurator.add_renderer` API at application startup time: .. code-block:: python from pyramid.config import Configurator config = Configurator() config.add_renderer('myjson', JSON(indent=4)) Once this renderer is registered as above, you can use ``myjson`` as the ``renderer=`` parameter to ``@view_config`` or :meth:`~pyramid.config.Configurator.add_view`: .. code-block:: python from pyramid.view import view_config @view_config(renderer='myjson') def myview(request): return {'greeting':'Hello world'} Custom objects can be serialized using the renderer by either implementing the ``__json__`` magic method, or by registering adapters with the renderer. See :ref:`json_serializing_custom_objects` for more information. .. note:: The default serializer uses ``json.JSONEncoder``. A different serializer can be specified via the ``serializer`` argument. Custom serializers should accept the object, a callback ``default``, and any extra ``kw`` keyword arguments passed during renderer construction. This feature isn't widely used but it can be used to replace the stock JSON serializer with, say, simplejson. If all you want to do, however, is serialize custom objects, you should use the method explained in :ref:`json_serializing_custom_objects` instead of replacing the serializer. .. versionadded:: 1.4 Prior to this version, there was no public API for supplying options to the underlying serializer without defining a custom renderer.
62599089dc8b845886d55198
class Bucket(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.bucket = [] <NEW_LINE> self.name = name <NEW_LINE> <DEDENT> def execute(self, settings_file=None, output=os.path.abspath(__file__), ClsResult = BucketResult): <NEW_LINE> <INDENT> logging.info('Wykonuje zadanie: ' + self.name + '.') <NEW_LINE> bucket_result = ClsResult(self) <NEW_LINE> for i in self.bucket: <NEW_LINE> <INDENT> bucket_result.add_bucket_result(i.execute_task()) <NEW_LINE> <DEDENT> return bucket_result <NEW_LINE> <DEDENT> def add_task(self, func, times_to_exec=1, *args, **kwargs): <NEW_LINE> <INDENT> self.bucket += [Task(func, times_to_exec, *args, **kwargs)] <NEW_LINE> <DEDENT> def get_task(self, task_id): <NEW_LINE> <INDENT> if len(self.bucket) - 1 < task_id: <NEW_LINE> <INDENT> raise TaskNotFoundError(task_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.bucket[task_id] <NEW_LINE> <DEDENT> <DEDENT> def del_task(self, task_id): <NEW_LINE> <INDENT> if len(self.bucket) - 1 < task_id: <NEW_LINE> <INDENT> raise TaskNotFoundError(task_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> del self.bucket[task_id] <NEW_LINE> <DEDENT> <DEDENT> def list_bucket(self): <NEW_LINE> <INDENT> return_str = '[ id - function name - times to execute - args - kwargs ]\n' <NEW_LINE> for index, tsk in enumerate(self.bucket): <NEW_LINE> <INDENT> return_str += "[ " + str(index) + " - " + tsk._get_str_about() + " ]\n" <NEW_LINE> <DEDENT> return return_str[:-1]
Container for tasks to execute.
625990893346ee7daa338452
class Dexterity(Ability): <NEW_LINE> <INDENT> associated_skills = ['Ranged Combat:', 'Sleight of Hand', 'Vehicles'] <NEW_LINE> associated_defenses = [] <NEW_LINE> ability_name = "Dexterity"
Dexterity: Skill: Ranged Combat: *: 0.5 point/rank Skill: Sleight of Hand 0.5 ppr Skill: Vehicles 0.5 ppr TOTAL: 1.5 ppr
6259908950812a4eaa6219b5
class Menu(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=32,unique=True) <NEW_LINE> icon = models.CharField(max_length=32) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.title
菜单表
6259908926068e7796d4e522
class Usuario(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User, on_delete=models.CASCADE) <NEW_LINE> direccion = models.CharField(max_length=110, blank=True, null=True) <NEW_LINE> telefono = models.CharField(max_length=50, blank=True, null=True) <NEW_LINE> monto = models.DecimalField(max_digits=11, decimal_places=2, default=200) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Usuario' <NEW_LINE> verbose_name_plural = 'Usuarios' <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.user.first_name
Model definition for Usuario.
6259908999fddb7c1ca63bcc
class LoggerHook(pecan.hooks.PecanHook): <NEW_LINE> <INDENT> def on_route(self, state): <NEW_LINE> <INDENT> state.request.start = time.time() <NEW_LINE> <DEDENT> def after(self, state): <NEW_LINE> <INDENT> delta = time.time() - state.request.start <NEW_LINE> LOG.info('%s "%s %s" status: %d time: %0.3f', state.request.client_addr, state.request.method, state.request.url, state.response.status_code, delta)
Print out requests in the log
62599089d486a94d0ba2db96
class ContainerServiceCustomProfile(Model): <NEW_LINE> <INDENT> _validation = { 'orchestrator': {'required': True}, } <NEW_LINE> _attribute_map = { 'orchestrator': {'key': 'orchestrator', 'type': 'str'}, } <NEW_LINE> def __init__(self, *, orchestrator: str, **kwargs) -> None: <NEW_LINE> <INDENT> super(ContainerServiceCustomProfile, self).__init__(**kwargs) <NEW_LINE> self.orchestrator = orchestrator
Properties to configure a custom container service cluster. All required parameters must be populated in order to send to Azure. :param orchestrator: Required. The name of the custom orchestrator to use. :type orchestrator: str
62599089e1aae11d1e7cf604
class OrderBase(TimestableMixin, models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> <DEDENT> __metaclass__ = DeferredModelMetaclass.for_point( Order, extend_meta( verbose_name=u'Заказ', verbose_name_plural=u'Заказы' ) ) <NEW_LINE> user_name = models.CharField(verbose_name=u'ФИО пользователя', max_length=300, blank=True) <NEW_LINE> user_email = models.EmailField(verbose_name=u'Email пользователя', blank=True, null=True) <NEW_LINE> user_phone = models.CharField(verbose_name=u'телефон пользователя', max_length=20) <NEW_LINE> delivery = DeferredForeignKey(Delivery, verbose_name=u'Способ доставки', blank=True, null=True) <NEW_LINE> payment = DeferredForeignKey(Payment, verbose_name=u'Способ оплаты', blank=True, null=True) <NEW_LINE> address = models.TextField(blank=True, verbose_name=u'Адрес') <NEW_LINE> delivery_time = models.CharField(verbose_name=u'Удобные дата и время для доставки', blank=True, max_length=100) <NEW_LINE> price = models.DecimalField(verbose_name=u'Общая стоимость заказа', default=Decimal(0.00), decimal_places=2, max_digits=10) <NEW_LINE> delivery_price = models.DecimalField(verbose_name=u'Стоимость доставки', default=Decimal(0.00), decimal_places=2, max_digits=10, null=True) <NEW_LINE> comment = models.TextField(blank=True, verbose_name=u'Комментарий к заказу') <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> if self.id: <NEW_LINE> <INDENT> return u"Заказ №%d" % (self.id) <NEW_LINE> <DEDENT> return u"Новый заказ"
Базовый класс для информации о заказе
625990897cff6e4e811b7623
class SyncMapInstance(InstanceResource): <NEW_LINE> <INDENT> def __init__(self, version, payload, service_sid, sid=None): <NEW_LINE> <INDENT> super(SyncMapInstance, self).__init__(version) <NEW_LINE> self._properties = { 'sid': payload['sid'], 'unique_name': payload['unique_name'], 'account_sid': payload['account_sid'], 'service_sid': payload['service_sid'], 'url': payload['url'], 'links': payload['links'], 'revision': payload['revision'], 'date_expires': deserialize.iso8601_datetime(payload['date_expires']), 'date_created': deserialize.iso8601_datetime(payload['date_created']), 'date_updated': deserialize.iso8601_datetime(payload['date_updated']), 'created_by': payload['created_by'], } <NEW_LINE> self._context = None <NEW_LINE> self._solution = {'service_sid': service_sid, 'sid': sid or self._properties['sid'], } <NEW_LINE> <DEDENT> @property <NEW_LINE> def _proxy(self): <NEW_LINE> <INDENT> if self._context is None: <NEW_LINE> <INDENT> self._context = SyncMapContext( self._version, service_sid=self._solution['service_sid'], sid=self._solution['sid'], ) <NEW_LINE> <DEDENT> return self._context <NEW_LINE> <DEDENT> @property <NEW_LINE> def sid(self): <NEW_LINE> <INDENT> return self._properties['sid'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_name(self): <NEW_LINE> <INDENT> return self._properties['unique_name'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def account_sid(self): <NEW_LINE> <INDENT> return self._properties['account_sid'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def service_sid(self): <NEW_LINE> <INDENT> return self._properties['service_sid'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> return self._properties['url'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def links(self): <NEW_LINE> <INDENT> return self._properties['links'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def revision(self): <NEW_LINE> <INDENT> return self._properties['revision'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def date_expires(self): <NEW_LINE> 
<INDENT> return self._properties['date_expires'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def date_created(self): <NEW_LINE> <INDENT> return self._properties['date_created'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def date_updated(self): <NEW_LINE> <INDENT> return self._properties['date_updated'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def created_by(self): <NEW_LINE> <INDENT> return self._properties['created_by'] <NEW_LINE> <DEDENT> def fetch(self): <NEW_LINE> <INDENT> return self._proxy.fetch() <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> return self._proxy.delete() <NEW_LINE> <DEDENT> def update(self, ttl=values.unset): <NEW_LINE> <INDENT> return self._proxy.update(ttl=ttl, ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def sync_map_items(self): <NEW_LINE> <INDENT> return self._proxy.sync_map_items <NEW_LINE> <DEDENT> @property <NEW_LINE> def sync_map_permissions(self): <NEW_LINE> <INDENT> return self._proxy.sync_map_permissions <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items()) <NEW_LINE> return '<Twilio.Sync.V1.SyncMapInstance {}>'.format(context)
PLEASE NOTE that this class contains beta products that are subject to change. Use them with caution.
62599089283ffb24f3cf5481
class InvalidAuth(Exception): <NEW_LINE> <INDENT> pass
Raised when osu gives us an error login response. Don't get confused with `InvalidCredentials`, where osu don't even takes with us.
625990892c8b7c6e89bd53c8
class DemoException(Exception): <NEW_LINE> <INDENT> pass
An exception type for the demo.
625990894c3428357761be9c
class Libnet(AutotoolsPackage): <NEW_LINE> <INDENT> homepage = "https://www.example.com" <NEW_LINE> url = "https://github.com/libnet/libnet/archive/v1.2.tar.gz" <NEW_LINE> version('1.2', sha256='b7a371a337d242c017f3471d70bea2963596bec5bd3bd0e33e8517550e2311ef') <NEW_LINE> depends_on('libtool') <NEW_LINE> depends_on('automake') <NEW_LINE> depends_on('libseccomp') <NEW_LINE> def configure_args(self): <NEW_LINE> <INDENT> args = [] <NEW_LINE> return args
FIXME: Put a proper description of your package here.
62599089099cdd3c636761eb
class Sensor(sql.Base, abstract.Sensor): <NEW_LINE> <INDENT> __resource__ = 'sensors' <NEW_LINE> __tablename__ = 'sensor' <NEW_LINE> __table_args__ = sql.table_args() <NEW_LINE> id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True) <NEW_LINE> uuid = sqlalchemy.Column(sqlalchemy.String(36)) <NEW_LINE> project_id = sqlalchemy.Column(sqlalchemy.String(36)) <NEW_LINE> user_id = sqlalchemy.Column(sqlalchemy.String(36)) <NEW_LINE> name = sqlalchemy.Column(sqlalchemy.String(255)) <NEW_LINE> sensor_type = sqlalchemy.Column(sqlalchemy.String(255)) <NEW_LINE> value = sqlalchemy.Column(sqlalchemy.String(255)) <NEW_LINE> timestamp = sqlalchemy.Column(sqlalchemy.DateTime) <NEW_LINE> description = sqlalchemy.Column(sqlalchemy.String(255)) <NEW_LINE> documentation = sqlalchemy.Column(sqlalchemy.String(255)) <NEW_LINE> target_resource = sqlalchemy.Column(sqlalchemy.String(255))
Represent an sensor in sqlalchemy.
62599089091ae35668706825
@loader.tds <NEW_LINE> class CONTACTMod(loader.Module): <NEW_LINE> <INDENT> strings = {"name": "contact"} <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.name = self.strings["name"] <NEW_LINE> <DEDENT> def config_complete(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> async def contactcmd(self, message): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> await message.delete() <NEW_LINE> x = 10 <NEW_LINE> lst = str(x) <NEW_LINE> await message.respond(lst) <NEW_LINE> dd = time.time() <NEW_LINE> while time.time() - dd < x: <NEW_LINE> <INDENT> now = str(x - round(time.time() - dd)) <NEW_LINE> if now != lst: <NEW_LINE> <INDENT> await message.respond(now) <NEW_LINE> <DEDENT> lst = now <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> await message.respond("Упс, ошибочка вышла! Напшите @gerasikoff, он вам поможет")
Это модуль для игры в "контакт"
62599089be7bc26dc9252c47
class TestLexer(unittest.TestCase):
    """Test case for the Lexer: feeds three known lines through a LineReader
    and checks token types/texts returned by read() and peek()."""

    def setUp(self):
        # Build an in-memory file with fixed content for every test.
        self.f = io.StringIO()
        self.lines = ['First 1 "string"\n', 'Second line\n', 'Third line\n']
        self.f.write(''.join(self.lines))
        self.f.seek(0)
        self.lineReader = LineReader(self.f)
        self.lexer = Lexer(self.lineReader)

    def tearDown(self):
        pass

    def test_read(self):
        # Tokens of the first line, in order: identifier, number, string
        # (note: the lexer keeps the opening quote, drops the closing one),
        # then an end-of-line token whose text is the two characters '\n'.
        token = self.lexer.read()
        self.assertEqual(isinstance(token, IdToken), True)
        self.assertEqual('First', token.getText())
        token = self.lexer.read()
        self.assertEqual(isinstance(token, NumToken), True)
        self.assertEqual('1', token.getText())
        token = self.lexer.read()
        self.assertEqual(isinstance(token, StrToken), True)
        self.assertEqual('"string', token.getText())
        token = self.lexer.read()
        self.assertEqual(isinstance(token, IdToken), True)
        self.assertEqual('\\n', token.getText())

    def test_peek(self):
        # peek(i) looks ahead without consuming: after peeking 0..3, a
        # read() must still return the very first token.
        token = self.lexer.peek(0)
        self.assertEqual(isinstance(token, IdToken), True)
        self.assertEqual('First', token.getText())
        token = self.lexer.peek(1)
        self.assertEqual(isinstance(token, NumToken), True)
        self.assertEqual('1', token.getText())
        token = self.lexer.peek(2)
        self.assertEqual(isinstance(token, StrToken), True)
        self.assertEqual('"string', token.getText())
        token = self.lexer.peek(3)
        self.assertEqual(isinstance(token, IdToken), True)
        self.assertEqual('\\n', token.getText())
        token = self.lexer.read()
        self.assertEqual(isinstance(token, IdToken), True)
        self.assertEqual('First', token.getText())
Test case docstring.
62599089aad79263cf43039d
class OutputsWriter(object):
    """Manages all of the outputs of make_examples in a single place.

    Holds up to three TFRecord writers — candidates, examples and gvcfs —
    each created only when the corresponding filename option is set.
    Entering/exiting this object enters/exits every bound writer.
    """

    def __init__(self, options):
        # Every known writer name starts out unbound (None).
        self._writers = {k: None for k in ['candidates', 'examples', 'gvcfs']}
        if options.candidates_filename:
            self._add_writer('candidates',
                             io_utils.RawProtoWriterAdaptor(
                                 io_utils.make_tfrecord_writer(
                                     options.candidates_filename)))
        if options.examples_filename:
            self._add_writer('examples',
                             io_utils.RawProtoWriterAdaptor(
                                 io_utils.make_tfrecord_writer(
                                     options.examples_filename)))
        if options.gvcf_filename:
            self._add_writer('gvcfs',
                             io_utils.RawProtoWriterAdaptor(
                                 io_utils.make_tfrecord_writer(
                                     options.gvcf_filename)))

    def write_examples(self, *examples):
        """Write example protos to the examples writer, if bound."""
        self._write('examples', *examples)

    def write_gvcfs(self, *gvcfs):
        """Write gvcf protos to the gvcfs writer, if bound."""
        self._write('gvcfs', *gvcfs)

    def write_candidates(self, *candidates):
        """Write candidate protos to the candidates writer, if bound."""
        self._write('candidates', *candidates)

    def _add_writer(self, name, writer):
        """Bind *writer* under *name*; rejects unknown or already-bound names."""
        if name not in self._writers:
            raise ValueError(
                'Expected writer {} to have a None binding in writers.'.format(name))
        if self._writers[name] is not None:
            raise ValueError('Expected writer {} to be bound to None in writers but '
                             'saw {} instead'.format(name, self._writers[name]))
        self._writers[name] = writer

    def __enter__(self):
        # Fixed: dict.itervalues() is Python 2 only; .values() works on both
        # Python 2 and 3.
        for writer in self._writers.values():
            if writer is not None:
                writer.__enter__()
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        for writer in self._writers.values():
            if writer is not None:
                writer.__exit__(exception_type, exception_value, traceback)

    def _write(self, writer_name, *protos):
        """Forward *protos* to the named writer; silently no-op if unbound."""
        writer = self._writers[writer_name]
        if writer:
            for proto in protos:
                writer.write(proto)
Manages all of the outputs of make_examples in a single place.
62599089adb09d7d5dc0c13d
class TimeEvent():
    """TimeEvent is a composite class that contains Event-like fields.

    A TimeEvent is created by the application and added to the Framework,
    which posts it to the HSM after the given delay.  One-shot events are
    created via post_at()/post_in(); periodic ones via post_every().
    """

    def __init__(self, signame):
        self.signal = Signal.register(signame)
        self.value = None

    def is_periodic(self):
        # A zero interval means the event is one-shot.
        return self.interval > 0

    def post_at(self, act, abs_time):
        """Schedule a one-shot post to *act* at absolute time *abs_time*."""
        assert isinstance(act, Ahsm)
        self.act = act
        self.interval = 0
        Framework.add_time_event_at(self, abs_time)

    def post_in(self, act, delta):
        """Schedule a one-shot post to *act* after *delta*."""
        assert isinstance(act, Ahsm)
        self.act = act
        self.interval = 0
        Framework.add_time_event(self, delta)

    def post_every(self, act, delta):
        """Schedule a periodic post to *act* every *delta*."""
        assert isinstance(act, Ahsm)
        self.act = act
        self.interval = delta
        Framework.add_time_event(self, delta)

    def disarm(self):
        """Cancel the event and detach it from its actor."""
        self.act = None
        self.interval = 0
        Framework.remove_time_event(self)
TimeEvent is a composite class that contains Event-like fields. A TimeEvent is created by the application and added to the Framework. The Framework then posts the event to the HSM after the given delay. A one-shot TimeEvent is created by calling either post_at() or post_in(). A periodic TimeEvent is created by calling the post_every() method.
62599089283ffb24f3cf5483
class ListaMatriculasAluno(generics.ListAPIView):
    """API view listing the enrolments (matriculas) of a single student.

    The student primary key is taken from the URL kwarg ``pk``.
    """

    def get_queryset(self):
        # Filter enrolments by the student id captured in the URL.
        queryset = Matricula.objects.filter(aluno_id=self.kwargs['pk'])
        return queryset
    serializer_class = ListaMatriculasAlunoSerializer
    authentication_classes = [BasicAuthentication]
    permission_classes = [IsAuthenticated]
Listando as matriculas de um aluno ou aluna
625990894c3428357761be9e
class handles(object):
    """Decorator for walker functions.

    Specify the nodetypes the decorated function handles, either as
    separate arguments or as a single iterable (e.g. a group like
    op.RELATIONS).  Stacked decorations accumulate on ``func.nodetypes``::

        @handles(op.NODE, ...)
        def walk_special(...): ...
    """

    def __init__(self, *nodetypes):
        # Fixed: ``collections.Iterable`` was removed in Python 3.10; the
        # alias lives in ``collections.abc`` since Python 3.3.
        from collections.abc import Iterable

        # A single iterable argument is treated as the list of nodetypes.
        if len(nodetypes) == 1 and isinstance(nodetypes[0], Iterable):
            nodetypes = nodetypes[0]
        self.nodetypes = list(nodetypes)

    def __call__(self, func):
        nodetypes = self.nodetypes
        # Stacked @handles decorators accumulate rather than overwrite.
        if hasattr(func, "nodetypes"):
            nodetypes = func.nodetypes + nodetypes
        func.nodetypes = nodetypes
        return func
Decorator for walker functions. Use it by specifying the nodetypes that need to be handled by the given function. It is possible to use groupd (e.g., op.RELATIONS) directly. :: @handles(op.NODE, ...) def walk_special(...): ...
62599089a8370b77170f1fb1
class CaseLikeObject:
    """An object that has many of the same qualities as a Case but may not
    be one.

    Provides shared functionality to both Notice and Case models without
    duplicating code.  Notices share many qualities with a Case but do not
    exist on the TRS system, having been created before it was in use.
    """

    def filter_available_review_types(self, milestones: dict, reviews: QuerySet) -> list:
        """Return review-type dicts annotated with availability for today.

        Each review type's ``meta['criteria']`` entries are evaluated:
        before/after offsets relative to milestone dates, parent case type
        membership, and required state values.  The resulting status is
        embedded under ``review_dict['dates']``.
        """
        now = datetime.date.today()
        available_reviews = []
        for review_type in reviews:
            status = "ok"
            review_dict = review_type.to_dict()
            criteria = review_type.meta.get("criteria", [])
            start_date = None
            end_date = None
            for test in criteria:
                criterion = test["criterion"]
                if criterion in ["before", "after"]:
                    # Offset a milestone date by the configured duration
                    # (e.g. months=3) to derive a window boundary.
                    duration_unit = test["unit"]
                    duration_value = test["value"]
                    offset = relativedelta(**{duration_unit: duration_value})
                    milestone = test["milestone"]
                    if milestone not in milestones:
                        status = "milestone_missing"
                        break
                    rel_date = milestones[milestone] + offset
                    if criterion == "after":
                        # Keep the latest of all "after" lower bounds.
                        start_date = (
                            rel_date if not start_date or (rel_date > start_date) else start_date
                        )
                    else:
                        # Keep the earliest of all "before" upper bounds.
                        end_date = rel_date if not end_date or (rel_date < end_date) else end_date
                elif criterion == "parent_case_types":
                    # The case type acronym must be whitelisted.
                    acronym = self.type.acronym
                    if acronym not in test.get("value", []):
                        status = "invalid_case_type"
                elif criterion == "state_value":
                    # "pass" is a wildcard accepted unconditionally.
                    state_value = self.get_state_key(key=test["key"])
                    if state_value != "pass" and (
                        not state_value or state_value.value != test["value"]
                    ):
                        status = "invalid_case_type"
            if status == "ok":
                if start_date and now < start_date:
                    status = "before_start"
                if end_date and now > end_date:
                    status = "after_end"
            review_dict["dates"] = {
                "start": start_date.strftime(settings.API_DATETIME_FORMAT)
                if start_date
                else None,
                "end": end_date.strftime(settings.API_DATETIME_FORMAT) if end_date else None,
                "status": status,
            }
            available_reviews.append(review_dict)
        return available_reviews

    @property
    def type(self):
        # Concrete subclasses supply the case type.
        raise NotImplementedError()

    def available_case_review_types(self):
        """Convenience wrapper: index milestones, fetch reviews, filter."""
        milestones = self.case_milestone_index()
        reviews = self.get_reviews()
        return self.filter_available_review_types(milestones, reviews)

    def case_milestone_index(self):
        # Concrete subclasses supply the milestone name -> date mapping.
        raise NotImplementedError()

    def get_state_key(self, key: str):
        # Concrete subclasses supply state lookup.
        raise NotImplementedError()

    def get_reviews(self):
        # Review types are the CaseTypes flagged with meta.review == True.
        return CaseType.objects.filter(meta__review=True)
An object that has many of the same qualities as a Case but may not be one. Used to provide shared functionality to both Notice and Case models without duplicating code. Notices share many of the same qualities as a Case, but they do not exist on the TRS system as they were created before it was in use.
62599089099cdd3c636761ec
class FormValidator(FancyValidator):
    """A FormValidator is something that can be chained with a Schema.

    Unlike normal chaining, a FormValidator can validate forms that are not
    entirely valid: with ``validate_partial_form`` set it also receives
    incomplete values.  ``validate()`` returns None on success, or an error
    string / ``{fieldName: errorMessage}`` dict (key "form" addresses the
    whole form).
    """

    validate_partial_form = False
    validate_partial_python = None
    validate_partial_other = None

    def is_empty(self, value):
        # A form (a dict of fields) is never considered empty as a whole.
        return False

    def field_is_empty(self, value):
        # Delegates to the module-level is_empty() helper for single fields.
        return is_empty(value)
A FormValidator is something that can be chained with a Schema. Unlike normal chaining the FormValidator can validate forms that aren't entirely valid. The important method is .validate(), of course. It gets passed a dictionary of the (processed) values from the form. If you have .validate_partial_form set to True, then it will get the incomplete values as well -- check with the "in" operator if the form was able to process any particular field. Anyway, .validate() should return a string or a dictionary. If a string, it's an error message that applies to the whole form. If not, then it should be a dictionary of fieldName: errorMessage. The special key "form" is the error message for the form as a whole (i.e., a string is equivalent to {"form": string}). Returns None on no errors.
62599089be7bc26dc9252c48
class EnergyManagementSystemProgram(DataObject):
    """Corresponds to IDD object `EnergyManagementSystem:Program`.

    This input defines an Erl program; each field after the name is a line
    of EMS Runtime Language.
    """

    # IDD schema: one required 'Name' field plus extensible program lines.
    _schema = {'extensible-fields': OrderedDict([(u'program line 1',
                                                  {'name': u'Program Line 1',
                                                   'pyname': u'program_line_1',
                                                   'required-field': False,
                                                   'autosizable': False,
                                                   'autocalculatable': False,
                                                   'type': u'alpha'})]),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'})]),
               'format': None,
               'group': u'Energy Management System',
               'min-fields': 2,
               'name': u'EnergyManagementSystem:Program',
               'pyname': u'EnergyManagementSystemProgram',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """Get or set the `Name` field value."""
        return self["Name"]

    @name.setter
    def name(self, value=None):
        self["Name"] = value

    def add_extensible(self, program_line_1=None, ):
        """Append one extensible program line (validated via check_value)."""
        vals = []
        program_line_1 = self.check_value("Program Line 1", program_line_1)
        vals.append(program_line_1)
        self._extdata.append(vals)

    @property
    def extensibles(self):
        """Get the list of all extensible field value groups."""
        return self._extdata

    @extensibles.setter
    def extensibles(self, extensibles):
        # Re-add each group so every value passes through check_value.
        self._extdata = []
        for ext in extensibles:
            self.add_extensible(*ext)
Corresponds to IDD object `EnergyManagementSystem:Program` This input defines an Erl program Each field after the name is a line of EMS Runtime Language
625990895fc7496912d4905d
class SearchWindow:
    """Left-hand pane used during word lookup: an input box on top and a
    list of matching words below it (curses based)."""

    def __init__(self, input_win, relevant_win):
        self._input_window = input_win
        self._relevant_window = relevant_win
        self._rmaxy, self._rmaxx = relevant_win.getmaxyx()

    def _fix(self):
        """Redraw both window borders, then refresh."""
        edges = ('|', '|', '-', '-', '+', '+', '+', '+')
        self._input_window.border(*edges)
        self._relevant_window.border(*edges)
        self._refresh()

    def _refresh(self):
        for win in (self._input_window, self._relevant_window):
            win.refresh()

    def max_word_width(self):
        """Widest word that fits inside the relevant-words pane."""
        return self._rmaxx - 4

    def recover(self):
        """Force both windows to repaint (e.g. after being covered)."""
        for win in (self._input_window, self._relevant_window):
            win.touchwin()
        self._refresh()

    def show_input_word(self, word):
        """Display the word being typed in the input box."""
        self._input_window.clear()
        self._input_window.addstr(1, 1, word)
        self._fix()

    def show_relevant(self, relevant, highlight_index):
        """Render candidate words, highlighting one; return the last drawn
        index (-1 when nothing was drawn)."""
        self._relevant_window.clear()
        row = 1
        drawn = 0
        for word in relevant:
            # Stop before the bottom border.
            if row > self._rmaxy - 2:
                break
            if drawn == highlight_index:
                self._relevant_window.addstr(row, 1, word, curses.color_pair(1))
            else:
                self._relevant_window.addstr(row, 1, word)
            row += 1
            drawn += 1
        self._fix()
        return drawn - 1
进行单词查询时左侧的窗口
625990894527f215b58eb792
class PubSubmissionFormPreview(FormPreview):
    """Form preview view for publication submission.

    Builds (or fetches) a Publication from the submitted form data for the
    preview step, then saves it on confirmation.
    """

    FormPreview.form_template = "add_publication.html"
    FormPreview.preview_template = "add_publication_preview.html"

    def process_preview(self, request, form, context):
        """Create or look up the Publication and stash it in the session."""
        cd = form.cleaned_data
        if 'pmid' in cd:
            # PubMed id given: reuse an existing record or fetch from PubMed.
            try:
                p = Publication.objects.get(pmid=cd['pmid'])
            except Publication.DoesNotExist:
                pubrec = base.bioutils.get_pubmed(cd['pmid'])
                p = create_object.make_pub(pubrec, cd)
        else:
            # Manual entry: assemble a PubMed-like record from form fields.
            pubrec = dict(Title=cd['title'],
                          AuthorList=cd['authors'].split(','),
                          FullJournalName=cd['journal'],
                          PubDate=cd['publication_date'],
                          Volume=cd['volume'],
                          Issue=cd['issue'],
                          Pages=cd['pages'])
            p = create_object.make_pub(pubrec, cd)
        context["pub"] = p
        session_utils.put(request.session, "publication", p)

    def done(self, request, cleaned_data):
        """Persist the previewed publication and redirect to the home page."""
        msg = "The paper was successfully submitted to be curated later."
        p = session_utils.get(request.session, "publication")
        if "assignment" in request.POST:
            # Self-assign the paper to the submitting curator.
            curator,_ = Curator.objects.get_or_create(user=request.user)
            p.assigned_to = curator
        if "contains_no_data" in request.POST:
            # No TF-binding site data: mark curation complete immediately.
            note = " \nPaper has no TF-binding site data."
            p.submission_notes += note
            p.curation_complete = True
            msg = """The paper was marked as complete, since it does not have data."""
        p.save()
        messages.success(request, msg)
        return HttpResponseRedirect(reverse(base.views.home))
Form preview view for publication submission.
62599089283ffb24f3cf5486
class TestData2(unittest.TestCase):
    """Data2 unit test stubs (placeholders, no assertions yet)."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testData2(self):
        pass
Data2 unit test stubs
62599089ec188e330fdfa493
class QueryNetworkError(Exception):
    """Exception thrown when the socket connection fails."""
Exception thrown when the socket connection fails.
62599089e1aae11d1e7cf607
class NonNull(Structure):
    """Non-Null modifier: a wrapping type that points to another type and
    enforces that its values are never null.

    Useful for fields with a strong non-nullability guarantee (e.g. a
    database row id).  Enforcement happens within the executor.

    NOTE(review): defining __eq__ without __hash__ makes instances
    unhashable in Python 3 — confirm whether NonNull needs to be hashable.
    """

    def __init__(self, *args, **kwargs):
        super(NonNull, self).__init__(*args, **kwargs)
        # A NonNull of NonNull is meaningless — reject it eagerly.
        assert not isinstance(self._of_type, NonNull), (
            "Can only create NonNull of a Nullable GraphQLType but got: {}."
        ).format(self._of_type)

    def __str__(self):
        # GraphQL notation: "Type!".
        return "{}!".format(self.of_type)

    def __eq__(self, other):
        # Equal iff both are NonNull wrappers of equal types with equal
        # construction arguments.
        return isinstance(other, NonNull) and (
            self.of_type == other.of_type
            and self.args == other.args
            and self.kwargs == other.kwargs
        )
Non-Null Modifier A non-null is a kind of type marker, a wrapping type which points to another type. Non-null types enforce that their values are never null and can ensure an error is raised if this ever occurs during a request. It is useful for fields which you can make a strong guarantee on non-nullability, for example usually the id field of a database row will never be null. Note: the enforcement of non-nullability occurs within the executor.
625990893617ad0b5ee07d38
class Solution:
    """Unique Paths: count monotone lattice paths in an m x n grid.

    @param m: positive integer (1 <= m <= 100)
    @param n: positive integer (1 <= n <= 100)
    @return: the number of distinct down/right paths from the top-left to
             the bottom-right cell
    """

    def uniquePaths(self, m, n):
        # Rolling 1-D DP: row[j] holds the path count for cell (i, j),
        # combining the value from above (previous iteration) with the one
        # from the left (row[j-1]).
        row = [1] * n
        for _ in range(1, m):
            for j in range(1, n):
                row[j] += row[j - 1]
        return row[-1]
@param m: positive integer (1 <= m <= 100) @param n: positive integer (1 <= n <= 100) @return: An integer
6259908997e22403b383cade
class TestWrapper:
    """A wrapper around Tests (to ensure sanity).

    Refuses to run a custom test unless the SConf instance is active, and
    accumulates any config_h text the test produced.
    """

    def __init__(self, test, sconf):
        self.test = test
        self.sconf = sconf

    def __call__(self, *args, **kw):
        # Tests may only run while SConf is active.
        if not self.sconf.active:
            raise SCons.Errors.UserError
        context = CheckContext(self.sconf)
        ret = self.test(*(context,) + args, **kw)
        if self.sconf.config_h is not None:
            self.sconf.config_h_text = self.sconf.config_h_text + context.config_h
        # NOTE(review): Result() is called unconditionally — presumably
        # CheckContext suppresses this fallback message when the test has
        # already reported a result; confirm against CheckContext.
        context.Result("error: no result")
        return ret
A wrapper around Tests (to ensure sanity)
625990897cff6e4e811b7629
class PostListAPIView(ListAPIView):
    """Read-only endpoint listing Post objects.

    Requires token authentication; supports filtering via DateRangeFilter.
    """

    authentication_classes = (TokenAuthentication,)
    permission_classes = [IsAuthenticated, ]
    queryset = Post.objects.all()
    serializer_class = PostSerializer
    filter_backends = [filters.DjangoFilterBackend]
    filterset_class = DateRangeFilter
Lists all posts; requires token authentication and supports date-range filtering.
6259908960cbc95b06365b5f
class DummyConverter(conversion.MySQLConverter):
    """A dummy MySQL converter class that doesn't implement any conversion."""
A dummy MySQL converter class that doesn't implement any conversion.
625990897047854f46340f9c
class LyLyricElement(LyObject):
    """Object representing a single Lyric in lilypond.

    >>> lle = lily.lilyObjects.LyLyricElement("hel_")
    >>> lle
    <music21.lily.lilyObjects.LyLyricElement object 'hel_'>
    >>> print(lle)
    hel_
    """

    def __init__(self, lyMarkupOrString=None):
        super().__init__()
        self.lyMarkupOrString = lyMarkupOrString

    def __repr__(self):
        return (f'<{self.__module__}.{self.__class__.__name__} '
                f'object {self.lyMarkupOrString!r}>')

    def stringOutput(self):
        # Lilypond lyric tokens are space separated.
        return f'{self.lyMarkupOrString} '
Object represents a single Lyric in lilypond. >>> lle = lily.lilyObjects.LyLyricElement("hel_") >>> lle <music21.lily.lilyObjects.LyLyricElement object 'hel_'> >>> print(lle) hel_
62599089fff4ab517ebcf3fe
@abstract
class Transport(EnergyAsset):
    """An abstract class that describes EnergyAssets that can transport
    energy.  It is one of the 5 capabilities in ESDL."""

    # Metamodel attributes (pyecore EAttribute descriptors) for transport
    # capacity and efficiency.
    capacity = EAttribute(eType=EDouble, unique=True, derived=False, changeable=True)
    efficiency = EAttribute(eType=EDouble, unique=True, derived=False, changeable=True)

    def __init__(self, *, capacity=None, efficiency=None, **kwargs):
        super().__init__(**kwargs)
        # Assign only when explicitly provided, so metamodel defaults stick.
        if capacity is not None:
            self.capacity = capacity
        if efficiency is not None:
            self.efficiency = efficiency
An abstract class that describes EnergyAssets that can transport energy. It is one of the 5 capabilities in ESDL
62599089aad79263cf4303a2
class StructEq(object):
    """A simple mixin that defines equality based on the object's attributes.

    Useful when one object might not have all the attributes of the other:
    the comparison iterates the union of both attribute sets.  Subclasses
    should only be used in hash tables if all attribute values are
    hashable, and must not form all-StructEq reference cycles (infinite
    recursion).  Override NONEQ_ATTRS with attribute names to exclude from
    equality checks.
    """

    # Attribute names ignored during equality comparison.
    NONEQ_ATTRS = frozenset()

    def __eq__(self, other):
        if self is other:
            return True
        if type(self) != type(other):
            return False
        # Quick reject on attribute count mismatch.
        if len(self.__dict__) != len(other.__dict__):
            return False
        # Union of both attribute sets, minus the excluded ones; .get()
        # returns None for an attribute missing on one side.
        keys = ((frozenset(self.__dict__.keys())
                 | frozenset(other.__dict__.keys())) - self.NONEQ_ATTRS)
        for key in keys:
            left_elt = self.__dict__.get(key)
            right_elt = other.__dict__.get(key)
            if not (left_elt == right_elt):
                return False
        return True

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        # Fixed: dict.iteritems()/iterkeys() are Python 2 only; .items() and
        # .keys() work on both Python 2 and 3.  Requires hashable values.
        return hash(frozenset(self.__dict__.items()))
A simple mixin that defines equality based on the objects attributes. This class is especially useful if you're in a situation where one object might not have all the attributes of the other, and your __eq__ method would otherwise have to remember to deal with that. Classes extending StructEq should only be used in hash tables if all of the class members are also hashable. Also, classes extending StructEq should not create a cyclic graph where all nodes in the cycle extend StructEq, or there will be an infinite loop. Cycles are allowed, but objects creating cycles should have their own __eq__ methods that prevent the infinite loop. To designate certain attributes that shouldn't be checked for equality, override the class level variable NONEQ_ATTRS with the set of attrs you don't want to check.
62599089aad79263cf4303a3
class SUPGPStabilizationTerm( Term ):
    """SUPG stabilization term, pressure part (``dw_st_supg_p``).

    Definition: sum over elements K of the integral of
    delta_K * grad(p) . ((b . grad) v), where delta_K is a local
    stabilization parameter.

    Arguments: material delta, virtual v, parameter b, state p.
    """

    name = 'dw_st_supg_p'
    arg_types = ('material', 'virtual', 'parameter', 'state')
    function = staticmethod(terms.dw_st_supg_p)

    def __call__( self, diff_var = None, chunk_size = None, **kwargs ):
        delta, virtual, par, state = self.get_args( **kwargs )
        apr, vgr = self.get_approximation(virtual)
        apc, vgc = self.get_approximation(state)
        n_el, n_qp, dim, n_epr = apr.get_v_data_shape(self.integral)
        if diff_var is None:
            # Residual mode: one vector block per element (row DOFs only).
            shape = (chunk_size, 1, dim * n_epr, 1 )
            mode = 0
        elif diff_var == self.get_arg_name( 'state' ):
            # Tangent-matrix mode w.r.t. the state (pressure) variable.
            n_epc = apc.get_v_data_shape(self.integral)[3]
            shape = (chunk_size, 1, dim * n_epr, n_epc )
            mode = 1
        else:
            raise StopIteration
        vec1 = par()
        vec2 = state()
        bf = apr.get_base('v', 0, self.integral)
        # Evaluate chunk by chunk via the C-level term function.
        for out, chunk in self.char_fun( chunk_size, shape ):
            status = self.function( out, vec1, 0, vec2, 0, delta, bf,
                                    vgr, vgc, apr.econn, apc.econn,
                                    chunk, mode )
            yield out, chunk, status
:Description: SUPG stabilization term, pressure part ( :math:`\delta` is a local stabilization parameter). :Definition: .. math:: \sum_{K \in \Ical_h}\int_{T_K} \delta_K\ \nabla p\cdot ((\ul{b} \cdot \nabla) \ul{v}) :Arguments: material : :math:`\delta_K`, virtual : :math:`\ul{v}`, parameter : :math:`\ul{b}`, state : :math:`p`
6259908923849d37ff852ca3
class CollectionResourceTest(TestBase):
    """Tests for _COLLECTION_RESOURCE: URL construction and date handling."""

    def test_all_args(self):
        # A datetime.date argument is serialized as YYYY-MM-DD in the data.
        resource = "RESOURCE"
        date = datetime.date(1962, 1, 13)
        user_id = "bilbo"
        data = { 'a': 1, 'b': 2}
        expected_data = data.copy()
        expected_data['date'] = date.strftime("%Y-%m-%d")
        url = URLBASE + "/%s/%s.json" % (user_id, resource)
        self.common_api_test('_COLLECTION_RESOURCE', (resource, date, user_id, data), {}, (url, expected_data), {})

    def test_date_string(self):
        # A pre-formatted date string is passed through untouched.
        resource = "RESOURCE"
        date = "1962-1-13"
        user_id = "bilbo"
        data = { 'a': 1, 'b': 2}
        expected_data = data.copy()
        expected_data['date'] = date
        url = URLBASE + "/%s/%s.json" % (user_id, resource)
        self.common_api_test('_COLLECTION_RESOURCE',(resource, date, user_id, data), {}, (url, expected_data), {} )

    def test_no_date(self):
        # An omitted date defaults to today.
        resource = "RESOURCE"
        user_id = "bilbo"
        data = { 'a': 1, 'b': 2}
        expected_data = data.copy()
        expected_data['date'] = datetime.date.today().strftime("%Y-%m-%d")
        url = URLBASE + "/%s/%s.json" % (user_id, resource)
        self.common_api_test('_COLLECTION_RESOURCE', (resource, None, user_id, data), {}, (url, expected_data), {})

    def test_no_userid(self):
        # An omitted user id is replaced by "-" in the URL.
        resource = "RESOURCE"
        date = datetime.date(1962, 1, 13)
        user_id = None
        data = { 'a': 1, 'b': 2}
        expected_data = data.copy()
        expected_data['date'] = date.strftime("%Y-%m-%d")
        expected_user_id = "-"
        url = URLBASE + "/%s/%s.json" % (expected_user_id, resource)
        self.common_api_test('_COLLECTION_RESOURCE', (resource, date, user_id, data), {}, (url,expected_data), {})

    def test_no_data(self):
        # With no payload, the date moves into the URL path instead.
        resource = "RESOURCE"
        date = datetime.date(1962, 1, 13)
        user_id = "bilbo"
        data = None
        url = URLBASE + "/%s/%s/date/%s.json" % (user_id, resource, date)
        self.common_api_test('_COLLECTION_RESOURCE', (resource,date,user_id,data), {}, (url,data), {})

    def test_body(self):
        # body() must delegate straight to _COLLECTION_RESOURCE.
        with mock.patch('fitbit.api.Fitbit._COLLECTION_RESOURCE') as coll_resource:
            coll_resource.return_value = 999
            fb = Fitbit(consumer_key='x', consumer_secret='y')
            retval = fb.body(date=1, user_id=2, data=3)
        args, kwargs = coll_resource.call_args
        self.assertEqual(('body',), args)
        self.assertEqual({'date': 1, 'user_id': 2, 'data': 3}, kwargs)
        self.assertEqual(999, retval)
Tests for _COLLECTION_RESOURCE
6259908997e22403b383cae0
class IpAddressScan(BaseWsModel):
    """Model representing a scan of a single IP address.

    ``ended_at`` is nullable: it stays NULL while the scan is in progress.
    """

    started_at = models.DateTimeField(null=False)
    ended_at = models.DateTimeField(null=True)
    # Deleting the IpAddress cascades to its scans.
    ip_address = models.ForeignKey(
        IpAddress,
        related_name="ip_address_scans",
        null=False,
        on_delete=models.CASCADE,
    )
This is a class for representing a scan of a single IP address.
625990894c3428357761bea4
class SumoSolver(ABC):
    """Defines a solver of sumo.

    Args:
        graph (MultiplexNet): network object containing connection data for
            each layer as adjacency matrices
        nbins (int): number of bins to distribute samples into
        bin_size (int): size of each bin; if None, derived from the number
            of samples and the default subsample fraction
        rseed (int): optional random seed for reproducibility
    """

    def __init__(self, graph: MultiplexNet, nbins: int, bin_size: int = None, rseed: int = None):
        if rseed is not None:
            # Seed both numpy's and the stdlib random generators.
            np.random.seed(rseed)
            seed(rseed)
        if not isinstance(graph, MultiplexNet):
            raise ValueError("Unrecognized graph object")
        if bin_size is None:
            bin_size = round(graph.sample_names.size * (1 - RUN_DEFAULTS['subsample']))
        if nbins <= 0 or bin_size > graph.nodes:
            raise ValueError("Incorrect number of bins or bin size")
        self.graph = graph
        self.bin_size = bin_size
        self.nbins = nbins
        self.logger = get_logger()

    @abstractmethod
    def factorize(self, sparsity_penalty: float, k: int, max_iter: int, tol: float,
                  calc_cost: int, logger_name: str, bin_id: int) -> SumoNMFResults:
        """Run the matrix factorization; implemented by concrete solvers."""
        raise NotImplementedError("Not implemented")

    def calculate_avg_adjacency(self) -> np.ndarray:
        """Average the per-layer adjacency matrices, ignoring missing data.

        Entries with zero connections across all layers become NaN and are
        imputed via an SVD-EM pass (svdEM).
        """
        avg_adj = np.zeros((self.graph.nodes, self.graph.nodes))
        connections = self.graph.connections
        # Avoid division by zero below: zero-connection cells become NaN.
        connections[connections == 0] = np.nan
        for a in self.graph.adj_matrices:
            avg_adj = np.nansum(np.dstack((a, avg_adj)), 2)
        avg_adj = avg_adj / self.graph.connections
        if np.sum(np.isnan(avg_adj)) > 0:
            avg_adj = svdEM(avg_adj)
        return avg_adj

    def create_sample_bins(self) -> list:
        """Shuffle sample ids into nbins bins, padding each up to bin_size
        with samples drawn (without replacement) from outside the bin."""
        if any([x is None for x in [self.graph, self.nbins, self.bin_size]]):
            raise ValueError("Solver parameters not set!")
        sample_ids = list(range(self.graph.nodes))
        shuffle(sample_ids)
        # Round-robin split of the shuffled ids.
        bins = [sample_ids[i::self.nbins] for i in range(self.nbins)]
        for i in range(len(bins)):
            # Pad each bin up to bin_size with ids not already in it.
            ms = self.bin_size - len(bins[i])
            bins[i] = np.array(sorted(bins[i] + list(
                np.random.choice(list(set(sample_ids) - set(bins[i])), size=ms, replace=False))))
        return bins
Defines solver of sumo Args: | graph (MultiplexNet): network object, containing data about connections between nodes in each layer in form of adjacency matrices | nbins (int): number of bins, to distribute samples into | bin_size (int): size of bin, if None set to number of samples
62599089f9cc0f698b1c60c0
class PatronOrf:
    """Processes the collection of functional ORF classes.

    Counts which classes contain at least one ORF whose description matches
    the 'protein' pattern, or the 'hydro' pattern combined with a
    13-letter word (section 2.1 of the exercise).
    """

    def __init__(self, clases_orf):
        # clases_orf: dict with an 'ORFS' mapping of ORF id -> record that
        # has 'descripcion' and 'identificacion_clase' keys.
        self.clases_orf = clases_orf
        self.orf_patron_hydro = []
        self.orf_patron_protein = []
        self.id_patron_hydro = []
        self.id_patron_protein = []

    def count_patron(self):
        """Scan all ORF descriptions, collect matches, print the summary
        (in Spanish, as required by the exercise) and return the lists."""
        for key, value in self.clases_orf['ORFS'].items():
            # Lookahead for any 13-letter word in the description...
            if re.search(r'(?=(\b[a-zA-Z]{13}\b))', value['descripcion'], re.I):
                # ...combined with the literal 'hydro' (case-sensitive here,
                # unlike the outer search — NOTE(review): confirm intended).
                if re.search('hydro', value['descripcion']):
                    self.id_patron_hydro.append(value['identificacion_clase'])
                    self.orf_patron_hydro.append(key)
            if re.search(r'protein', value['descripcion'], re.I):
                self.id_patron_protein.append(value['identificacion_clase'])
                self.orf_patron_protein.append(key)
        print('-------------------------------------')
        print('Respuestas del apartado 2.1')
        print('------------------------------------- \n\n', end='\n *')
        print('El número de clases que contiene como mínimo un ORF')
        print('con el patrón del término protein es {}'.format(
            len(set(self.id_patron_protein))), end='\n * ')
        print('El número de clases que contiene como mínimo un ORF')
        print('con el patrón del término hydro y 13 caracteres es {} \n\n'.format(
            len(set(self.id_patron_hydro))))
        return self.orf_patron_protein, self.orf_patron_hydro, self.id_patron_protein, self.id_patron_hydro
Clase para procesar la colección de clases funcionales
6259908926068e7796d4e52c
class VerifyEmailRequestSerializer(serializers.Serializer): <NEW_LINE> <INDENT> code = serializers.CharField()
Serializer for verifying if the request by the verify email API is valid
62599089fff4ab517ebcf400
class DBTable(Table): <NEW_LINE> <INDENT> def __init__(self, storage, cache_size=10): <NEW_LINE> <INDENT> self._storage = storage <NEW_LINE> self._query_cache = LRUCache(capacity=cache_size) <NEW_LINE> data = self._read() <NEW_LINE> if data: <NEW_LINE> <INDENT> self._last_id = max(i for i in data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._last_id = 0 <NEW_LINE> <DEDENT> <DEDENT> def _read(self): <NEW_LINE> <INDENT> return self._storage.read() <NEW_LINE> <DEDENT> def _write(self, values): <NEW_LINE> <INDENT> self._query_cache.clear() <NEW_LINE> self._storage.write(values)
Represents a single TinyDB Table.
62599089ad47b63b2c5a943d
class PruneEvaluators(object): <NEW_LINE> <INDENT> pass
PruneEvaluators answer the question (return true/false) Evaluation.INCLUDE_AND_PRUNE Evaluation.EXCLUDE_AND_PRUNE
6259908923849d37ff852ca5
class TruckTableWidget(QWidget): <NEW_LINE> <INDENT> def __init__(self, number_of_goods, type): <NEW_LINE> <INDENT> QWidget.__init__(self) <NEW_LINE> self.type = type <NEW_LINE> self.truckHLayout = QHBoxLayout(self) <NEW_LINE> self.number_of_goods = number_of_goods <NEW_LINE> self.goodTable = QTableWidget(1,number_of_goods,self) <NEW_LINE> self.truckHLayout.addWidget(self.goodTable,2) <NEW_LINE> self.update_table() <NEW_LINE> <DEDENT> def update_table(self): <NEW_LINE> <INDENT> self.goodTable.setColumnCount(self.number_of_goods) <NEW_LINE> self.goodTable.setSizePolicy(QSizePolicy.MinimumExpanding,QSizePolicy.MinimumExpanding) <NEW_LINE> if self.type == 'inbound': <NEW_LINE> <INDENT> self.goodTable.setVerticalHeaderLabels(['Coming']) <NEW_LINE> self.goodTable.setMaximumHeight(50) <NEW_LINE> <DEDENT> elif self.type == 'outbound': <NEW_LINE> <INDENT> self.goodTable.setVerticalHeaderLabels(['Going']) <NEW_LINE> self.goodTable.setMaximumHeight(52) <NEW_LINE> <DEDENT> elif self.type == 'compound': <NEW_LINE> <INDENT> self.goodTable.setRowCount(2) <NEW_LINE> self.goodTable.setVerticalHeaderLabels(['Coming', 'Going']) <NEW_LINE> self.goodTable.setMaximumHeight(88)
Truck Data Widget
625990897b180e01f3e49e5a
class AliasTest(RepositoryTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(AliasTest, self).setUp() <NEW_LINE> view = self.view <NEW_LINE> self.kind = view.findPath(self._KIND_KIND) <NEW_LINE> self.itemKind = view.findPath(self._ITEM_KIND) <NEW_LINE> self.attrKind = self.itemKind.itsParent['Attribute'] <NEW_LINE> self.newKind = self.kind.newItem('newKind', view) <NEW_LINE> self.typeKind = view.findPath('//Schema/Core/Type') <NEW_LINE> self.aliasKind = view.findPath('//Schema/Core/Alias') <NEW_LINE> self.alias = self.aliasKind.newItem('alias', view) <NEW_LINE> self.dateTimeType = view.findPath('//Schema/Core/DateTime') <NEW_LINE> self.alias.addValue('types',self.dateTimeType) <NEW_LINE> self.intType = view.findPath('//Schema/Core/Integer') <NEW_LINE> self.alias.addValue('types',self.intType) <NEW_LINE> self.dateTimeString = '2004-01-08 12:34:56-0800' <NEW_LINE> self.dateTime = datetime(2004, 1, 8, 12, 34, 56, tzinfo=view.tzinfo.getInstance('US/Pacific')) <NEW_LINE> <DEDENT> def testIsAlias(self): <NEW_LINE> <INDENT> self.assert_(self.alias.isAlias()) <NEW_LINE> self.assert_(not self.dateTimeType.isAlias()) <NEW_LINE> self.assert_(not self.intType.isAlias()) <NEW_LINE> <DEDENT> def testType(self): <NEW_LINE> <INDENT> self.assert_(self.alias.type(1.43) is None) <NEW_LINE> self.assert_(self.alias.type(2.4+8j) is None) <NEW_LINE> self.assert_(self.alias.type(True) is None) <NEW_LINE> self.assert_(self.alias.type(self.alias.itsUUID) is None) <NEW_LINE> self.assert_(self.alias.type(12) is not None) <NEW_LINE> self.assert_(self.alias.type(self.dateTime) is not None) <NEW_LINE> <DEDENT> def testRecognizes(self): <NEW_LINE> <INDENT> self.assert_(not self.alias.recognizes(1.43)) <NEW_LINE> self.assert_(not self.alias.recognizes(2.4+8j)) <NEW_LINE> self.assert_(not self.alias.recognizes(True)) <NEW_LINE> self.assert_(not self.alias.recognizes(self.alias.itsUUID)) <NEW_LINE> self.assert_(self.alias.recognizes(12)) <NEW_LINE> 
self.assert_(self.alias.recognizes(self.dateTime))
Test Aliases
62599089dc8b845886d551a4
class BlngYangTypeNotSupported(Exception): <NEW_LINE> <INDENT> def __init__(self, yang_type, path): <NEW_LINE> <INDENT> message = 'No support for the given YANG type: %s\nPath: %s' % (yang_type, path) <NEW_LINE> super().__init__(message)
Raised when we encounter a YANG type (not a typedef) which is not supported.
625990897c178a314d78e9df
class TestPlanGenerator(object): <NEW_LINE> <INDENT> def __init__(self, modules_list, ): <NEW_LINE> <INDENT> self.modules_list = modules_list <NEW_LINE> <DEDENT> def generate_test_plan_json(self): <NEW_LINE> <INDENT> test_plan_json = { TestPlanKeys.MODULES_LIST: [] } <NEW_LINE> import pdb; pdb.set_trace() <NEW_LINE> for module_name in self.modules_list: <NEW_LINE> <INDENT> module_dict = { TestPlanKeys.MODULE_NAME: module_name, TestPlanKeys.CLASSES_LIST: [] } <NEW_LINE> test_plan_json[TestPlanKeys.MODULES_LIST].append(module_dict) <NEW_LINE> for class_object in get_classes(module_name): <NEW_LINE> <INDENT> class_dict = { TestPlanKeys.CLASS_NAME: class_object.__name__, TestPlanKeys.TESTS_LIST: [] } <NEW_LINE> module_dict[TestPlanKeys.CLASSES_LIST].append(class_dict) <NEW_LINE> for test_method_name, test_method_object in inspect.getmembers(class_object): <NEW_LINE> <INDENT> if test_method_name.startswith('test_') is False or test_method_name == 'test_config_network_types': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> tags = [] <NEW_LINE> if TestPlanKeys.TAGS in dir(test_method_object): <NEW_LINE> <INDENT> tags = test_method_object.tags <NEW_LINE> <DEDENT> test_dict = { TestPlanKeys.TEST_NAME: test_method_name, TestPlanKeys.TEST_DOCSTRING: test_method_object.__doc__, TestPlanKeys.TAGS: [tag.name for tag in tags], } <NEW_LINE> class_dict[TestPlanKeys.TESTS_LIST].append(test_dict) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return test_plan_json
Generates an HTML-based test plan document
625990897047854f46340fa0
class ConnectionError(Error): <NEW_LINE> <INDENT> pass
Error raised when there is a problem connecting to the server.
625990894a966d76dd5f0ad2
class DistributionTimeout(RuntimeError): <NEW_LINE> <INDENT> pass
Raised when files timeout during file distribution.
62599089099cdd3c636761f0
class Manager(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.loggers = {} <NEW_LINE> <DEDENT> def getLogger(self, name): <NEW_LINE> <INDENT> if not isinstance(name, basestring): <NEW_LINE> <INDENT> raise TypeError("A logger name must be string or Unicode") <NEW_LINE> <DEDENT> if isinstance(name, unicode): <NEW_LINE> <INDENT> name = name.encode("utf-8") <NEW_LINE> <DEDENT> logger = self.loggers.get(name) <NEW_LINE> if logger is None: <NEW_LINE> <INDENT> logger = Logger(name) <NEW_LINE> self.loggers[name] = logger <NEW_LINE> <DEDENT> return logger
Simplified version of 'logging.Manager'.
625990897cff6e4e811b762f
class CallforpaperFileFactory(grok.Adapter): <NEW_LINE> <INDENT> grok.implements(IFileFactory) <NEW_LINE> grok.context(ICallforpaper) <NEW_LINE> def __call__(self, name, contentType, data): <NEW_LINE> <INDENT> talk = createObject('collective.conference.talk') <NEW_LINE> notify(ObjectCreatedEvent(talk)) <NEW_LINE> return talk
Custom file factory for programs, which always creates a Track.
62599089d8ef3951e32c8c54
class Agent: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def plan(self, state, occupancy_map, debug=False, is_last_plan=False): <NEW_LINE> <INDENT> raise NotImplementedError
Abstract base class for the various exploration agents. Must have a plan method, which takes the state (position of the robot), and the current map. plan should return path to next state and/or actions needed to get there
625990897b180e01f3e49e5b
class MyClass(models.Model): <NEW_LINE> <INDENT> _name = "inspection_tech.inspection_result.type" <NEW_LINE> name = fields.Char(required = True, size = 64, string = "Inspection Result", index = True, help = "Inspection Result Value", select = True) <NEW_LINE> orderno = fields.Integer("Order") <NEW_LINE> _order = 'orderno, id' <NEW_LINE> _sql_constraints = [ ('name_uniq', 'unique(name)', 'Inspection Result type must be unique!'), ]
Inspection Point Result Type
625990894527f215b58eb796
class MuonKerenFittingTest(systemtesting.MantidSystemTest): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> LoadMuonNexus(Filename='MUT00053591.nxs', DetectorGroupingTable='gp', OutputWorkspace='MUT53591') <NEW_LINE> MuonProcess(InputWorkspace='MUT53591', Mode='Combined', SummedPeriodSet='1', ApplyDeadTimeCorrection=False, DetectorGroupingTable='gp', LoadedTimeZero=0, TimeZero=0, Xmin=0.08, Xmax=10.0, OutputType="PairAsymmetry", PairFirstIndex="0", PairSecondIndex="1", Alpha=1.0, OutputWorkspace='processed') <NEW_LINE> func = "name=FlatBackground,A0=0.1;name=Keren,A=0.1,Delta=0.2,Field=18,Fluct=0.2" <NEW_LINE> Fit(InputWorkspace='processed', Function=func, Output='out', CreateOutput=True) <NEW_LINE> params = mtd['out_Parameters'] <NEW_LINE> Background = params.cell(0,1) <NEW_LINE> Initial = params.cell(1,1) <NEW_LINE> Delta = params.cell(2,1) <NEW_LINE> Field = params.cell(3,1) <NEW_LINE> Fluct = params.cell(4,1) <NEW_LINE> Chisq = params.cell(5,1) <NEW_LINE> self.assertTrue(Chisq < 1.1, "Fitted chi-square too large") <NEW_LINE> self.assertDelta(Background, 0.1623, 0.0046, "Fitted A0 outside errors") <NEW_LINE> self.assertDelta(Initial, 0.0389, 0.0040, "Fitted A outside errors") <NEW_LINE> self.assertDelta(Delta, 0.96, 0.11, "Fitted Delta outside errors") <NEW_LINE> self.assertDelta(Field, 20.0, 1.0, "Fitted Field outside errors") <NEW_LINE> self.assertDelta(Fluct, 0.1, 0.01, "Fitted Fluct outside errors")
Tests the Keren fitting function on a real workspace, to check results vs. WiMDA
62599089656771135c48ae27
class JNTTFactoryConfigCommon(JNTTFactoryCommon): <NEW_LINE> <INDENT> def test_021_value_entry_config(self, **kwargs): <NEW_LINE> <INDENT> node_uuid='test_node' <NEW_LINE> main_value = self.get_main_value(node_uuid=node_uuid, **kwargs) <NEW_LINE> print(main_value) <NEW_LINE> config_value = main_value.create_config_value() <NEW_LINE> print(config_value) <NEW_LINE> main_value.set_config(node_uuid, 0, '0') <NEW_LINE> self.assertEqual('0', main_value.get_config(node_uuid, 0)) <NEW_LINE> main_value.set_config(node_uuid, 0, '5') <NEW_LINE> self.assertEqual('5', main_value.get_config(node_uuid, 0))
Test the value factory
6259908926068e7796d4e530
class PollAnswerManager(core_managers.CoreStateManager): <NEW_LINE> <INDENT> def get_poll_percentages(self): <NEW_LINE> <INDENT> total_vote_counts = 0 <NEW_LINE> vote_count_averages = [] <NEW_LINE> poll_answers = self.permitted() <NEW_LINE> for poll_answer in poll_answers: <NEW_LINE> <INDENT> total_vote_counts += poll_answer.vote_count <NEW_LINE> <DEDENT> for poll_answer in poll_answers: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> vote_count_averages.append( ( poll_answer, float('%0.2f' % ( (poll_answer.vote_count / float(total_vote_counts)) * 100 ) ) ) ) <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> vote_count_averages.append(0.0) <NEW_LINE> <DEDENT> <DEDENT> return vote_count_averages
Return percentage count the options of a poll have had.
6259908ae1aae11d1e7cf60b
class Down(nn.Module): <NEW_LINE> <INDENT> def __init__( self, in_channels: int, out_channels: int, pool: t.Literal["max", "avg"] = "max" ) -> None: <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.block = nn.Sequential( SENextBottleneck( in_channels=in_channels, out_channels=out_channels, stride=2, is_shortcut=True, pool=pool, ), SENextBottleneck( in_channels=out_channels, out_channels=out_channels, stride=1, is_shortcut=False, ), ) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> return self.block(x)
Downscaling with maxpool then double conv
6259908aad47b63b2c5a9441
class Scoreboard: <NEW_LINE> <INDENT> def __init__(self, ai_settings, screen, stats): <NEW_LINE> <INDENT> self.screen = screen <NEW_LINE> self.screen_rect = screen.get_rect() <NEW_LINE> self.ai_settings = ai_settings <NEW_LINE> self.stats = stats <NEW_LINE> self.text_color = (190, 255, 70) <NEW_LINE> self.font = pygame.font.SysFont(None, 48) <NEW_LINE> self.prep_score() <NEW_LINE> self.prep_high_score() <NEW_LINE> self.prep_level() <NEW_LINE> self.prep_ships() <NEW_LINE> <DEDENT> def prep_score(self): <NEW_LINE> <INDENT> rounded_score = round(self.stats.score, -1) <NEW_LINE> score_str = "{:,}".format(rounded_score) <NEW_LINE> self.score_image = self.font.render(score_str, True, self.text_color, self.ai_settings.bg_color) <NEW_LINE> self.score_rect = self.score_image.get_rect() <NEW_LINE> self.score_rect.right = self.screen_rect.right - 20 <NEW_LINE> self.score_rect.top = 20 <NEW_LINE> <DEDENT> def prep_high_score(self): <NEW_LINE> <INDENT> high_score = round(self.stats.high_score, -1) <NEW_LINE> high_score_str = "{:,}".format(high_score) <NEW_LINE> self.high_score_image = self.font.render(high_score_str, True, self.text_color, self.ai_settings.bg_color) <NEW_LINE> self.high_score_rect = self.high_score_image.get_rect() <NEW_LINE> self.high_score_rect.centerx = self.screen_rect.centerx <NEW_LINE> self.high_score_rect.top = self.score_rect.top <NEW_LINE> <DEDENT> def prep_level(self): <NEW_LINE> <INDENT> self.level_image = self.font.render(str(self.stats.level), True, self.text_color, self.ai_settings.bg_color) <NEW_LINE> self.level_rect = self.level_image.get_rect() <NEW_LINE> self.level_rect.right = self.score_rect.right <NEW_LINE> self.level_rect.top = self.score_rect.bottom + 10 <NEW_LINE> <DEDENT> def prep_ships(self): <NEW_LINE> <INDENT> self.ships = Group() <NEW_LINE> for ship_number in range(self.stats.ships_left): <NEW_LINE> <INDENT> ship = Ship(self.ai_settings, self.screen) <NEW_LINE> ship.rect.x = 10 + ship_number * ship.rect.width <NEW_LINE> ship.rect.y 
= 10 <NEW_LINE> self.ships.add(ship) <NEW_LINE> <DEDENT> <DEDENT> def show_score(self): <NEW_LINE> <INDENT> self.screen.blit(self.score_image, self.score_rect) <NEW_LINE> self.screen.blit(self.high_score_image, self.high_score_rect) <NEW_LINE> self.screen.blit(self.level_image, self.level_rect) <NEW_LINE> self.ships.draw(self.screen)
A class to report scoring information.
6259908ad8ef3951e32c8c55
class Aircraft(GameObject.GameObject): <NEW_LINE> <INDENT> def __init__(self, location, owner, aircraft_id, unique_id, max_speed, initial_location, health): <NEW_LINE> <INDENT> GameObject.GameObject.__init__(self, location, owner, aircraft_id, unique_id) <NEW_LINE> self.__max_speed = max_speed <NEW_LINE> self.__initial_location = initial_location <NEW_LINE> self.__current_health = health <NEW_LINE> <DEDENT> @property <NEW_LINE> def max_speed(self): <NEW_LINE> <INDENT> return self.__max_speed <NEW_LINE> <DEDENT> @max_speed.setter <NEW_LINE> def max_speed(self, value): <NEW_LINE> <INDENT> self.__max_speed = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def initial_location(self): <NEW_LINE> <INDENT> return self.__initial_location <NEW_LINE> <DEDENT> @initial_location.setter <NEW_LINE> def initial_location(self, value): <NEW_LINE> <INDENT> self.__initial_location = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def current_health(self): <NEW_LINE> <INDENT> return self.__current_health <NEW_LINE> <DEDENT> @current_health.setter <NEW_LINE> def current_health(self, value): <NEW_LINE> <INDENT> self.__current_health = value
This is a base class for all moving objects in the game
6259908a4527f215b58eb797
class Storage(dict): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> __setattr__ = dict.__setitem__ <NEW_LINE> __delattr__ = dict.__delitem__ <NEW_LINE> __getitem__ = dict.get <NEW_LINE> __getattr__ = dict.get <NEW_LINE> __repr__ = lambda self: '<Storage %s>' % dict.__repr__(self) <NEW_LINE> __getstate__ = lambda self: None <NEW_LINE> __copy__ = lambda self: Storage(self) <NEW_LINE> def getlist(self, key): <NEW_LINE> <INDENT> value = self.get(key, []) <NEW_LINE> return value if not value else value if isinstance(value, (list, tuple)) else [value] <NEW_LINE> <DEDENT> def getfirst(self, key, default=None): <NEW_LINE> <INDENT> values = self.getlist(key) <NEW_LINE> return values[0] if values else default <NEW_LINE> <DEDENT> def getlast(self, key, default=None): <NEW_LINE> <INDENT> values = self.getlist(key) <NEW_LINE> return values[-1] if values else default
A Storage object is like a dictionary except `obj.foo` can be used in addition to `obj['foo']`, and setting obj.foo = None deletes item foo. >>> o = Storage(a=1) >>> print o.a 1 >>> o['a'] 1 >>> o.a = 2 >>> print o['a'] 2 >>> del o.a >>> print o.a None
6259908a50812a4eaa6219bd
class PygameInput(Input): <NEW_LINE> <INDENT> def sensingLoop(self): <NEW_LINE> <INDENT> if 'Scale' in self: <NEW_LINE> <INDENT> scale = self['Scale'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> scale = 1 <NEW_LINE> <DEDENT> if self['FollowMouse']: <NEW_LINE> <INDENT> self.respond({Strings.LOCATION: pygame.mouse.get_pos()}) <NEW_LINE> return <NEW_LINE> <DEDENT> for event in pygame.event.get(): <NEW_LINE> <INDENT> if event.type is KEYDOWN: <NEW_LINE> <INDENT> if event.key == 27: <NEW_LINE> <INDENT> self.die() <NEW_LINE> <DEDENT> if self['Keyboard']: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.respond({'Key': event.key, 'KeyChar': chr(event.key)}) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.respond({'Key': event.key}) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pygame.event.post(event) <NEW_LINE> <DEDENT> <DEDENT> if event.type is MOUSEBUTTONDOWN: <NEW_LINE> <INDENT> if self['Clicks']: <NEW_LINE> <INDENT> self.respond({Strings.LOCATION: pygame.mouse.get_pos()}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pygame.event.post(event)
PygameInput is an input tied to the PygameDisplay. Specify: <FollowMouse>True</FollowMouse> to receive an input every frame specifying the current mouse position. <Keyboard>True</Keyboard> to grab keystrokes <Clicks>True</Clicks> to grab clicks. NB: If follow mouse is enabled, PygameInput will not return mouse and keypresses. You can, however, instantiate other PygameInputs in the XML that will capture mouse and keypresses.
6259908a283ffb24f3cf5490
class Slx9640_IfIndex(Slx_IfIndex_Core): <NEW_LINE> <INDENT> def __init__(self, interface, linecard='', speed='', tunnel_type='', *args, **kwargs): <NEW_LINE> <INDENT> self.data = kwargs <NEW_LINE> if not self.data: <NEW_LINE> <INDENT> self.data['interface'] = interface <NEW_LINE> self.data['linecard'] = linecard <NEW_LINE> self.data['speed'] = speed <NEW_LINE> self.data['tunnel_type'] = tunnel_type <NEW_LINE> <DEDENT> self.common_data() <NEW_LINE> self.speed_over_ride = True <NEW_LINE> self.valid_linecards = [] <NEW_LINE> self.dev_family = 'slx' <NEW_LINE> self.max_slot = 0 <NEW_LINE> self.min_slot = 0 <NEW_LINE> self.max_ve_id = 8192 <NEW_LINE> self.max_lo_id = 255 <NEW_LINE> self.max_po_id = 256 <NEW_LINE> self.speed_map = SLXRSpeedMap() <NEW_LINE> self.mgmt_intfid_value = [0] <NEW_LINE> self.validate_kwargs() <NEW_LINE> self.init_mapping() <NEW_LINE> self.expand_interface() <NEW_LINE> if int(self.sub_port) != 0: <NEW_LINE> <INDENT> self.speed_over_ride = False <NEW_LINE> <DEDENT> self.map_vars_to_bits() <NEW_LINE> return <NEW_LINE> <DEDENT> def init_mapping(self): <NEW_LINE> <INDENT> self.mapping = PortMapping() <NEW_LINE> for interface in range(1, 25): <NEW_LINE> <INDENT> self.mapping.add_interface(str(interface), chip_port=0, chip_num=0, valid_speeds=['10g'], breakout=False) <NEW_LINE> <DEDENT> for interface in range(25, 37): <NEW_LINE> <INDENT> self.mapping.add_interface(str(interface), chip_port=0, chip_num=0, valid_speeds=['40g', '100g'], breakout=True, breakout_speeds=['10g', '25g'])
Device specific classes validate against the device specific information.
6259908aa05bb46b3848bf1e
class LinkUser(ABC): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def can_navigate_from_start(self, link) -> bool: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def can_navigate_to_start(self, link) -> bool: <NEW_LINE> <INDENT> pass
Abstract parent class for all kinds link-users.
6259908a99fddb7c1ca63bd4
class Float(Field): <NEW_LINE> <INDENT> type = 'float' <NEW_LINE> _slots = { '_digits': None, 'group_operator': None, } <NEW_LINE> def __init__(self, string=Default, digits=Default, **kwargs): <NEW_LINE> <INDENT> super(Float, self).__init__(string=string, _digits=digits, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def digits(self): <NEW_LINE> <INDENT> if callable(self._digits): <NEW_LINE> <INDENT> with LazyCursor() as cr: <NEW_LINE> <INDENT> return self._digits(cr) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return self._digits <NEW_LINE> <DEDENT> <DEDENT> _related__digits = property(attrgetter('_digits')) <NEW_LINE> _related_group_operator = property(attrgetter('group_operator')) <NEW_LINE> _description_digits = property(attrgetter('digits')) <NEW_LINE> _column_digits = property(lambda self: not callable(self._digits) and self._digits) <NEW_LINE> _column_digits_compute = property(lambda self: callable(self._digits) and self._digits) <NEW_LINE> _column_group_operator = property(attrgetter('group_operator')) <NEW_LINE> def convert_to_cache(self, value, record, validate=True): <NEW_LINE> <INDENT> value = float(value or 0.0) <NEW_LINE> if not validate: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> digits = self.digits <NEW_LINE> return float_round(value, precision_digits=digits[1]) if digits else value <NEW_LINE> <DEDENT> def convert_to_export(self, value, env): <NEW_LINE> <INDENT> if value or value == 0.0: <NEW_LINE> <INDENT> return value if env.context.get('export_raw_data') else ustr(value) <NEW_LINE> <DEDENT> return ''
The precision digits are given by the attribute :param digits: a pair (total, decimal), or a function taking a database cursor and returning a pair (total, decimal)
6259908aa8370b77170f1fbd
class HistoryStub(dnf.yum.history.YumHistory): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.old_data_pkgs = {} <NEW_LINE> <DEDENT> def _old_data_pkgs(self, tid, sort=True): <NEW_LINE> <INDENT> if sort: <NEW_LINE> <INDENT> raise NotImplementedError('sorting not implemented yet') <NEW_LINE> <DEDENT> return self.old_data_pkgs.get(tid, ())[:] <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def old(self, tids=[], limit=None, *_args, **_kwargs): <NEW_LINE> <INDENT> create = lambda tid: dnf.yum.history.YumHistoryTransaction(self, (int(tid), 0, '0:685cc4ac4ce31b9190df1604a96a3c62a3100c35', 1, '1:685cc4ac4ce31b9190df1604a96a3c62a3100c36', 0, 0)) <NEW_LINE> sorted_all_tids = sorted(self.old_data_pkgs.keys(), reverse=True) <NEW_LINE> trxs = (create(tid) for tid in tids or sorted_all_tids if tid in self.old_data_pkgs) <NEW_LINE> limited = trxs if limit is None else itertools.islice(trxs, limit) <NEW_LINE> return tuple(limited)
Stub of dnf.yum.history.YumHistory for easier testing.
6259908a5fc7496912d49063
class Payload(object): <NEW_LINE> <INDENT> def __init__(self, packets=None, encoded_payload=None): <NEW_LINE> <INDENT> self.packets = packets or [] <NEW_LINE> if encoded_payload is not None: <NEW_LINE> <INDENT> self.decode(encoded_payload) <NEW_LINE> <DEDENT> <DEDENT> def encode(self, b64=False): <NEW_LINE> <INDENT> encoded_payload = b'' <NEW_LINE> for pkt in self.packets: <NEW_LINE> <INDENT> encoded_packet = pkt.encode(b64=b64) <NEW_LINE> packet_len = len(encoded_packet) <NEW_LINE> if b64: <NEW_LINE> <INDENT> encoded_payload += str(packet_len).encode('utf-8') + b':' + encoded_packet <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> binary_len = b'' <NEW_LINE> while packet_len != 0: <NEW_LINE> <INDENT> binary_len = six.int2byte(packet_len % 10) + binary_len <NEW_LINE> packet_len = int(packet_len / 10) <NEW_LINE> <DEDENT> if not pkt.binary: <NEW_LINE> <INDENT> encoded_payload += b'\0' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> encoded_payload += b'\1' <NEW_LINE> <DEDENT> encoded_payload += binary_len + b'\xff' + encoded_packet <NEW_LINE> <DEDENT> <DEDENT> return encoded_payload <NEW_LINE> <DEDENT> def decode(self, encoded_payload): <NEW_LINE> <INDENT> fixed_double_encode = False <NEW_LINE> self.packets = [] <NEW_LINE> while encoded_payload: <NEW_LINE> <INDENT> if six.byte2int(encoded_payload[0:1]) <= 1: <NEW_LINE> <INDENT> packet_len = 0 <NEW_LINE> i = 1 <NEW_LINE> while six.byte2int(encoded_payload[i:i + 1]) != 255: <NEW_LINE> <INDENT> packet_len = packet_len * 10 + six.byte2int( encoded_payload[i:i + 1]) <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> self.packets.append(packet.Packet( encoded_packet=encoded_payload[i + 1:i + 1 + packet_len])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> i = encoded_payload.find(b':') <NEW_LINE> if i == -1: <NEW_LINE> <INDENT> raise ValueError('invalid payload') <NEW_LINE> <DEDENT> packet_len = int(encoded_payload[0:i]) <NEW_LINE> if not fixed_double_encode: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> fixed_payload = 
encoded_payload.decode( 'utf-8').encode('raw_unicode_escape') <NEW_LINE> fixed_payload.decode('utf-8') <NEW_LINE> encoded_payload = fixed_payload <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> packet_len += len(encoded_payload) - len(fixed_payload) <NEW_LINE> <DEDENT> fixed_double_encode = True <NEW_LINE> <DEDENT> pkt = encoded_payload[i + 1: i + 1 + packet_len] <NEW_LINE> self.packets.append(packet.Packet(encoded_packet=pkt)) <NEW_LINE> <DEDENT> encoded_payload = encoded_payload[i + 1 + packet_len:]
Engine.IO payload.
6259908a099cdd3c636761f2
class TestBuildingApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = Infoplus.api.building_api.BuildingApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_add_building(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_add_building_audit(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_add_building_file(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_add_building_file_by_url(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_add_building_tag(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_delete_building(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_delete_building_file(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_delete_building_tag(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_building_by_filter(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_building_by_id(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_building_files(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_building_tags(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_duplicate_building_by_id(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_update_building(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_update_building_custom_fields(self): <NEW_LINE> <INDENT> pass
BuildingApi unit test stubs
6259908ad486a94d0ba2dba6
class DistributionTestBase(object): <NEW_LINE> <INDENT> dist = None <NEW_LINE> params = None <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> random.seed(0) <NEW_LINE> numpy.random.seed(0) <NEW_LINE> <DEDENT> def _sample_postprocessing(self, sample): <NEW_LINE> <INDENT> return sample <NEW_LINE> <DEDENT> def dist_params(self): <NEW_LINE> <INDENT> if self.params is None: <NEW_LINE> <INDENT> params = [tuple(1 + rand(self.dist.numargs))] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> params = self.params <NEW_LINE> <DEDENT> return params <NEW_LINE> <DEDENT> def test_mixed_density_goodness_of_fit(self): <NEW_LINE> <INDENT> for param in self.dist_params(): <NEW_LINE> <INDENT> dim = get_dim(self.dist.rvs(*param, size=2)[0]) <NEW_LINE> sample_count = NUM_BASE_SAMPLES + NUM_SAMPLES_SCALE * dim <NEW_LINE> samples = self.dist.rvs(*param, size=sample_count) <NEW_LINE> samples = list(map(self._sample_postprocessing, samples)) <NEW_LINE> probabilities = [self.pdf(sample, *param) for sample in samples] <NEW_LINE> gof = mixed_density_goodness_of_fit(samples, probabilities) <NEW_LINE> self.assertGreater(gof, TEST_FAILURE_RATE) <NEW_LINE> <DEDENT> <DEDENT> def test_good_fit(self): <NEW_LINE> <INDENT> for param in self.dist_params(): <NEW_LINE> <INDENT> dim = get_dim(self.dist.rvs(*param, size=2)[0]) <NEW_LINE> sample_count = NUM_BASE_SAMPLES + NUM_SAMPLES_SCALE * dim <NEW_LINE> samples = self.dist.rvs(*param, size=sample_count) <NEW_LINE> samples = list(map(self._sample_postprocessing, samples)) <NEW_LINE> probabilities = [self.pdf(sample, *param) for sample in samples] <NEW_LINE> gof = self.goodness_of_fit(samples, probabilities) <NEW_LINE> self.assertGreater(gof, TEST_FAILURE_RATE) <NEW_LINE> <DEDENT> <DEDENT> def goodness_of_fit(self, samples, probabilities): <NEW_LINE> <INDENT> raise NotImplementedError
Abstract base class for probability distribution unit tests. This class supplies two test methods, :meth:`.test_goodness_of_fit` and :meth:`.test_mixed_density_goodness_of_fit` for testing the goodness of fit functions. Subclasses must override and implement one class attribute and two instance methods. The :attr:`.dist` class attribute must be set to one of SciPy probability distribution constructors in :mod:`scipy.stats`. The :meth:`.goodness_of_fit` method must return the result of calling one of the goodness of fit functions being tested. The :meth:`.probabilites` method must return an object representing the probabilities for each sample; the output depends on the format of the inputs to the :meth:`.goodness_of_fit` method. Subclasses may also set the :attr:`.params` attribute, which is a list of tuples that will be provided as arguments to the underlying SciPy distribution constructor as specified in :attr:`.dist`. If not specified, random arguments will be provided. If samples drawn from :attr:`.dist` must be modified in some way before the PDF or PMF can be computed, then subclasses may override the :meth:`._sample_postprocessing` method.
6259908abe7bc26dc9252c4e
class Online(DeclarativeBase): <NEW_LINE> <INDENT> __tablename__ = 'online' <NEW_LINE> __table_args__ = { 'mysql_engine':'MEMORY', } <NEW_LINE> id = Column('id', INTEGER(), primary_key=True, nullable=False, doc='online id') <NEW_LINE> user = Column('user', VARCHAR(length=32), nullable=False, doc='weixin account') <NEW_LINE> nas_addr = Column('nas_addr', VARCHAR(length=15), nullable=False, doc='bas address') <NEW_LINE> acct_session_id = Column('acct_session_id', VARCHAR(length=64), nullable=False, doc='session id') <NEW_LINE> acct_start_time = Column('acct_start_time', VARCHAR(length=19), nullable=False, doc='session start time') <NEW_LINE> framed_ipaddr = Column('framed_ipaddr', VARCHAR(length=32), nullable=False, doc='ip address') <NEW_LINE> mac_addr = Column('mac_addr', VARCHAR(length=17), nullable=False, doc='mac address') <NEW_LINE> billing_times = Column('billing_times', INTEGER(), nullable=False, doc='bill times') <NEW_LINE> input_total = Column('input_total', INTEGER(), doc='input flow (kb)') <NEW_LINE> output_total = Column('output_total', INTEGER(), doc='output flow (kb)') <NEW_LINE> start_source = Column('start_source', SMALLINT(), nullable=False, doc='')
user online table user column as index
6259908a3617ad0b5ee07d42
class DimError(OptimizationError): <NEW_LINE> <INDENT> def __init__(self, value='expressions have incompatible dimensions'): <NEW_LINE> <INDENT> self.value = value <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr(self.value)
Occurs when combining two expressions of incompatible dimension.
6259908a23849d37ff852cab
class EmployeeXlsxReportView(FormView): <NEW_LINE> <INDENT> template_name = 'clock-report.html' <NEW_LINE> form_class = ClockSearchForm <NEW_LINE> success_url = reverse_lazy('employee-clock-report') <NEW_LINE> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> form_class = self.get_form_class() <NEW_LINE> form = self.get_form(form_class) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> username = self.request.session['username'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> messages.add_message(self.request, messages.ERROR, 'Sign in to continue') <NEW_LINE> return redirect(reverse_lazy('employee-view')) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> employee = Employee.objects.get(username=username) <NEW_LINE> <DEDENT> except (ObjectDoesNotExist, MultipleObjectsReturned): <NEW_LINE> <INDENT> messages.add_message(self.request, messages.ERROR, 'There was an error in your request. Please try again.') <NEW_LINE> return redirect(reverse_lazy('employee-view')) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> from_date_string = '{y}{m}{d}'.format( y=form.cleaned_data['from_year'], m=form.cleaned_data['from_month'], d=form.cleaned_data['from_day'] ) <NEW_LINE> to_date_string = '{y}{m}{d}'.format( y=form.cleaned_data['to_year'], m=form.cleaned_data['to_month'], d=form.cleaned_data['to_day'] ) <NEW_LINE> from_date = datetime.strptime(from_date_string, "%Y%m%d").date() <NEW_LINE> to_date = datetime.strptime(to_date_string, "%Y%m%d").date() <NEW_LINE> clocks = EmployeeClock.objects.filter( timestamp__gt=from_date, timestamp__lt=to_date, employee=employee ) <NEW_LINE> context = self.get_context_data(**kwargs) <NEW_LINE> context['clocks'] = clocks <NEW_LINE> output = io.BytesIO() <NEW_LINE> workbook = xlsxwriter.Workbook(output, {'in_memory': True}) <NEW_LINE> worksheet = workbook.add_worksheet() <NEW_LINE> worksheet.write(0, 0, clocks[0].timestamp) <NEW_LINE> workbook.close() <NEW_LINE> output.seek(0) <NEW_LINE> response = 
HttpResponse(output.read(), content_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet") <NEW_LINE> response['Content-Disposition'] = "attachment; filename=report.xlsx" <NEW_LINE> return response <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> messages.add_message(self.request, messages.ERROR, 'Error: {error}'.format(error=e)) <NEW_LINE> return redirect(reverse_lazy('employee-clock-report')) <NEW_LINE> <DEDENT> messages.add_message(self.request, messages.ERROR, 'Something went wrong. Please try again.') <NEW_LINE> return redirect(reverse_lazy('employee-view')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.form_invalid(form, **kwargs)
View that allows employee objects to generate a report for clock objects by providing the day, month, and year of the clocks. Uses a range filter
6259908a60cbc95b06365b64
class FortePiano(Dynamic): <NEW_LINE> <INDENT> _command = "\\fp"
FortePiano (\\fp).
6259908aadb09d7d5dc0c14b
@dataclass <NEW_LINE> class BeitModelOutputWithPooling(BaseModelOutputWithPooling): <NEW_LINE> <INDENT> pass
Class for outputs of [`BeitModel`]. Args: last_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`): Sequence of hidden-states at the output of the last layer of the model. pooler_output (`torch.FloatTensor` of shape `(batch_size, hidden_size)`): Average of the last layer hidden states of the patch tokens (excluding the *[CLS]* token) if *config.use_mean_pooling* is set to True. If set to False, then the final hidden state of the *[CLS]* token will be returned. hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`): Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer plus the initial embedding outputs. attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`): Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`. Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.
6259908a7b180e01f3e49e5d
class Program(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.completed_logging_setup = False <NEW_LINE> <DEDENT> def main(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.main_setup() <NEW_LINE> logger.debug("setup completed: habitat ready") <NEW_LINE> <DEDENT> except SystemExit: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> if self.completed_logging_setup: <NEW_LINE> <INDENT> logger.exception("uncaught exception in main_setup, exiting") <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> logger.debug("habitat: starting up") <NEW_LINE> self.parser.run() <NEW_LINE> logger.debug("main_execution finished gracefully") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> logger.exception("uncaught exception in main_execution, panic!") <NEW_LINE> raise <NEW_LINE> <DEDENT> logger.info("habitat: main() returning gracefully") <NEW_LINE> <DEDENT> def main_setup(self): <NEW_LINE> <INDENT> base_path = os.path.split(os.path.abspath(__file__))[0] <NEW_LINE> with open(os.path.join(base_path, "config.yml")) as f: <NEW_LINE> <INDENT> self.options = yaml.load(f) <NEW_LINE> <DEDENT> setup_logging(self.options["log_stderr_level"], self.options["log_file"], self.options["log_file_level"]) <NEW_LINE> self.completed_logging_setup = True <NEW_LINE> self.parser = parser.parser.Parser(self.options)
Program provides the :py:meth:`main`, :py:meth:`shutdown` and :py:meth:`reload` methods
6259908a656771135c48ae29
class OrgBehavior(VCardBehavior): <NEW_LINE> <INDENT> hasNative = True <NEW_LINE> @staticmethod <NEW_LINE> def transformToNative(obj): <NEW_LINE> <INDENT> if obj.isNative: <NEW_LINE> <INDENT> return obj <NEW_LINE> <DEDENT> obj.isNative = True <NEW_LINE> obj.value = splitFields(obj.value) <NEW_LINE> return obj <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def transformFromNative(obj): <NEW_LINE> <INDENT> if not obj.isNative: <NEW_LINE> <INDENT> return obj <NEW_LINE> <DEDENT> obj.isNative = False <NEW_LINE> obj.value = serializeFields(obj.value) <NEW_LINE> return obj
A list of organization values and sub-organization values.
6259908af9cc0f698b1c60c4
class IPView(ip.IPythonView): <NEW_LINE> <INDENT> def onKeyPressExtend(self, event): <NEW_LINE> <INDENT> if ip.IPythonView.onKeyPressExtend(self, event): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if event.string == '\x04': <NEW_LINE> <INDENT> self.destroy()
Extend IPythonView to support closing with Ctrl+D
6259908aa05bb46b3848bf1f
class SVMCommandPrintStaticFields(gdb.Command): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__('svm-print-static-fields', gdb.COMMAND_USER) <NEW_LINE> <DEDENT> def complete(self, text, word): <NEW_LINE> <INDENT> return [x for x in ['enable', 'disable'] if x.startswith(text)] <NEW_LINE> <DEDENT> def invoke(self, arg, from_tty): <NEW_LINE> <INDENT> if arg == '': <NEW_LINE> <INDENT> print('svm-print-static-fields is %s' % {True : 'enabled', False : 'disabled'}.get(SVMUtil.print_static_fields)) <NEW_LINE> <DEDENT> elif arg == 'on' or arg == 'enable': <NEW_LINE> <INDENT> SVMUtil.print_static_fields = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> SVMUtil.print_static_fields = False
Use this command to enable/disable printing of static field members
6259908a8a349b6b43687e50
class DeleteVolume(command.Command): <NEW_LINE> <INDENT> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(DeleteVolume, self).get_parser(prog_name) <NEW_LINE> parser.add_argument( 'volumes', metavar='<volume>', nargs="+", help=_('Volume(s) to delete (name or ID)'), ) <NEW_LINE> parser.add_argument( '--force', action='store_true', default=False, help=_('Attempt forced removal of volume(s), regardless of state ' '(defaults to False)'), ) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> volume_client = self.app.client_manager.volume <NEW_LINE> result = 0 <NEW_LINE> for i in parsed_args.volumes: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> volume_obj = utils.find_resource( volume_client.volumes, i) <NEW_LINE> if parsed_args.force: <NEW_LINE> <INDENT> volume_client.volumes.force_delete(volume_obj.id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> volume_client.volumes.delete(volume_obj.id) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> result += 1 <NEW_LINE> LOG.error(_("Failed to delete volume with " "name or ID '%(volume)s': %(e)s"), {'volume': i, 'e': e}) <NEW_LINE> <DEDENT> <DEDENT> if result > 0: <NEW_LINE> <INDENT> total = len(parsed_args.volumes) <NEW_LINE> msg = (_("%(result)s of %(total)s volumes failed " "to delete.") % {'result': result, 'total': total}) <NEW_LINE> raise exceptions.CommandError(msg)
Delete volume(s)
6259908aad47b63b2c5a9445
class PhotoEffect(BaseEffect): <NEW_LINE> <INDENT> transpose_method = models.CharField(_('rotate or flip'), max_length=15, blank=True, choices=IMAGE_TRANSPOSE_CHOICES) <NEW_LINE> color = models.FloatField(_('color'), default=1.0, help_text=_("A factor of 0.0 gives a black and white image, a factor of 1.0 gives the original image.")) <NEW_LINE> brightness = models.FloatField(_('brightness'), default=1.0, help_text=_("A factor of 0.0 gives a black image, a factor of 1.0 gives the original image.")) <NEW_LINE> contrast = models.FloatField(_('contrast'), default=1.0, help_text=_("A factor of 0.0 gives a solid grey image, a factor of 1.0 gives the original image.")) <NEW_LINE> sharpness = models.FloatField(_('sharpness'), default=1.0, help_text=_("A factor of 0.0 gives a blurred image, a factor of 1.0 gives the original image.")) <NEW_LINE> filters = models.CharField(_('filters'), max_length=200, blank=True, help_text=_(IMAGE_FILTERS_HELP_TEXT)) <NEW_LINE> reflection_size = models.FloatField(_('size'), default=0, help_text=_("The height of the reflection as a percentage of the orignal image. A factor of 0.0 adds no reflection, a factor of 1.0 adds a reflection equal to the height of the orignal image.")) <NEW_LINE> reflection_strength = models.FloatField(_('strength'), default=0.6, help_text=_("The initial opacity of the reflection gradient.")) <NEW_LINE> background_color = models.CharField(_('color'), max_length=7, default="#FFFFFF", help_text=_("The background color of the reflection gradient. 
Set this to match the background color of your page.")) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _("photo effect") <NEW_LINE> verbose_name_plural = _("photo effects") <NEW_LINE> app_label = 'photos' <NEW_LINE> <DEDENT> def pre_process(self, im): <NEW_LINE> <INDENT> if self.transpose_method != '': <NEW_LINE> <INDENT> method = getattr(PILImage, self.transpose_method) <NEW_LINE> im = im.transpose(method) <NEW_LINE> <DEDENT> if im.mode != 'RGB' and im.mode != 'RGBA': <NEW_LINE> <INDENT> return im <NEW_LINE> <DEDENT> for name in ['Color', 'Brightness', 'Contrast', 'Sharpness']: <NEW_LINE> <INDENT> factor = getattr(self, name.lower()) <NEW_LINE> if factor != 1.0: <NEW_LINE> <INDENT> im = getattr(ImageEnhance, name)(im).enhance(factor) <NEW_LINE> <DEDENT> <DEDENT> for name in self.filters.split('->'): <NEW_LINE> <INDENT> image_filter = getattr(ImageFilter, name.upper(), None) <NEW_LINE> if image_filter is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> im = im.filter(image_filter) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return im <NEW_LINE> <DEDENT> def post_process(self, im): <NEW_LINE> <INDENT> if self.reflection_size != 0.0: <NEW_LINE> <INDENT> im = add_reflection(im, bgcolor=self.background_color, amount=self.reflection_size, opacity=self.reflection_strength) <NEW_LINE> <DEDENT> return im
A pre-defined effect to apply to photos
6259908a4527f215b58eb799
class Mixer: <NEW_LINE> <INDENT> def __init__(self, mixer_params): <NEW_LINE> <INDENT> self.amplitude = get_parameter(mixer_params, 'amplitude', 1.e-5, 'Mixer') <NEW_LINE> assert self.amplitude <= 1. <NEW_LINE> self.decay = get_parameter(mixer_params, 'decay', 2., 'Mixer') <NEW_LINE> assert self.decay >= 1. <NEW_LINE> if self.decay == 1.: <NEW_LINE> <INDENT> warnings.warn("Mixer with decay=1. doesn't decay") <NEW_LINE> <DEDENT> self.disable_after = get_parameter(mixer_params, 'disable_after', 15, 'Mixer') <NEW_LINE> self.verbose = mixer_params.get('verbose', 0) <NEW_LINE> <DEDENT> def update_amplitude(self, sweeps): <NEW_LINE> <INDENT> self.amplitude /= self.decay <NEW_LINE> if sweeps >= self.disable_after or self.amplitude <= np.finfo('float').eps: <NEW_LINE> <INDENT> if self.verbose >= 0.1: <NEW_LINE> <INDENT> print("disable mixer after {0:d} sweeps, final amplitude {1:.2e}".format( sweeps, self.amplitude)) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def perturb_svd(self, engine, theta, i0, update_LP, update_RP): <NEW_LINE> <INDENT> raise NotImplementedError("This function should be implemented in derived classes")
Base class of a general Mixer. Since DMRG performs only local updates of the state, it can get stuck in "local minima", in particular if the Hamiltonain is long-range -- which is the case if one maps a 2D system ("infinite cylinder") to 1D -- or if one wants to do single-site updates (currently not implemented in TeNPy). The idea of the mixer is to perturb the state with the terms of the Hamiltonian which have contributions in both the "left" and "right" side of the system. In that way, it adds fluctuation of the quantum numbers and non-zero contributions of the long-range terms - leading to a significantly improved convergence of DMRG. The strength of the perturbation is given by the `amplitude` of the mixer. A good strategy is to choose an initially significant amplitude and let it decay until the perturbation becomes completely irrelevant and the mixer gets disabled. This original idea of the mixer was introduced in [White2005]_. [Hubig2015]_ discusses the mixer and provides an improved version. Parameters ---------- env : :class:`~tenpy.networks.mpo.MPOEnvironment` Environment for contraction ``<psi|H|psi>`` for later mixer_params : dict Optional parameters as described in the following table. Use ``verbose>0`` to print the used parameters during runtime. ============== ========= =============================================================== key type description ============== ========= =============================================================== amplitude float Initial strength of the mixer. (Should be << 1.) -------------- --------- --------------------------------------------------------------- decay float To slowly turn off the mixer, we divide `amplitude` by `decay` after each sweep. (Should be >= 1.) -------------- --------- --------------------------------------------------------------- disable_after int We disable the mixer completely after this number of sweeps. 
============== ========= =============================================================== Attributes ---------- amplitude : float Current amplitude for mixing. decay : float Factor by which `amplitude` is divided after each sweep. disable_after : int The number of sweeps after which the mixer should be disabled. verbose : int Level of output vebosity.
6259908ad486a94d0ba2dbaa
@registry.register_sensor(name="speed_limit") <NEW_LINE> class SpeedLimitSensor(simulator.Sensor): <NEW_LINE> <INDENT> def __init__( self, hero: carla.ActorBlueprint, *args, **kwargs) -> None: <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self._hero = hero <NEW_LINE> <DEDENT> def _get_uuid(self, *args: Any, **kwargs: Any) -> str: <NEW_LINE> <INDENT> return "speed_limit" <NEW_LINE> <DEDENT> def _get_sensor_type(self, *args: Any, **kwargs: Any) -> CARLASensorTypes: <NEW_LINE> <INDENT> return CARLASensorTypes.SPEED_LIMIT <NEW_LINE> <DEDENT> @property <NEW_LINE> def observation_space(self, *args: Any, **kwargs: Any) -> gym.spaces.Box: <NEW_LINE> <INDENT> return gym.spaces.Box( low=-np.inf, high=np.inf, shape=(3,), dtype=np.float32, ) <NEW_LINE> <DEDENT> def get_observation(self, *args: Any, **kwargs: Any) -> np.ndarray: <NEW_LINE> <INDENT> speed_limit = self._hero.get_speed_limit() <NEW_LINE> return np.asarray( speed_limit, dtype=np.float32, ) <NEW_LINE> <DEDENT> def close(self) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def default( cls, hero: carla.ActorBlueprint, *args, **kwargs) -> "SpeedLimitSensor": <NEW_LINE> <INDENT> return cls(hero=hero)
CARLA vehicle speed limit sensor.
6259908a5fdd1c0f98e5fb6c
class HieroGetShot(Hook): <NEW_LINE> <INDENT> def execute(self, item, data, **kwargs): <NEW_LINE> <INDENT> sequence = self._get_sequence(item, data) <NEW_LINE> sg = self.parent.shotgun <NEW_LINE> filt = [ ["project", "is", self.parent.context.project], ["sg_sequence", "is", sequence], ["code", "is", item.name()], ] <NEW_LINE> fields = kwargs.get("fields", []) <NEW_LINE> shots = sg.find("Shot", filt, fields=fields) <NEW_LINE> if len(shots) > 1: <NEW_LINE> <INDENT> raise StandardError("Multiple shots named '%s' found", item.name()) <NEW_LINE> <DEDENT> if len(shots) == 0: <NEW_LINE> <INDENT> shot_data = { "code": item.name(), "sg_sequence": sequence, "project": self.parent.context.project, } <NEW_LINE> shot = sg.create("Shot", shot_data) <NEW_LINE> self.parent.log_info("Created Shot in Shotgun: %s" % shot_data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> shot = shots[0] <NEW_LINE> <DEDENT> self.parent.execute_hook( "hook_upload_thumbnail", entity=shot, source=item.source(), item=item, task=kwargs.get("task") ) <NEW_LINE> return shot <NEW_LINE> <DEDENT> def _get_sequence(self, item, data): <NEW_LINE> <INDENT> if "seq_cache" not in data: <NEW_LINE> <INDENT> data["seq_cache"] = {} <NEW_LINE> <DEDENT> hiero_sequence = item.parentSequence() <NEW_LINE> if hiero_sequence.guid() in data["seq_cache"]: <NEW_LINE> <INDENT> return data["seq_cache"][hiero_sequence.guid()] <NEW_LINE> <DEDENT> sg = self.parent.shotgun <NEW_LINE> filt = [ ["project", "is", self.parent.context.project], ["code", "is", hiero_sequence.name()], ] <NEW_LINE> sequences = sg.find("Sequence", filt) <NEW_LINE> if len(sequences) > 1: <NEW_LINE> <INDENT> raise StandardError("Multiple sequences named '%s' found" % hiero_sequence.name()) <NEW_LINE> <DEDENT> if len(sequences) == 0: <NEW_LINE> <INDENT> seq_data = { "code": hiero_sequence.name(), "project": self.parent.context.project, } <NEW_LINE> sequence = sg.create("Sequence", seq_data) <NEW_LINE> self.parent.log_info("Created Sequence in Shotgun: %s" % 
seq_data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sequence = sequences[0] <NEW_LINE> <DEDENT> self.parent.execute_hook("hook_upload_thumbnail", entity=sequence, source=hiero_sequence, item=None) <NEW_LINE> data["seq_cache"][hiero_sequence.guid()] = sequence <NEW_LINE> return sequence
Return a Shotgun Shot dictionary for the given Hiero items
6259908a63b5f9789fe86d5f
class VertexBufferObject(AbstractBuffer): <NEW_LINE> <INDENT> def __init__(self, size, target, usage): <NEW_LINE> <INDENT> self.size = size <NEW_LINE> self.target = target <NEW_LINE> self.usage = usage <NEW_LINE> self._context = pyglet.gl.current_context <NEW_LINE> id = GLuint() <NEW_LINE> glGenBuffers(1, id) <NEW_LINE> self.id = id.value <NEW_LINE> glPushClientAttrib(GL_CLIENT_VERTEX_ARRAY_BIT) <NEW_LINE> glBindBuffer(target, self.id) <NEW_LINE> glBufferData(target, self.size, None, self.usage) <NEW_LINE> glPopClientAttrib() <NEW_LINE> global _workaround_vbo_finish <NEW_LINE> if pyglet.gl.current_context._workaround_vbo_finish: <NEW_LINE> <INDENT> _workaround_vbo_finish = True <NEW_LINE> <DEDENT> <DEDENT> def bind(self): <NEW_LINE> <INDENT> glBindBuffer(self.target, self.id) <NEW_LINE> <DEDENT> def unbind(self): <NEW_LINE> <INDENT> glBindBuffer(self.target, 0) <NEW_LINE> <DEDENT> def set_data(self, data): <NEW_LINE> <INDENT> glPushClientAttrib(GL_CLIENT_VERTEX_ARRAY_BIT) <NEW_LINE> glBindBuffer(self.target, self.id) <NEW_LINE> glBufferData(self.target, self.size, data, self.usage) <NEW_LINE> glPopClientAttrib() <NEW_LINE> <DEDENT> def set_data_region(self, data, start, length): <NEW_LINE> <INDENT> glPushClientAttrib(GL_CLIENT_VERTEX_ARRAY_BIT) <NEW_LINE> glBindBuffer(self.target, self.id) <NEW_LINE> glBufferSubData(self.target, start, length, data) <NEW_LINE> glPopClientAttrib() <NEW_LINE> <DEDENT> def map(self, invalidate=False): <NEW_LINE> <INDENT> glPushClientAttrib(GL_CLIENT_VERTEX_ARRAY_BIT) <NEW_LINE> glBindBuffer(self.target, self.id) <NEW_LINE> if invalidate: <NEW_LINE> <INDENT> glBufferData(self.target, self.size, None, self.usage) <NEW_LINE> <DEDENT> ptr = ctypes.cast(glMapBuffer(self.target, GL_WRITE_ONLY), ctypes.POINTER(ctypes.c_byte * self.size)).contents <NEW_LINE> glPopClientAttrib() <NEW_LINE> return ptr <NEW_LINE> <DEDENT> def unmap(self): <NEW_LINE> <INDENT> glPushClientAttrib(GL_CLIENT_VERTEX_ARRAY_BIT) <NEW_LINE> glUnmapBuffer(self.target) 
<NEW_LINE> glPopClientAttrib() <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.id is not None: <NEW_LINE> <INDENT> self._context.delete_buffer(self.id) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def delete(self): <NEW_LINE> <INDENT> id = GLuint(self.id) <NEW_LINE> glDeleteBuffers(1, id) <NEW_LINE> self.id = None <NEW_LINE> <DEDENT> def resize(self, size): <NEW_LINE> <INDENT> temp = (ctypes.c_byte * size)() <NEW_LINE> glPushClientAttrib(GL_CLIENT_VERTEX_ARRAY_BIT) <NEW_LINE> glBindBuffer(self.target, self.id) <NEW_LINE> data = glMapBuffer(self.target, GL_READ_ONLY) <NEW_LINE> ctypes.memmove(temp, data, min(size, self.size)) <NEW_LINE> glUnmapBuffer(self.target) <NEW_LINE> self.size = size <NEW_LINE> glBufferData(self.target, self.size, temp, self.usage) <NEW_LINE> glPopClientAttrib()
Lightweight representation of an OpenGL VBO. The data in the buffer is not replicated in any system memory (unless it is done so by the video driver). While this can improve memory usage and possibly performance, updates to the buffer are relatively slow. This class does not implement :py:class:`AbstractMappable`, and so has no :py:meth:`~AbstractMappable.get_region` method. See :py:class:`MappableVertexBufferObject` for a VBO class that does implement :py:meth:`~AbstractMappable.get_region`.
6259908a5fc7496912d49066
class liberCriaFotogrametria(bpy.types.Panel): <NEW_LINE> <INDENT> bl_label = "Generate/Import Archs" <NEW_LINE> bl_idname = "liber_cria_fotogrametria" <NEW_LINE> bl_space_type = 'VIEW_3D' <NEW_LINE> bl_region_type = 'TOOLS' <NEW_LINE> bl_category = "Liber" <NEW_LINE> def draw(self, context): <NEW_LINE> <INDENT> layout = self.layout <NEW_LINE> scn = context.scene <NEW_LINE> obj = context.object <NEW_LINE> row = layout.row() <NEW_LINE> row.label(text="Scene Setup:") <NEW_LINE> row = layout.row() <NEW_LINE> knife=row.operator("object.liber_arruma_cena", text="Fix scene!", icon="PARTICLES") <NEW_LINE> row = layout.row() <NEW_LINE> row.label(text="3D Scanning:") <NEW_LINE> row = layout.row() <NEW_LINE> row.operator("import_mesh.stl", text="Import STL", icon="IMPORT") <NEW_LINE> row = layout.row() <NEW_LINE> row.label(text="Scanning by Photogrammetry:") <NEW_LINE> col = layout.column(align=True) <NEW_LINE> col.prop(scn.my_tool, "path", text="") <NEW_LINE> row = layout.row() <NEW_LINE> row.operator("object.liber_gera_modelo_foto", text="Start Photogrammetry!", icon="IMAGE_DATA") <NEW_LINE> row = layout.row() <NEW_LINE> row.label(text="Align and Resize:") <NEW_LINE> row = layout.row() <NEW_LINE> row.operator("object.cria_tres_pontos", text="3 Points Click", icon="OUTLINER_OB_MESH") <NEW_LINE> col = self.layout.column(align = True) <NEW_LINE> col.prop(context.scene, "medida_real2") <NEW_LINE> row = layout.row() <NEW_LINE> row.operator("object.alinha_forca", text="Align and Resize!", icon="LAMP_POINT") <NEW_LINE> row = layout.row() <NEW_LINE> row.label(text="CT-Scan Reconstruction:") <NEW_LINE> col = layout.column(align=True) <NEW_LINE> col.prop(scn.my_tool, "path", text="") <NEW_LINE> row = layout.row() <NEW_LINE> row.operator("object.gera_modelos_tomo_arc", text="Arch Generator", icon="SNAP_FACE")
Planejamento de cirurgia ortognática no Blender
6259908a5fcc89381b266f59
class SugarModule: <NEW_LINE> <INDENT> def __init__(self, connection, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._connection = connection <NEW_LINE> result = self._connection.get_module_fields(self._name) <NEW_LINE> if result is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._fields = result['module_fields'] <NEW_LINE> self._table = result['table_name'] <NEW_LINE> self._relationships = (result['link_fields'] or {}).copy() <NEW_LINE> <DEDENT> def _search(self, query_str, start = 0, total = 20, fields = None, query = None): <NEW_LINE> <INDENT> if fields is None: <NEW_LINE> <INDENT> fields = [] <NEW_LINE> <DEDENT> if 'id' not in fields: <NEW_LINE> <INDENT> fields.append('id') <NEW_LINE> <DEDENT> if 'name' not in fields: <NEW_LINE> <INDENT> fields.append('name') <NEW_LINE> <DEDENT> result = {} <NEW_LINE> entry_list = [] <NEW_LINE> offset = 0 <NEW_LINE> while len(entry_list) < total: <NEW_LINE> <INDENT> resp_data = self._connection.get_entry_list(self._name, query_str, '', start + offset, fields, total - len(entry_list), 0) <NEW_LINE> if resp_data['total_count']: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result['total'] = int(resp_data['total_count']) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> print(resp_data) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> result['total'] = 0 <NEW_LINE> <DEDENT> if resp_data['result_count'] == 0: <NEW_LINE> <INDENT> result['offset'] = 0 <NEW_LINE> break <NEW_LINE> <DEDENT> offset = result['offset'] = resp_data['next_offset'] <NEW_LINE> for record in resp_data['entry_list']: <NEW_LINE> <INDENT> entry = SugarEntry(self) <NEW_LINE> for key, obj in list(record['name_value_list'].items()): <NEW_LINE> <INDENT> val = obj['value'] <NEW_LINE> entry[key] = HTMLP.unescape(val) if isinstance(val, basestring) else val <NEW_LINE> <DEDENT> entry_list.append(entry) <NEW_LINE> <DEDENT> if resp_data['result_count'] == int(resp_data['total_count']): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> 
<DEDENT> result['entries'] = entry_list <NEW_LINE> return result <NEW_LINE> <DEDENT> def query(self, fields = None): <NEW_LINE> <INDENT> return QueryList(self, fields = fields) <NEW_LINE> <DEDENT> def search(self, value, offset = 0, maxresults = 1000, user = '', fields = None, unifiedonly = True, favorites = False): <NEW_LINE> <INDENT> if fields is None: <NEW_LINE> <INDENT> fields = ['id', 'name'] <NEW_LINE> <DEDENT> resp_data = self._connection.search_by_module(value, [self._name], offset, maxresults, user, fields, unifiedonly, favorites) <NEW_LINE> results = [] <NEW_LINE> for mod_results in resp_data['entry_list']: <NEW_LINE> <INDENT> if mod_results['name'] != self._name: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for record in mod_results['records']: <NEW_LINE> <INDENT> entry = SugarEntry(self) <NEW_LINE> for key, obj in list(record.items()): <NEW_LINE> <INDENT> val = obj['value'] <NEW_LINE> entry[key] = HTMLP.unescape(val) if isinstance(val, basestring) else val <NEW_LINE> <DEDENT> results.append(entry) <NEW_LINE> <DEDENT> <DEDENT> return results
Defines a SugarCRM module. This is used to perform module related tasks, such as queries and creating new entries.
6259908a23849d37ff852cb2
class CatCertCommandStub(CatCertCommand): <NEW_LINE> <INDENT> def __init__(self, cert_pem): <NEW_LINE> <INDENT> CatCertCommand.__init__(self) <NEW_LINE> self.cert = create_from_pem(cert_pem) <NEW_LINE> <DEDENT> def _validate_options(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _create_cert(self): <NEW_LINE> <INDENT> return self.cert
A testing CatCertCommand that allows bypassing the loading of a certificate file.
6259908a2c8b7c6e89bd53dd