code (string, 4 to 4.48k chars) | docstring (string, 1 to 6.45k chars) | _id (string, 24 chars)
class SingletonMeta(type): <NEW_LINE> <INDENT> def __init__(cls, name: str, bases: Tuple[type, ...], dct: Dict[str, Any]): <NEW_LINE> <INDENT> cls._Singleton__instance = None <NEW_LINE> super(SingletonMeta, cls).__init__(name, bases, dct) <NEW_LINE> <DEDENT> def __call__(cls, *args, **kwargs): <NEW_LINE> <INDENT> if cls._Singleton__instance is None: <NEW_LINE> <INDENT> cls._Singleton__instance = super(SingletonMeta, cls).__call__(*args, **kwargs) <NEW_LINE> <DEDENT> return cls._Singleton__instance <NEW_LINE> <DEDENT> def exists_instance(cls) -> bool: <NEW_LINE> <INDENT> return cls._Singleton__instance is not None
Metaclass for defining singleton classes. A class using this metaclass is instantiated at most once: the first instance is reused for all subsequent instantiations, and the arguments provided in those subsequent instantiations are simply discarded.
6259909badb09d7d5dc0c396
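A minimal usage sketch for the SingletonMeta entry above, assuming the class has been defined (with Tuple and Dict imported from typing); the Config class below is hypothetical and only illustrates that later instantiations reuse the first instance and drop their arguments:

    class Config(metaclass=SingletonMeta):   # hypothetical example class
        def __init__(self, value: int):
            self.value = value

    a = Config(1)
    b = Config(2)                    # arguments of the second call are discarded
    assert a is b and a.value == 1
    assert Config.exists_instance()  # metaclass method, called on the class itself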
class SpriteSheet(object): <NEW_LINE> <INDENT> def __init__(self, file_name: str): <NEW_LINE> <INDENT> self.sprite_sheet = pygame.image.load(file_name).convert() <NEW_LINE> <DEDENT> def get_image(self, x: int, y: int, width: int, height: int): <NEW_LINE> <INDENT> image = pygame.Surface([width, height]).convert() <NEW_LINE> image.blit(self.sprite_sheet, (0, 0), (x, y, width, height)) <NEW_LINE> image.set_colorkey(settings.BLACK) <NEW_LINE> return image
Class used to grab images out of a sprite sheet.
6259909cd8ef3951e32c8d7f
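A hedged usage sketch for the SpriteSheet entry above; it assumes pygame is installed, a display surface exists (convert() requires one), settings.BLACK is the transparent color key, and a hypothetical file sheet.png contains a 32x32 tile at (0, 0):

    import pygame

    pygame.init()
    screen = pygame.display.set_mode((320, 240))  # convert() needs an active display
    sheet = SpriteSheet("sheet.png")               # hypothetical sprite sheet file
    tile = sheet.get_image(0, 0, 32, 32)           # grab one 32x32 sprite
    screen.blit(tile, (10, 10))
    pygame.display.flip()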
class Trajectory(models.Model): <NEW_LINE> <INDENT> data = models.FileField(storage=fs, upload_to=_upload_trajectory_file_path) <NEW_LINE> length = models.PositiveIntegerField( help_text='length in frames', default=0) <NEW_LINE> parent_traj = models.ForeignKey('self', blank=True, null=True) <NEW_LINE> collection = models.ForeignKey(Collection) <NEW_LINE> hash_sha512 = models.CharField(max_length=128, unique=False) <NEW_LINE> created = models.DateTimeField(auto_now_add=True) <NEW_LINE> owner = models.ForeignKey('auth.User', related_name='trajectory') <NEW_LINE> def save(self, *args, **kw): <NEW_LINE> <INDENT> if not self.pk: <NEW_LINE> <INDENT> with _symlink_workaround_temp_uploaded_file(self.data) as f: <NEW_LINE> <INDENT> with mdtraj.open(f) as traj: <NEW_LINE> <INDENT> self.length += len(traj) <NEW_LINE> <DEDENT> <DEDENT> import hashlib <NEW_LINE> func = hashlib.sha512() <NEW_LINE> for chunk in self.data.chunks(): <NEW_LINE> <INDENT> func.update(chunk) <NEW_LINE> <DEDENT> computed_hash = func.hexdigest() <NEW_LINE> if not computed_hash == self.hash_sha512: <NEW_LINE> <INDENT> raise ParseError(["Uploaded trajectory has different hash value than promised ", {"promised": self.hash_sha512, "received": computed_hash}]) <NEW_LINE> <DEDENT> <DEDENT> super(Trajectory, self).save(*args, **kw)
Stores a trajectory file associated with a collection. Has a unique hash (sha512). Can refer to a parent trajectory from which the current trajectory was forked.
6259909cc4546d3d9def81c0
class Address(models.Model): <NEW_LINE> <INDENT> id = models.CharField(max_length=64, primary_key=True) <NEW_LINE> lines = models.CharField(max_length=200) <NEW_LINE> city = models.CharField(max_length=64) <NEW_LINE> state = models.CharField(max_length=2) <NEW_LINE> postalCode = models.CharField(max_length=5) <NEW_LINE> normal = models.ForeignKey("AddressNormal") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "|".join([s for s in (self.lines, self.city, self.state, self.postalCode) if s is not None and len(s)!=0]) <NEW_LINE> <DEDENT> def get_hash(self): <NEW_LINE> <INDENT> return hashlib.new("sha256", str(self).encode()).hexdigest() <NEW_LINE> <DEDENT> def set_hash(self): <NEW_LINE> <INDENT> self.id = self.get_hash() <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = "Addresses"
Represents a single address input
6259909cc4546d3d9def81c2
class ApplicationView(ApplicationAuthMixin, View): <NEW_LINE> <INDENT> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> applicationid = kwargs.get('applicationid') <NEW_LINE> app_obj = AppDetails(applicationid) <NEW_LINE> return render(request, 'application.html', {'application': app_obj.application})
Show a submitted application to the user who applied for it.
6259909cadb09d7d5dc0c3a8
class KeystoreException(Exception): <NEW_LINE> <INDENT> pass
Superclass for all pyjks exceptions.
6259909cc4546d3d9def81c3
class DiscussionSettingsControlPanel(controlpanel.ControlPanelFormWrapper): <NEW_LINE> <INDENT> form = DiscussionSettingsEditForm <NEW_LINE> index = ViewPageTemplateFile('controlpanel.pt') <NEW_LINE> def settings(self): <NEW_LINE> <INDENT> registry = queryUtility(IRegistry) <NEW_LINE> settings = registry.forInterface(IDiscussionSettings, check=False) <NEW_LINE> wftool = getToolByName(self.context, "portal_workflow", None) <NEW_LINE> workflow_chain = wftool.getChainForPortalType('Discussion Item') <NEW_LINE> output = [] <NEW_LINE> if settings.globally_enabled: <NEW_LINE> <INDENT> output.append("globally_enabled") <NEW_LINE> <DEDENT> if 'one_state_workflow' not in workflow_chain and 'comment_review_workflow' not in workflow_chain: <NEW_LINE> <INDENT> output.append("moderation_custom") <NEW_LINE> <DEDENT> elif settings.moderation_enabled: <NEW_LINE> <INDENT> output.append("moderation_enabled") <NEW_LINE> <DEDENT> if settings.anonymous_comments: <NEW_LINE> <INDENT> output.append("anonymous_comments") <NEW_LINE> <DEDENT> ctrlOverview = getMultiAdapter((self.context, self.request), name='overview-controlpanel') <NEW_LINE> if ctrlOverview.mailhost_warning(): <NEW_LINE> <INDENT> output.append("invalid_mail_setup") <NEW_LINE> <DEDENT> wftool = getToolByName(self.context, 'portal_workflow', None) <NEW_LINE> if workflow_chain: <NEW_LINE> <INDENT> discussion_workflow = workflow_chain[0] <NEW_LINE> output.append(discussion_workflow) <NEW_LINE> <DEDENT> return ' '.join(output) <NEW_LINE> <DEDENT> def mailhost_warning(self): <NEW_LINE> <INDENT> mailhost = getToolByName(aq_inner(self.context), 'MailHost', None) <NEW_LINE> if mailhost is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> mailhost = getattr(aq_base(mailhost), 'smtp_host', None) <NEW_LINE> email = getattr(aq_inner(self.context), 'email_from_address', None) <NEW_LINE> if mailhost and email: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def custom_comment_workflow_warning(self): <NEW_LINE> <INDENT> wftool = getToolByName(self.context, "portal_workflow", None) <NEW_LINE> workflow_chain = wftool.getChainForPortalType('Discussion Item') <NEW_LINE> if 'one_state_workflow' in workflow_chain or 'comment_review_workflow' in workflow_chain: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def unmigrated_comments_warning(self): <NEW_LINE> <INDENT> catalog = getToolByName(aq_inner(self.context), 'portal_catalog', None) <NEW_LINE> count_comments_old = catalog.searchResults( object_provides=IDiscussionResponse.__identifier__) <NEW_LINE> if count_comments_old: <NEW_LINE> <INDENT> return True
Discussion settings control panel.
6259909c50812a4eaa621aef
class TimeReturn(TimeFrameAnalyzerBase): <NEW_LINE> <INDENT> params = ( ('data', None), ('firstopen', True), ('fund', None), ) <NEW_LINE> def start(self): <NEW_LINE> <INDENT> super(TimeReturn, self).start() <NEW_LINE> if self.p.fund is None: <NEW_LINE> <INDENT> self._fundmode = self.strategy.broker.fundmode <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._fundmode = self.p.fund <NEW_LINE> <DEDENT> self._value_start = 0.0 <NEW_LINE> self._lastvalue = None <NEW_LINE> if self.p.data is None: <NEW_LINE> <INDENT> if not self._fundmode: <NEW_LINE> <INDENT> self._lastvalue = self.strategy.broker.getvalue() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._lastvalue = self.strategy.broker.fundvalue <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def notify_fund(self, cash, value, fundvalue, shares): <NEW_LINE> <INDENT> if not self._fundmode: <NEW_LINE> <INDENT> if self.p.data is None: <NEW_LINE> <INDENT> self._value = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._value = self.p.data[0] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self.p.data is None: <NEW_LINE> <INDENT> self._value = fundvalue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._value = self.p.data[0] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def on_dt_over(self): <NEW_LINE> <INDENT> if self.p.data is None or self._lastvalue is not None: <NEW_LINE> <INDENT> self._value_start = self._lastvalue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.p.firstopen: <NEW_LINE> <INDENT> self._value_start = self.p.data.open[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._value_start = self.p.data[0] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def next(self): <NEW_LINE> <INDENT> super(TimeReturn, self).next() <NEW_LINE> self.rets[self.dtkey] = (self._value / self._value_start) - 1.0 <NEW_LINE> self._lastvalue = self._value
This analyzer calculates the Returns by looking at the beginning and end of the timeframe Params: - ``timeframe`` (default: ``None``) If ``None`` the ``timeframe`` of the 1st data in the system will be used Pass ``TimeFrame.NoTimeFrame`` to consider the entire dataset with no time constraints - ``compression`` (default: ``None``) Only used for sub-day timeframes, for example to work on an hourly timeframe by specifying ``TimeFrame.Minutes`` as the timeframe and 60 as the compression If ``None`` then the compression of the 1st data of the system will be used - ``data`` (default: ``None``) Reference asset to track instead of the portfolio value. .. note:: this data must have been added to a ``cerebro`` instance with ``adddata``, ``resampledata`` or ``replaydata`` - ``firstopen`` (default: ``True``) When tracking the returns of a ``data`` the following is done when crossing a timeframe boundary, for example ``Years``: - Last ``close`` of the previous year is used as the reference price to see the return in the current year The problem is the 1st calculation, because the data has **no previous** closing price. As such and when this parameter is ``True`` the *opening* price will be used for the 1st calculation. This requires the data feed to have an ``open`` price (for ``close`` the standard [0] notation will be used without reference to a field price) Else the initial close will be used. - ``fund`` (default: ``None``) If ``None`` the actual mode of the broker (fundmode - True/False) will be autodetected to decide if the returns are based on the total net asset value or on the fund value. See ``set_fundmode`` in the broker documentation Set it to ``True`` or ``False`` for a specific behavior Methods: - get_analysis Returns a dictionary with returns as values and the datetime points for each return as keys
6259909c50812a4eaa621af0
class Number(HerokuConnectFieldMixin, models.DecimalField): <NEW_LINE> <INDENT> def get_internal_type(self): <NEW_LINE> <INDENT> return "FloatField" <NEW_LINE> <DEDENT> def get_db_prep_save(self, value, connection): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> value = float(self.to_python(value)) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def get_db_prep_value(self, value, connection, prepared=False): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> value = float(self.to_python(value)) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def from_db_value(self, value, *args, **kwargs): <NEW_LINE> <INDENT> return self.to_python(value)
Salesforce ``Number`` field. Allows users to enter any number. Leading zeros are removed. Numbers in Salesforce are constrained by length and decimal places. Heroku Connect maps those decimal values to ``double precision`` floats. To have the same accuracy and avoid Salesforce validation rule issues this field uses :obj:`.Decimal` values and casts them to floats when persisting them to PostgreSQL.
6259909c283ffb24f3cf56ec
class itkNumericTraitsUC(vcl_numeric_limitsUC): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> __swig_destroy__ = _itkNumericTraitsPython.delete_itkNumericTraitsUC <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> _itkNumericTraitsPython.itkNumericTraitsUC_swiginit(self,_itkNumericTraitsPython.new_itkNumericTraitsUC(*args)) <NEW_LINE> <DEDENT> def min(*args): <NEW_LINE> <INDENT> return _itkNumericTraitsPython.itkNumericTraitsUC_min(*args) <NEW_LINE> <DEDENT> min = staticmethod(min) <NEW_LINE> def max(*args): <NEW_LINE> <INDENT> return _itkNumericTraitsPython.itkNumericTraitsUC_max(*args) <NEW_LINE> <DEDENT> max = staticmethod(max) <NEW_LINE> def NonpositiveMin(): <NEW_LINE> <INDENT> return _itkNumericTraitsPython.itkNumericTraitsUC_NonpositiveMin() <NEW_LINE> <DEDENT> NonpositiveMin = staticmethod(NonpositiveMin) <NEW_LINE> def IsPositive(*args): <NEW_LINE> <INDENT> return _itkNumericTraitsPython.itkNumericTraitsUC_IsPositive(*args) <NEW_LINE> <DEDENT> IsPositive = staticmethod(IsPositive) <NEW_LINE> def IsNonpositive(*args): <NEW_LINE> <INDENT> return _itkNumericTraitsPython.itkNumericTraitsUC_IsNonpositive(*args) <NEW_LINE> <DEDENT> IsNonpositive = staticmethod(IsNonpositive) <NEW_LINE> def IsNegative(*args): <NEW_LINE> <INDENT> return _itkNumericTraitsPython.itkNumericTraitsUC_IsNegative(*args) <NEW_LINE> <DEDENT> IsNegative = staticmethod(IsNegative) <NEW_LINE> def IsNonnegative(*args): <NEW_LINE> <INDENT> return _itkNumericTraitsPython.itkNumericTraitsUC_IsNonnegative(*args) <NEW_LINE> <DEDENT> IsNonnegative = staticmethod(IsNonnegative) <NEW_LINE> def ZeroValue(): <NEW_LINE> <INDENT> return _itkNumericTraitsPython.itkNumericTraitsUC_ZeroValue() <NEW_LINE> <DEDENT> ZeroValue = staticmethod(ZeroValue) <NEW_LINE> def OneValue(): <NEW_LINE> <INDENT> return _itkNumericTraitsPython.itkNumericTraitsUC_OneValue() <NEW_LINE> <DEDENT> OneValue = staticmethod(OneValue)
Proxy of C++ itkNumericTraitsUC class
6259909c3617ad0b5ee07fa7
class pp_ls: <NEW_LINE> <INDENT> def __init__(self, val): <NEW_LINE> <INDENT> self.val = val <NEW_LINE> <DEDENT> def to_string(self): <NEW_LINE> <INDENT> return self.val['lazy_str'].lazy_string() <NEW_LINE> <DEDENT> def display_hint (self): <NEW_LINE> <INDENT> return 'string'
Print a std::basic_string of some kind
6259909cc4546d3d9def81c6
class CarColorOption(models.Model): <NEW_LINE> <INDENT> color_name = models.CharField(max_length=128) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.color_name
List of color options for the car maker in the Car model
6259909cc4546d3d9def81c8
class BackgroundTimer(object): <NEW_LINE> <INDENT> def __init__(self, interval=60, repeat=True, call=None): <NEW_LINE> <INDENT> self._timer = None <NEW_LINE> self.callback = call <NEW_LINE> self.interval = interval <NEW_LINE> self.repeat = repeat <NEW_LINE> self.is_running = False <NEW_LINE> if call is None: <NEW_LINE> <INDENT> self.callback = do_nothing <NEW_LINE> <DEDENT> <DEDENT> def start(self): <NEW_LINE> <INDENT> if not self.is_running: <NEW_LINE> <INDENT> self._timer = threading.Timer(self.interval, self._run) <NEW_LINE> self._timer.start() <NEW_LINE> self.is_running = True <NEW_LINE> <DEDENT> <DEDENT> def _run(self): <NEW_LINE> <INDENT> self.is_running = False <NEW_LINE> if self.repeat: <NEW_LINE> <INDENT> self.start() <NEW_LINE> <DEDENT> self.callback() <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self._timer.cancel() <NEW_LINE> self.is_running = False
A repeating timer that runs in the background and calls a provided callback each time the timer runs out.
6259909c283ffb24f3cf56f2
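A small usage sketch for the BackgroundTimer entry above (it assumes the class and its threading import are available): the timer reschedules itself after each interval until stop() is called.

    import time

    def heartbeat():
        print("tick")

    timer = BackgroundTimer(interval=2, repeat=True, call=heartbeat)
    timer.start()
    time.sleep(7)   # roughly three ticks are printed
    timer.stop()    # cancels the pending threading.Timer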
class BusinessDetailSerializer(BusinessSerializer): <NEW_LINE> <INDENT> services = ServiceSerializer(many=True, read_only=True) <NEW_LINE> categories = CategorySerializer(many=True, read_only=True)
Serializer for Business Detail object
6259909c091ae35668706a8a
class Call(Action): <NEW_LINE> <INDENT> def __init__(self, amount): <NEW_LINE> <INDENT> Action.__init__(self) <NEW_LINE> self.amount = amount <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Call: " + " $" + str(self.amount)
Action a player can take to call the current bet.
6259909cd8ef3951e32c8d89
class ComponentTests(ossie.utils.testing.ScaComponentTestCase): <NEW_LINE> <INDENT> def testScaBasicBehavior(self): <NEW_LINE> <INDENT> execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False) <NEW_LINE> execparams = dict([(x.id, any.from_any(x.value)) for x in execparams]) <NEW_LINE> self.launch(execparams) <NEW_LINE> self.assertNotEqual(self.comp, None) <NEW_LINE> self.assertEqual(self.comp.ref._non_existent(), False) <NEW_LINE> self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True) <NEW_LINE> expectedProps = [] <NEW_LINE> expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True)) <NEW_LINE> expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True)) <NEW_LINE> props = self.comp.query([]) <NEW_LINE> props = dict((x.id, any.from_any(x.value)) for x in props) <NEW_LINE> for expectedProp in expectedProps: <NEW_LINE> <INDENT> self.assertEquals(props.has_key(expectedProp.id), True) <NEW_LINE> <DEDENT> for port in self.scd.get_componentfeatures().get_ports().get_uses(): <NEW_LINE> <INDENT> port_obj = self.comp.getPort(str(port.get_usesname())) <NEW_LINE> self.assertNotEqual(port_obj, None) <NEW_LINE> self.assertEqual(port_obj._non_existent(), False) <NEW_LINE> self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True) <NEW_LINE> <DEDENT> for port in self.scd.get_componentfeatures().get_ports().get_provides(): <NEW_LINE> <INDENT> port_obj = self.comp.getPort(str(port.get_providesname())) <NEW_LINE> self.assertNotEqual(port_obj, None) <NEW_LINE> self.assertEqual(port_obj._non_existent(), False) <NEW_LINE> self.assertEqual(port_obj._is_a(port.get_repid()), True) <NEW_LINE> <DEDENT> self.comp.start() <NEW_LINE> self.comp.stop() <NEW_LINE> self.comp.releaseObject()
Test for all component implementations in stream_to_streams_cc_4o
6259909cc4546d3d9def81ca
class ExceptionsTest(cros_test_lib.TestCase): <NEW_LINE> <INDENT> def _TestException(self, err, expected_startswith): <NEW_LINE> <INDENT> err2 = cPickle.loads(cPickle.dumps(err, cPickle.HIGHEST_PROTOCOL)) <NEW_LINE> self.assertTrue(str(err).startswith(expected_startswith)) <NEW_LINE> self.assertEqual(str(err), str(err2)) <NEW_LINE> <DEDENT> def testParallelAttributeError(self): <NEW_LINE> <INDENT> err1 = cbuildbot_run.ParallelAttributeError('SomeAttr') <NEW_LINE> self._TestException(err1, 'No such parallel run attribute') <NEW_LINE> err2 = cbuildbot_run.ParallelAttributeError('SomeAttr', 'SomeBoard', 'SomeTarget') <NEW_LINE> self._TestException(err2, 'No such board-specific parallel run attribute') <NEW_LINE> <DEDENT> def testAttrSepCountError(self): <NEW_LINE> <INDENT> err1 = cbuildbot_run.AttrSepCountError('SomeAttr') <NEW_LINE> self._TestException(err1, 'Attribute name has an unexpected number') <NEW_LINE> <DEDENT> def testAttrNotPickleableError(self): <NEW_LINE> <INDENT> err1 = cbuildbot_run.AttrNotPickleableError('SomeAttr', 'SomeValue') <NEW_LINE> self._TestException(err1, 'Run attribute "SomeAttr" value cannot')
Test that the exceptions in the module are sane.
6259909c091ae35668706a8e
class XboxControllerAdapter(ApplicationSession): <NEW_LINE> <INDENT> @inlineCallbacks <NEW_LINE> def onJoin(self, details): <NEW_LINE> <INDENT> log.msg("XboxControllerAdapter connected.") <NEW_LINE> extra = self.config.extra <NEW_LINE> self._id = extra['id'] <NEW_LINE> self._xbox = extra['xbox'] <NEW_LINE> self._xbox._session = self <NEW_LINE> for proc in [self.get_data]: <NEW_LINE> <INDENT> uri = 'io.crossbar.examples.iot.devices.pi.{}.xboxcontroller.{}'.format(self._id, proc.__name__) <NEW_LINE> yield self.register(proc, uri) <NEW_LINE> log.msg("XboxControllerAdapter registered procedure {}".format(uri)) <NEW_LINE> <DEDENT> self.publish('io.crossbar.examples.iot.devices.pi.{}.xboxcontroller.on_ready'.format(self._id)) <NEW_LINE> log.msg("XboxControllerAdapter ready.") <NEW_LINE> <DEDENT> def get_data(self): <NEW_LINE> <INDENT> return self._xbox._last <NEW_LINE> <DEDENT> def on_data(self, data): <NEW_LINE> <INDENT> uri = 'io.crossbar.examples.iot.devices.pi.{}.xboxcontroller.on_data'.format(self._id) <NEW_LINE> self.publish(uri, data) <NEW_LINE> log.msg("XboxControllerAdapter event published to {}: {}".format(uri, data))
Connects Xbox gamepad controller to WAMP.
6259909c656771135c48af60
class OldSeqAccuracy(DiscreteLoss): <NEW_LINE> <INDENT> def _forward(self, x, gold): <NEW_LINE> <INDENT> ignoremask = self.get_ignore_mask(gold, self.ignore_indices) <NEW_LINE> _, best = torch.max(x, 2) <NEW_LINE> same = best == gold <NEW_LINE> outignoremask = None <NEW_LINE> if ignoremask is not None: <NEW_LINE> <INDENT> same = same | ~ ignoremask <NEW_LINE> outignoremask = ignoremask.long().sum(1) > 0 <NEW_LINE> <DEDENT> sameseqs = same.long().sum(1) <NEW_LINE> sameseqs = sameseqs == int(same.size(1)) <NEW_LINE> return sameseqs, outignoremask
Very basic explicit sequence-accuracy implementation. Does not support a batchable sparse mask.
6259909c283ffb24f3cf56f8
class Storage(object): <NEW_LINE> <INDENT> path = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> if Storage.path is None: <NEW_LINE> <INDENT> raise Exception("Path for data not set") <NEW_LINE> <DEDENT> db = Database(os.path.join(Storage.path, "db")) <NEW_LINE> if db.exists(): <NEW_LINE> <INDENT> db.open() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> db.create() <NEW_LINE> db.add_index(ActiveIndex(db.path, "activeIndex")) <NEW_LINE> db.add_index(DateTreeIndex(db.path, "dateIndex")) <NEW_LINE> <DEDENT> self._db = db <NEW_LINE> <DEDENT> def get(self, showDeleted): <NEW_LINE> <INDENT> if showDeleted: <NEW_LINE> <INDENT> return [self._map(r) for r in self._db.all("id")] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [self._map(r["doc"]) for r in self._db.get_many("activeIndex", True, with_doc=True)] <NEW_LINE> <DEDENT> <DEDENT> def getDetail(self, id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> record = self._db.get("id", id) <NEW_LINE> return self._map(record) <NEW_LINE> <DEDENT> except RecordNotFound: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def update(self, id, title, body): <NEW_LINE> <INDENT> record = Record(id, time.time(), title.encode('utf-8'), body.encode('utf-8'), StateType.Modified) <NEW_LINE> d = record.dict() <NEW_LINE> try: <NEW_LINE> <INDENT> exists = self._db.get("id", id) <NEW_LINE> for key, value in d.iteritems(): <NEW_LINE> <INDENT> if key != "id": <NEW_LINE> <INDENT> exists[key] = value <NEW_LINE> <DEDENT> <DEDENT> self._db.update(exists) <NEW_LINE> <DEDENT> except RecordNotFound: <NEW_LINE> <INDENT> d["_id"] = id <NEW_LINE> self._db.insert(d) <NEW_LINE> record["id"] = id <NEW_LINE> <DEDENT> return record <NEW_LINE> <DEDENT> def add(self, title, body): <NEW_LINE> <INDENT> record = Record(None, time.time(), title.encode('utf-8'), body.encode('utf-8'), StateType.Added) <NEW_LINE> result = self._db.insert(record.dict()) <NEW_LINE> record.id = result["_id"] <NEW_LINE> return record <NEW_LINE> <DEDENT> def delete(self, id): <NEW_LINE> <INDENT> result = self._db.get("id", id) <NEW_LINE> result["state"] = StateType.Deleted <NEW_LINE> self._db.update(result) <NEW_LINE> <DEDENT> def getFromDate(self, date): <NEW_LINE> <INDENT> records = self._db.get_many("dateIndex", limit=-1, start=date, end=None, inclusive_start=True, with_doc=True) <NEW_LINE> return [self._map(r["doc"]) for r in records] <NEW_LINE> <DEDENT> def _map(self, r): <NEW_LINE> <INDENT> return Record(r["_id"], r["date"], r["title"], r["body"], r["state"])
Database storage
6259909dc4546d3d9def81cc
class Circulo: <NEW_LINE> <INDENT> def __init__(self, centro, radio): <NEW_LINE> <INDENT> self.Centro, self.Radio = centro, radio
Represents a circle in a 2D world
6259909d099cdd3c6367632b
class TestCloudLocationReq(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testCloudLocationReq(self): <NEW_LINE> <INDENT> pass
CloudLocationReq unit test stubs
6259909dc4546d3d9def81cd
class CmdWorkspace(BaseWorkspace): <NEW_LINE> <INDENT> def __init__(self, working_dir, cmd=None, auto=False, **kw): <NEW_LINE> <INDENT> BaseWorkspace.__init__(self, working_dir) <NEW_LINE> if auto: <NEW_LINE> <INDENT> for marker, cls in _cmd_classes.items(): <NEW_LINE> <INDENT> target = abspath(normpath(join(self.working_dir, marker))) <NEW_LINE> if not isdir(target): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> cmd = cls() <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> self.cmd = cmd <NEW_LINE> self.update_cmd_table(cmd) <NEW_LINE> self.initialize() <NEW_LINE> <DEDENT> def update_cmd_table(self, cmd): <NEW_LINE> <INDENT> self.cmd_table = {} <NEW_LINE> if cmd: <NEW_LINE> <INDENT> self.cmd_table.update(cmd.cmd_table) <NEW_LINE> <DEDENT> <DEDENT> def get_cmd(self, name): <NEW_LINE> <INDENT> cmd = self.cmd_table.get(name) <NEW_LINE> if not cmd: <NEW_LINE> <INDENT> logger.info('%s required but no init defined', name) <NEW_LINE> return dummy_action <NEW_LINE> <DEDENT> return cmd <NEW_LINE> <DEDENT> @property <NEW_LINE> def marker(self): <NEW_LINE> <INDENT> return self.cmd and self.cmd.marker or None <NEW_LINE> <DEDENT> def check_marker(self): <NEW_LINE> <INDENT> if self.marker is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> target = join(self.working_dir, self.marker) <NEW_LINE> logger.debug('checking isdir: %s', target) <NEW_LINE> return isdir(target) <NEW_LINE> <DEDENT> def initialize(self, **kw): <NEW_LINE> <INDENT> if self.check_marker(): <NEW_LINE> <INDENT> logger.debug('already initialized: %s', self.working_dir) <NEW_LINE> return <NEW_LINE> <DEDENT> return self.get_cmd('init')(self, **kw) <NEW_LINE> <DEDENT> def save(self, **kw): <NEW_LINE> <INDENT> return self.get_cmd('save')(self, **kw)
Default workspace, file based.
6259909d656771135c48af63
class GPIO(ObjectFromList): <NEW_LINE> <INDENT> def __init__(self, server_guid=None): <NEW_LINE> <INDENT> super(GPIO, self).__init__() <NEW_LINE> if server_guid is None: <NEW_LINE> <INDENT> server_guid = [srv.guid for srv in Servers().get_all()] <NEW_LINE> <DEDENT> self.server_guid = server_guid <NEW_LINE> <DEDENT> def get_inputs(self, names=None): <NEW_LINE> <INDENT> return self._get_objects_from_list( "GPIO Input", object_names=names, server_guid=self.server_guid, sub_condition=None, ) <NEW_LINE> <DEDENT> def get_outputs(self, names=None): <NEW_LINE> <INDENT> return self._get_objects_from_list( "GPIO Output", object_names=names, server_guid=self.server_guid, sub_condition=None, )
Class for working with alarm inputs/outputs Args: server_guid (:obj:`str` | List[:obj:`str`], optional): Server guid or a list of guids. Defaults to :obj:`None`, which corresponds to all available servers. Examples: >>> gpio = GPIO() >>> gpio_door = gpio.get_inputs("Door")[0] >>> gpio_door.obj.state("gpio_input_level") 'Input Low (Normal High)' >>> gpio_light = gpio.get_outputs("Light")[0] >>> gpio_light.obj.set_output_high()
6259909d656771135c48af65
class CreateQuestionView(LoginRequired, CreateView): <NEW_LINE> <INDENT> template_name = 'qa/create_question.html' <NEW_LINE> message = _('Thank you! Your question has been created.') <NEW_LINE> form_class = QuestionForm <NEW_LINE> model = Question <NEW_LINE> def get_context_data(self, *args, **kwargs): <NEW_LINE> <INDENT> context = super( CreateQuestionView, self).get_context_data(*args, **kwargs) <NEW_LINE> context['course_no'] = self.kwargs['course_no'] <NEW_LINE> return context <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> form.instance.courseNo = course.objects.get(courseNo=self.kwargs['course_no']) <NEW_LINE> form.instance.user = self.request.user <NEW_LINE> return super(CreateQuestionView, self).form_valid(form) <NEW_LINE> <DEDENT> def get_success_url(self): <NEW_LINE> <INDENT> if qa_messages: <NEW_LINE> <INDENT> messages.success(self.request, self.message) <NEW_LINE> <DEDENT> url = reverse('qa:qa_index', kwargs={'course_no': self.kwargs['course_no']}) <NEW_LINE> return url
View to handle the creation of a new question
6259909dadb09d7d5dc0c3c6
class Dconv_vertical(nn.Module): <NEW_LINE> <INDENT> def __init__(self, inplane, outplane, kernel_size, stride, padding): <NEW_LINE> <INDENT> super(Dconv_vertical, self).__init__() <NEW_LINE> print('Dconv_vertical is used') <NEW_LINE> self.dilated_conv = nn.Conv2d(inplane, outplane, kernel_size=kernel_size, stride=stride, padding=padding, bias=False) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x_shape = x.size() <NEW_LINE> x_offset = torch.empty(x_shape[0], x_shape[1], x_shape[2], x_shape[3]).cuda(cuda_number) <NEW_LINE> perm = torch.randperm(x_shape[2]) <NEW_LINE> x_offset[:, :, :, :] = x[:, :, perm, :] <NEW_LINE> return self.dilated_conv(x_offset)
Deformable convolution with random shuffling of the feature map. The feature maps are randomly shuffled vertically, and the sampling locations are generated anew for each forward pass during training.
6259909d099cdd3c63676330
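The core trick in Dconv_vertical.forward is a random permutation of the height dimension before a regular convolution. A standalone CPU illustration of just that shuffle (without the convolution or the .cuda() call) might look like this:

    import torch

    x = torch.arange(2 * 3 * 4 * 4, dtype=torch.float32).view(2, 3, 4, 4)
    perm = torch.randperm(x.size(2))   # random ordering of the 4 rows (height)
    x_shuffled = x[:, :, perm, :]      # the same row shuffle is applied to every sample and channel
    assert x_shuffled.shape == x.shape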
class YakDBUtils: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def incrementKey(key): <NEW_LINE> <INDENT> if isinstance(key, str): key = key.encode("utf-8") <NEW_LINE> keyList = list(key) <NEW_LINE> for idx in range(-1,(-1)-len(keyList),-1): <NEW_LINE> <INDENT> lastChar = keyList[idx] <NEW_LINE> if lastChar == 255: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> newLastChar = lastChar + 1 <NEW_LINE> keyList[idx] = newLastChar <NEW_LINE> return bytes(keyList) <NEW_LINE> <DEDENT> return key + b"\x00"
This class provides static utility methods for using YakDB.
6259909d3617ad0b5ee07fc3
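The incrementKey helper above returns a key that sorts strictly after its input (e.g. as an exclusive end key for prefix scans). A few concrete cases, assuming the class is defined:

    assert YakDBUtils.incrementKey(b"abc") == b"abd"
    assert YakDBUtils.incrementKey("abc") == b"abd"                 # str input is UTF-8 encoded first
    assert YakDBUtils.incrementKey(b"a\xff") == b"b\xff"            # trailing 0xFF bytes are skipped
    assert YakDBUtils.incrementKey(b"\xff\xff") == b"\xff\xff\x00"  # all-0xFF keys get a NUL appended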
class Phi(CombinatorialFreeModule, BindableClass): <NEW_LINE> <INDENT> def __init__(self, NCSF): <NEW_LINE> <INDENT> CombinatorialFreeModule.__init__(self, NCSF.base_ring(), Compositions(), prefix='Phi', bracket=False, category=NCSF.MultiplicativeBasesOnPrimitiveElements()) <NEW_LINE> <DEDENT> def _from_complete_on_generators(self, n): <NEW_LINE> <INDENT> one = self.base_ring().one() <NEW_LINE> return self.sum_of_terms( ( (J, one / coeff_sp(J,[n])) for J in Compositions(n) ), distinct=True ) <NEW_LINE> <DEDENT> def _to_complete_on_generators(self, n): <NEW_LINE> <INDENT> minus_one = -self.base_ring().one() <NEW_LINE> complete = self.realization_of().complete() <NEW_LINE> return complete.sum_of_terms( ( (J, minus_one**(len(J)+1) * n / coeff_ell(J,[n])) for J in Compositions(n) ), distinct=True ) <NEW_LINE> <DEDENT> class Element(CombinatorialFreeModule.Element): <NEW_LINE> <INDENT> def verschiebung(self, n): <NEW_LINE> <INDENT> parent = self.parent() <NEW_LINE> C = parent._indices <NEW_LINE> return parent.sum_of_terms([(C([i // n for i in I]), coeff * (n ** len(I))) for (I, coeff) in self if all(i % n == 0 for i in I)], distinct=True) <NEW_LINE> <DEDENT> def star_involution(self): <NEW_LINE> <INDENT> parent = self.parent() <NEW_LINE> dct = {I.reversed(): coeff for (I, coeff) in self} <NEW_LINE> return parent._from_dict(dct) <NEW_LINE> <DEDENT> def psi_involution(self): <NEW_LINE> <INDENT> parent = self.parent() <NEW_LINE> dct = {I: (-1) ** (I.size() - len(I)) * coeff for (I, coeff) in self} <NEW_LINE> return parent._from_dict(dct)
The Hopf algebra of non-commutative symmetric functions in the Phi basis. The Phi basis is defined in Definition 3.4 of [NCSF1]_, where it is denoted by `(\Phi^I)_I`. It is a multiplicative basis, and is connected to the elementary generators `\Lambda_i` of the ring of non-commutative symmetric functions by the following relation: Define a non-commutative symmetric function `\Phi_n` for every positive integer `n` by the power series identity .. MATH:: \sum_{k\geq 1} t^k \frac{1}{k} \Phi_k = -\log \left( \sum_{k \geq 0} (-t)^k \Lambda_k \right), with `\Lambda_0` denoting `1`. For every composition `(i_1, i_2, \ldots, i_k)`, we have `\Phi^{(i_1, i_2, \ldots, i_k)} = \Phi_{i_1} \Phi_{i_2} \cdots \Phi_{i_k}`. The `\Phi`-basis is well-defined only when the base ring is a `\QQ`-algebra. The elements of the `\Phi`-basis are known as the "power-sum non-commutative symmetric functions of the second kind". The generators `\Phi_n` are related to the (first) Eulerian idempotents in the descent algebras of the symmetric groups (see [NCSF1]_, 5.4 for details). EXAMPLES:: sage: NCSF = NonCommutativeSymmetricFunctions(QQ) sage: Phi = NCSF.Phi(); Phi Non-Commutative Symmetric Functions over the Rational Field in the Phi basis sage: Phi.an_element() 2*Phi[] + 2*Phi[1] + 3*Phi[1, 1]
6259909dc4546d3d9def81d4
class Publisher(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return 'Catagory : %s' % self.name <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> get_latest_by = "name" <NEW_LINE> ordering = ['name'] <NEW_LINE> verbose_name = "Catagory" <NEW_LINE> verbose_name_plural = "Catagories"
Class that defines a book's publisher
6259909d3617ad0b5ee07fc7
class RedfishUpdateServiceNotFoundError( Exception ): <NEW_LINE> <INDENT> pass
Raised when the Update Service or an update action cannot be found
6259909d091ae35668706aa8
class LFRCollate(object): <NEW_LINE> <INDENT> def __init__(self, feature_dim, char_list, path_list, label_list, LFR_m=1, LFR_n=1): <NEW_LINE> <INDENT> self.path_list = path_list <NEW_LINE> self.label_list = label_list <NEW_LINE> self.LFR_m = LFR_m <NEW_LINE> self.LFR_n = LFR_n <NEW_LINE> self.feature_dim = feature_dim <NEW_LINE> self.char_list = char_list <NEW_LINE> <DEDENT> def __call__(self, batch): <NEW_LINE> <INDENT> return _collate_fn(batch, self.feature_dim, self.char_list, self.LFR_m, self.LFR_n, self.path_list, self.label_list)
Build this wrapper to pass the arguments (LFR_m, LFR_n) to _collate_fn
6259909e50812a4eaa621b08
class Share(models.Model): <NEW_LINE> <INDENT> container = models.ForeignKey(Container) <NEW_LINE> expire_date = models.DateTimeField('share expiration date') <NEW_LINE> secret = models.CharField(max_length=250,null=True,blank=True) <NEW_LINE> def generate_secret(self): <NEW_LINE> <INDENT> self.secret = str(uuid.uuid4()) <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.secret in ['',None]: <NEW_LINE> <INDENT> self.generate_secret() <NEW_LINE> <DEDENT> super(Share, self).save(*args, **kwargs) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.container.name <NEW_LINE> <DEDENT> def get_label(self): <NEW_LINE> <INDENT> return "main" <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> unique_together = ('expire_date','container',) <NEW_LINE> app_label = 'main'
A temporary share / link for a container
6259909e091ae35668706ab4
@python_2_unicode_compatible <NEW_LINE> class Binding(object): <NEW_LINE> <INDENT> def __init__(self, wsdl, name, port_name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.port_name = port_name <NEW_LINE> self.port_type = None <NEW_LINE> self.wsdl = wsdl <NEW_LINE> self._operations = {} <NEW_LINE> <DEDENT> def resolve(self, definitions): <NEW_LINE> <INDENT> self.port_type = definitions.get('port_types', self.port_name.text) <NEW_LINE> for operation in self._operations.values(): <NEW_LINE> <INDENT> operation.resolve(definitions) <NEW_LINE> <DEDENT> <DEDENT> def _operation_add(self, operation): <NEW_LINE> <INDENT> self._operations[operation.name] = operation <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__class__.__name__ <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<%s(name=%r, port_type=%r)>' % ( self.__class__.__name__, self.name.text, self.port_type) <NEW_LINE> <DEDENT> def get(self, name): <NEW_LINE> <INDENT> return self._operations.get(name) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def match(cls, node): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def parse(cls, definitions, xmlelement): <NEW_LINE> <INDENT> raise NotImplementedError()
Base class for the various bindings (SoapBinding / HttpBinding) Binding | +-> Operation | +-> ConcreteMessage | +-> AbstractMessage
6259909e283ffb24f3cf571d
class StylesList(object): <NEW_LINE> <INDENT> def __init__(self, dirlist): <NEW_LINE> <INDENT> self.__styles = sorted(self.__findStyles(dirlist)) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.__styles) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> return self.__styles[index] <NEW_LINE> <DEDENT> def __findStyles(self, dirlist): <NEW_LINE> <INDENT> styles = [] <NEW_LINE> for path in dirlist: <NEW_LINE> <INDENT> styles += self.__findStylesInDir(path) <NEW_LINE> <DEDENT> return styles <NEW_LINE> <DEDENT> def __findStylesInDir(self, path): <NEW_LINE> <INDENT> if not os.path.exists(path): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> style = Style() <NEW_LINE> return [os.path.join(path, styledir) for styledir in os.listdir(path) if (not styledir.startswith("__") and style.check(os.path.join(path, styledir)))]
Class for storing the list of existing page styles
6259909e50812a4eaa621b09
class InitialConditionsView(Sequence): <NEW_LINE> <INDENT> def __init__(self, model): <NEW_LINE> <INDENT> self.model = model <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> initial = self.model.initials[key] <NEW_LINE> return (initial.pattern, initial.value) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.model.initials)
Compatibility shim for the Model.initial_conditions property.
6259909e099cdd3c6367633c
class ServiceManager(list): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ServiceManager, self).__init__(*args, **kwargs) <NEW_LINE> msg = "ServiceManager is deprecated. Use fixtures instead." <NEW_LINE> warnings.warn(msg, DeprecationWarning) <NEW_LINE> self.failed = set() <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, type, value, tb): <NEW_LINE> <INDENT> self.stop_all() <NEW_LINE> <DEDENT> @property <NEW_LINE> def running(self): <NEW_LINE> <INDENT> def is_running(p): <NEW_LINE> <INDENT> return p.is_running() <NEW_LINE> <DEDENT> return filter(is_running, self) <NEW_LINE> <DEDENT> def start(self, service): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> map(self.start_class, service.depends) <NEW_LINE> if service.is_running(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if service in self.failed: <NEW_LINE> <INDENT> log.warning("%s previously failed to start", service) <NEW_LINE> return <NEW_LINE> <DEDENT> service.start() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> log.exception("Unable to start service %s", service) <NEW_LINE> self.failed.add(service) <NEW_LINE> <DEDENT> <DEDENT> def start_all(self): <NEW_LINE> <INDENT> for service in self: <NEW_LINE> <INDENT> self.start(service) <NEW_LINE> <DEDENT> <DEDENT> def start_class(self, class_): <NEW_LINE> <INDENT> matches = filter(lambda svc: isinstance(svc, class_), self) <NEW_LINE> if not matches: <NEW_LINE> <INDENT> svc = class_() <NEW_LINE> self.register(svc) <NEW_LINE> matches = [svc] <NEW_LINE> <DEDENT> map(self.start, matches) <NEW_LINE> return matches <NEW_LINE> <DEDENT> def register(self, service): <NEW_LINE> <INDENT> self.append(service) <NEW_LINE> <DEDENT> def stop_class(self, class_): <NEW_LINE> <INDENT> matches = filter(lambda svc: isinstance(svc, class_), self) <NEW_LINE> map(self.stop, matches) <NEW_LINE> <DEDENT> def stop(self, service): <NEW_LINE> <INDENT> for dep_class in service.depended_by: <NEW_LINE> <INDENT> self.stop_class(dep_class) <NEW_LINE> <DEDENT> service.stop() <NEW_LINE> <DEDENT> def stop_all(self): <NEW_LINE> <INDENT> map(self.stop, reversed(self.running))
A class that manages services that may be required by some of the unit tests. ServiceManager will start up daemon services as subprocesses or threads and will stop them when requested or when destroyed.
6259909e283ffb24f3cf571e
class DataLoaderTransformer(NervanaObject): <NEW_LINE> <INDENT> def __init__(self, dataloader, index=None): <NEW_LINE> <INDENT> super(DataLoaderTransformer, self).__init__() <NEW_LINE> self.dataloader = dataloader <NEW_LINE> self.index = index <NEW_LINE> if self.index is not None: <NEW_LINE> <INDENT> data_size = np.prod(self.dataloader.shapes()[index]) <NEW_LINE> self._shape = (data_size, self.be.bsz) <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, key): <NEW_LINE> <INDENT> return getattr(self.dataloader, key) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for tup in self.dataloader: <NEW_LINE> <INDENT> if self.index is None: <NEW_LINE> <INDENT> yield self.transform(tup) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret = self.transform(tup[self.index]) <NEW_LINE> if ret is None: <NEW_LINE> <INDENT> raise ValueError( '{} returned None from a transformer'.format( self.__class__.__name__ ) ) <NEW_LINE> <DEDENT> out = list(tup) <NEW_LINE> out[self.index] = ret <NEW_LINE> yield out <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def transform(self, t): <NEW_LINE> <INDENT> raise NotImplemented()
DataLoaderTransformers are used to transform the output of a DataLoader. DataLoader doesn't have easy access to the device or graph, so any computation that should happen there should use a DataLoaderTransformer.
6259909e3617ad0b5ee07fd9
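A hedged sketch of how a concrete transformer might be written against the DataLoaderTransformer entry above: subclass it and implement transform(), which must not return None when an index is supplied. The Scale class and its factor are hypothetical.

    class Scale(DataLoaderTransformer):
        def __init__(self, dataloader, index, factor=1.0 / 255):
            super(Scale, self).__init__(dataloader, index=index)
            self.factor = factor

        def transform(self, t):
            # rescale the tensor at position `index` in each minibatch tuple
            return t * self.factor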
class GaussianSimProcess(SimProcess): <NEW_LINE> <INDENT> def __init__(self, rate, std): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.has_pdf = True <NEW_LINE> self.has_cdf = True <NEW_LINE> self.rate = rate <NEW_LINE> self.std = std <NEW_LINE> <DEDENT> def generate_trace(self): <NEW_LINE> <INDENT> return max(0, np.random.normal(loc=1/self.rate, scale=self.std)) <NEW_LINE> <DEDENT> def pdf(self, x): <NEW_LINE> <INDENT> return norm.pdf(x, loc=1/self.rate, scale=self.std) <NEW_LINE> <DEDENT> def cdf(self, x): <NEW_LINE> <INDENT> return norm.cdf(x, loc=1/self.rate, scale=self.std)
GaussianSimProcess extends the functionality of :class:`~simfaas.SimProcess.SimProcess` for gaussian processes. This class also implements the `pdf` and `cdf` functions which can be used for visualization purposes. Parameters ---------- rate : float The rate at which the process should fire off std : float The standard deviation of the simulated process
6259909ed8ef3951e32c8d9f
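For intuition about the GaussianSimProcess entry above, the trace generation clips a normal draw at zero, with mean 1/rate and the given std. A standalone numpy/scipy illustration (rate=2, std=0.1, so the mean inter-event time is about 0.5):

    import numpy as np
    from scipy.stats import norm

    rate, std = 2.0, 0.1
    samples = np.maximum(0, np.random.normal(loc=1 / rate, scale=std, size=10000))
    print(samples.mean())                               # close to 0.5
    print(norm.cdf(1 / rate, loc=1 / rate, scale=std))  # 0.5: half the mass lies below the mean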
class _PartitionKeyRange(object): <NEW_LINE> <INDENT> MinInclusive = 'minInclusive' <NEW_LINE> MaxExclusive = 'maxExclusive' <NEW_LINE> Id = 'id' <NEW_LINE> Parents = 'parents'
Partition Key Range Constants
6259909e656771135c48af75
@python_2_unicode_compatible <NEW_LINE> class Person(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField( User, on_delete=models.CASCADE, help_text="The corresponding user to this person") <NEW_LINE> phone = models.CharField( max_length=20, blank=True, help_text="Person's phone number, no particular formatting") <NEW_LINE> allergies = models.CharField( max_length=400, blank=True, help_text="Allergy information for the person") <NEW_LINE> comments = models.CharField( max_length=400, blank=True, help_text="Comments or other notes about the person") <NEW_LINE> teams = models.ManyToManyField( Team, blank=True, help_text="Teams that the person is on") <NEW_LINE> is_shib_acct = models.BooleanField( help_text="A boolean to indicate if the person uses shibboleth authentication for login") <NEW_LINE> objects = PersonManager() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> name = self.user.first_name + " " + self.user.last_name + " (" + self.user.username + ")" <NEW_LINE> if(name == " ()"): <NEW_LINE> <INDENT> return "Anonymous User" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return name <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def formatted_phone_number(self): <NEW_LINE> <INDENT> match = re.match("(?:\\+?1 ?-?)?\\(?([0-9]{3})\\)?-? ?([0-9]{3})-? ?([0-9]{4})", self.phone) <NEW_LINE> if(match): <NEW_LINE> <INDENT> return match.expand("(\\1)-\\2-\\3") <NEW_LINE> <DEDENT> return self.phone
A class to associate more personal information with the default Django auth User class
6259909e3617ad0b5ee07fdd
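The formatted_phone_number property in the Person entry above normalizes US-style numbers with a regular expression. The same pattern can be exercised on its own (the sample number is made up):

    import re

    pattern = "(?:\\+?1 ?-?)?\\(?([0-9]{3})\\)?-? ?([0-9]{3})-? ?([0-9]{4})"
    match = re.match(pattern, "+1 (412) 555-0123")
    print(match.expand("(\\1)-\\2-\\3"))   # (412)-555-0123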
class PlecostResults(object): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.__target = kwargs.get("target", None) <NEW_LINE> self.__start_time = kwargs.get("start_time", datetime.now()) <NEW_LINE> self.__end_time = kwargs.get("end_time", datetime.now()) <NEW_LINE> self.__wordpress_info = kwargs.get("wordpress_info", None) <NEW_LINE> self.__plugins = kwargs.get("plugins", None) <NEW_LINE> if not isinstance(self.__target, str): <NEW_LINE> <INDENT> raise TypeError("Expected basestring, got '%s' instead" % type(self.__target)) <NEW_LINE> <DEDENT> if not isinstance(self.__wordpress_info, PlecostWordPressInfo): <NEW_LINE> <INDENT> raise TypeError("Expected PlecostWordPressInfo, got '%s' instead" % type(self.__wordpress_info)) <NEW_LINE> <DEDENT> if not isinstance(self.__plugins, list): <NEW_LINE> <INDENT> raise TypeError("Expected list, got '%s' instead" % type(self.__plugins)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for plugin in self.__plugins: <NEW_LINE> <INDENT> if not isinstance(plugin, PlecostPluginInfo): <NEW_LINE> <INDENT> raise TypeError("Expected PlecostPluginInfo, got '%s' instead" % type(plugin)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.__outdated_plugins = [] <NEW_LINE> for plugin in self.__plugins: <NEW_LINE> <INDENT> if plugin.is_outdated is True: <NEW_LINE> <INDENT> self.__outdated_plugins.append(plugin) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def target(self): <NEW_LINE> <INDENT> return self.__target <NEW_LINE> <DEDENT> @property <NEW_LINE> def wordpress_info(self): <NEW_LINE> <INDENT> return self.__wordpress_info <NEW_LINE> <DEDENT> @property <NEW_LINE> def plugins(self): <NEW_LINE> <INDENT> return self.__plugins <NEW_LINE> <DEDENT> @property <NEW_LINE> def start_time(self): <NEW_LINE> <INDENT> return self.__start_time <NEW_LINE> <DEDENT> @property <NEW_LINE> def end_time(self): <NEW_LINE> <INDENT> return self.__end_time <NEW_LINE> <DEDENT> @property <NEW_LINE> def outdated_plugins(self): <NEW_LINE> <INDENT> return self.__outdated_plugins
Plecost results
6259909eadb09d7d5dc0c3e6
class VixException(Exception): <NEW_LINE> <INDENT> def __init__(self, err_code): <NEW_LINE> <INDENT> _vix.Vix_GetErrorText.restype = c_char_p <NEW_LINE> self._err_code = err_code <NEW_LINE> self._msg = _vix.Vix_GetErrorText(self._err_code, None) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr(self._msg)
An exception that represents VIX-related errors and the corresponding messages.
6259909e283ffb24f3cf5724
class Sha256DictStore(BaseDictStore): <NEW_LINE> <INDENT> HASH_SIZE_BYTES = 8 <NEW_LINE> def hash_object(cls, serialized_obj): <NEW_LINE> <INDENT> hash_bytes = sha256(serialized_obj).digest() <NEW_LINE> hexdigest = hexlify(hash_bytes[:Sha256DictStore.HASH_SIZE_BYTES]) <NEW_LINE> return hexdigest.decode('utf-8')
Dict-based store using truncated SHA256 hex-encoded hashes. >>> store = Sha256DictStore() >>> obj = b'dummy' >>> obj_hash = store.hash_object(obj) >>> store.add(obj) == obj_hash True >>> obj_hash in store True >>> b'nonexistent' not in store True >>> store.get(obj_hash) == obj True
6259909e656771135c48af77
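The hashing scheme in the Sha256DictStore entry above is a SHA-256 digest truncated to its first 8 bytes and hex-encoded, giving 16-character keys. The same computation in isolation:

    from binascii import hexlify
    from hashlib import sha256

    digest = hexlify(sha256(b"dummy").digest()[:8]).decode("utf-8")
    assert len(digest) == 16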
class NodeStatistic(NamedTuple): <NEW_LINE> <INDENT> is_updated: bool = False <NEW_LINE> error: Exception = None <NEW_LINE> update_time: float = None
Statistics should be kept separately for each node because each node can have tens or even hundreds of different statistic profiles, depending on the number of group nodes using it
6259909eadb09d7d5dc0c3e8
class VxrailHost(object): <NEW_LINE> <INDENT> def __init__(self, esxversion): <NEW_LINE> <INDENT> self.esxversion = esxversion <NEW_LINE> <DEDENT> def GetEsxVersion(self, host): <NEW_LINE> <INDENT> version = host.config.product.fullName <NEW_LINE> return version
A class that defines ESX attributes for a VxRail Host Attributes: esxversion: Version of ESXi marvinvib: Version of the Marvin vib.
6259909e283ffb24f3cf5726
class MyWindow(QWidget): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.init_gui() <NEW_LINE> <DEDENT> def init_gui(self): <NEW_LINE> <INDENT> self.resize(200,200) <NEW_LINE> self.move(50,50) <NEW_LINE> self.setWindowTitle("MyWindow") <NEW_LINE> self.button1 = QPushButton('Button1') <NEW_LINE> self.button2 = QPushButton('Button2') <NEW_LINE> self.button3 = QPushButton('Button3') <NEW_LINE> self.button4 = QPushButton('Button4') <NEW_LINE> self.button5 = QPushButton('Button5') <NEW_LINE> self.button6 = QPushButton('Button6') <NEW_LINE> self.button7 = QPushButton('Button7') <NEW_LINE> self.button8 = QPushButton('Button8') <NEW_LINE> self.vbox = QVBoxLayout(self) <NEW_LINE> self.vbox.addWidget(self.button1) <NEW_LINE> self.vbox.addWidget(self.button2) <NEW_LINE> self.vbox.addWidget(self.button3) <NEW_LINE> self.vbox.addWidget(self.button4) <NEW_LINE> self.hbox = QHBoxLayout(self) <NEW_LINE> self.hbox.addWidget(self.button5) <NEW_LINE> self.hbox.addWidget(self.button6) <NEW_LINE> self.hbox.addWidget(self.button7) <NEW_LINE> self.hbox.addWidget(self.button8) <NEW_LINE> self.vbox.addStretch(1) <NEW_LINE> self.vbox.addLayout(self.hbox) <NEW_LINE> self.setLayout(self.vbox)
Main Window class for our application
6259909ec4546d3d9def81e3
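A hedged launch sketch for the MyWindow entry above; it assumes the widget classes come from PyQt5 (they could equally be PySide2) and that MyWindow is importable:

    import sys
    from PyQt5.QtWidgets import QApplication

    if __name__ == "__main__":
        app = QApplication(sys.argv)
        window = MyWindow()
        window.show()
        sys.exit(app.exec_())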
class StringRelativePointer16(StringRelativePointer): <NEW_LINE> <INDENT> def __init__(self, size=0, address=None, field_order='auto'): <NEW_LINE> <INDENT> super().__init__(size=size, address=address, bit_size=16, field_order=field_order)
A `StringRelativePointer16` field is a :class:`StringRelativePointer` field with a :class:`Field` *size* of two bytes.
6259909e091ae35668706ac0
class TestNER(unittest.TestCase): <NEW_LINE> <INDENT> @ignore_warnings <NEW_LINE> def test_stanford(self): <NEW_LINE> <INDENT> sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> result = sock.connect_ex(("localhost", 9000)) <NEW_LINE> if result != 0: <NEW_LINE> <INDENT> Popen( [executable, "core_nlp.py"], cwd="c:\stanford-corenlp-full-2018-02-27", creationflags=CREATE_NEW_CONSOLE, ) <NEW_LINE> print( "Initializing CoreNLP...." ) <NEW_LINE> time.sleep(120) <NEW_LINE> <DEDENT> self.assertEqual( get_stanford_named_entities( "Apple announced it would negotiate a deal with Microsoft for $1 billion." ), [("MONEY", "$ 1 billion")], ) <NEW_LINE> sock.close() <NEW_LINE> <DEDENT> @ignore_warnings <NEW_LINE> def test_spacy(self): <NEW_LINE> <INDENT> self.assertEqual( get_spacy_named_entities( "Apple announced it would negotiate a deal with Microsoft for $1 billion." ), [ ("ORGANIZATION", "Apple"), ("ORGANIZATION", "Microsoft"), ("MONEY", "$1 billion"), ], ) <NEW_LINE> <DEDENT> @ignore_warnings <NEW_LINE> def test_nltk(self): <NEW_LINE> <INDENT> self.assertEqual( get_nltk_named_entities( "Apple announced it would negotiate a deal with Microsoft for $1 billion." ), [("PERSON", "Apple"), ("ORGANIZATION", "Microsoft")], ) <NEW_LINE> <DEDENT> @ignore_warnings <NEW_LINE> def test_consolidated(self): <NEW_LINE> <INDENT> self.assertEqual( get_named_entities( "Apple announced it would negotiate a deal with Microsoft for $1 billion." ), [ ("ORGANIZATION", "Apple"), ("ORGANIZATION", "Microsoft"), ("MONEY", "$1 billion"), ], )
Class for testing named entity recognition functions
6259909e283ffb24f3cf5728
class MattermChannel(Channel): <NEW_LINE> <INDENT> def __init__(self, team, subteam, id, name, private=False, member=True, im=None): <NEW_LINE> <INDENT> if subteam is None and im is None: <NEW_LINE> <INDENT> raise Exception('only DM channels should be subteamless') <NEW_LINE> <DEDENT> if subteam is not None: <NEW_LINE> <INDENT> fullid = '%s/%s' % (subteam.name, id,) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fullid = id <NEW_LINE> <DEDENT> self.team = team <NEW_LINE> self.subteam = subteam <NEW_LINE> self.client = team.client <NEW_LINE> self.id = fullid <NEW_LINE> self.realid = id <NEW_LINE> self.realname = name <NEW_LINE> if subteam is not None: <NEW_LINE> <INDENT> self.name = '%s/%s' % (subteam.name, name,) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> self.private = private <NEW_LINE> self.member = member <NEW_LINE> self.imuser = im <NEW_LINE> if subteam is not None: <NEW_LINE> <INDENT> self.nameparselist = [ subteam.nameparser, ParseMatch(name) ] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.nameparselist = [ ParseMatch(name) ] <NEW_LINE> <DEDENT> <DEDENT> def display_name(self): <NEW_LINE> <INDENT> if self.subteam is None: <NEW_LINE> <INDENT> return self.realname <NEW_LINE> <DEDENT> prefix = self.subteam.name <NEW_LINE> aliases = self.team.get_sub_aliases(self.subteam.id) <NEW_LINE> if aliases: <NEW_LINE> <INDENT> prefix = aliases[0] <NEW_LINE> <DEDENT> return '%s/%s' % (prefix, self.realname) <NEW_LINE> <DEDENT> def name_parsers(self): <NEW_LINE> <INDENT> return self.nameparselist <NEW_LINE> <DEDENT> def muted(self): <NEW_LINE> <INDENT> return (self.id in self.team.muted_channels)
Simple object representing one channel in a group.
6259909e50812a4eaa621b0f
class RangeColumns(Ranges): <NEW_LINE> <INDENT> def __init__(self, rng): <NEW_LINE> <INDENT> self.rng = rng <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.rng.shape[1] <NEW_LINE> <DEDENT> count = property(__len__) <NEW_LINE> def autofit(self): <NEW_LINE> <INDENT> self.rng.impl.autofit(axis='c') <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for j in range(0, self.rng.shape[1]): <NEW_LINE> <INDENT> yield self.rng[:, j] <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, key): <NEW_LINE> <INDENT> return self.rng[:, key-1] <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if isinstance(key, slice): <NEW_LINE> <INDENT> return RangeRows(rng=self.rng[:, key]) <NEW_LINE> <DEDENT> elif isinstance(key, int): <NEW_LINE> <INDENT> return self.rng[:, key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError("Indices must be integers or slices, not %s" % type(key).__name__) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{}({})'.format( self.__class__.__name__, repr(self.rng) )
Represents the columns of a range. Do not construct this class directly, use :attr:`Range.columns` instead. Example ------- .. code-block:: python import xlwings as xw rng = xw.Range('A1:C4') assert len(rng.columns) == 3 # or rng.columns.count rng.columns[0].value = 'a' assert rng.columns[2] == xw.Range('C1:C4') assert rng.columns(2) == xw.Range('B1:B4') for c in rng.columns: print(c.address)
6259909e187af65679d2ab32
class FileLikeProvider: <NEW_LINE> <INDENT> def __init__(self, destination): <NEW_LINE> <INDENT> self.destination = destination <NEW_LINE> self.file_object = None <NEW_LINE> self.ret = None <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if not self.destination: <NEW_LINE> <INDENT> self.file_object = io.StringIO(newline='') <NEW_LINE> <DEDENT> elif isinstance(self.destination, str): <NEW_LINE> <INDENT> self.file_object = open(self.destination, 'w', newline='') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.file_object = self.destination <NEW_LINE> <DEDENT> return self.file_object.__enter__() <NEW_LINE> <DEDENT> def __exit__(self, *args): <NEW_LINE> <INDENT> if not self.destination: <NEW_LINE> <INDENT> self.ret = self.file_object.getvalue() <NEW_LINE> <DEDENT> self.file_object.__exit__(*args) <NEW_LINE> <DEDENT> def get_return(self): <NEW_LINE> <INDENT> return self.ret
Class that produces a file-like object for different kinds of outputs. The purpose is to relieve exporters of handling different kinds of outputs. Supported output types (see doc of __init__): - to string - to file object - to file path (takes care of opening the file) Usage: # destination = None or string or other file_provider = FileLikeProvider(destination) with file_provider as file_object: file_object.write('my data') # if destination is None data = file_provider.get_return()
6259909ed8ef3951e32c8da5
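A usage sketch covering two of the destinations supported by the FileLikeProvider entry above: None (capture to a string) and a file path (the provider opens and closes the file):

    provider = FileLikeProvider(None)          # no destination: buffer into a StringIO
    with provider as out:
        out.write("a,b\n1,2\n")
    csv_text = provider.get_return()           # the buffered text

    with FileLikeProvider("out.csv") as out:   # path destination: file handling is done for us
        out.write(csv_text)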
class DataObjectBase(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ts = None <NEW_LINE> self.value = None
Base class for DataObject and DataObjectX
6259909e187af65679d2ab33
class AddWorkflowToManage(BaseHandler): <NEW_LINE> <INDENT> def __init__(self, component): <NEW_LINE> <INDENT> BaseHandler.__init__(self, component) <NEW_LINE> self.threadpool = ThreadPool( "WMComponent.WorkflowManager.Handler.AddWorkflowToManageSlave", self.component, 'AddWorkflowToManage', self.component.config.WorkflowManager.maxThreads) <NEW_LINE> <DEDENT> def __call__(self, event, payload): <NEW_LINE> <INDENT> self.threadpool.enqueue(event, {'event' : event, 'payload' :payload['payload']})
Default handler for addition of workflow / fileset --> workflow mapping
6259909eadb09d7d5dc0c3f2
class AddingIAMRole(object): <NEW_LINE> <INDENT> def __init__(self, cluster_identifier, iam_role_name, cmd_prefix): <NEW_LINE> <INDENT> self.cmd_prefix = cmd_prefix <NEW_LINE> self.cluster_identifier = cluster_identifier <NEW_LINE> self.iam_role_name = iam_role_name <NEW_LINE> cmd = self.cmd_prefix + ['redshift', 'modify-cluster-iam-roles', '--cluster-identifier', self.cluster_identifier, '--add-iam-roles', self.iam_role_name] <NEW_LINE> vm_util.IssueCommand(cmd)
IAM role to associate with the cluster. An IAM role can be associated with the cluster to grant it access to other services such as S3. Attributes: cluster_identifier: Identifier of the cluster iam_role_name: Name of the IAM role
6259909e091ae35668706ac8
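A minimal usage sketch with hypothetical identifiers; constructing the object immediately issues the modify-cluster-iam-roles call through the supplied command prefix.

# Hypothetical cluster name, role ARN, and command prefix, for illustration only.
role = AddingIAMRole(
    cluster_identifier='my-redshift-cluster',
    iam_role_name='arn:aws:iam::123456789012:role/redshift-s3-reader',
    cmd_prefix=['aws', '--region', 'us-east-1'])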
class PyImageioFfmpeg(PythonPackage): <NEW_LINE> <INDENT> homepage = "https://github.com/imageio/imageio-ffmpeg" <NEW_LINE> pypi = "imageio-ffmpeg/imageio-ffmpeg-0.4.3.tar.gz" <NEW_LINE> version('0.4.3', sha256='f826260a3207b872f1a4ba87ec0c8e02c00afba4fd03348a59049bdd8215841e') <NEW_LINE> depends_on('python@3.4:', type=('build', 'run')) <NEW_LINE> depends_on('py-setuptools', type='build') <NEW_LINE> depends_on('ffmpeg', type='run')
The purpose of this project is to provide a simple and reliable ffmpeg wrapper for working with video files. It implements two simple generator functions for reading and writing data from/to ffmpeg, which reliably terminate the ffmpeg process when done. It also takes care of publishing platform-specific wheels that include the binary ffmpeg executables.
6259909ec4546d3d9def81e8
class signal(): <NEW_LINE> <INDENT> def __init__(self,signal): <NEW_LINE> <INDENT> self.__signal = np.asarray(signal) <NEW_LINE> <DEDENT> def __getitem__(self,key): <NEW_LINE> <INDENT> return self.__signal[key] <NEW_LINE> <DEDENT> def add_signal(self,signal): <NEW_LINE> <INDENT> sCombined = list(self.__signal[:]) + list(signal) <NEW_LINE> self.__signal = sCombined <NEW_LINE> <DEDENT> def zero_pad(self,sr,sg): <NEW_LINE> <INDENT> L = len(self.__signal[:]) <NEW_LINE> if L < sg*sr: <NEW_LINE> <INDENT> sig_temp = np.zeros(sg*sr) <NEW_LINE> sig_temp[0:len(self.__signal[:])] = self.__signal[:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sig_temp = self.__signal[0:sg*sr] <NEW_LINE> <DEDENT> self.__signal = sig_temp <NEW_LINE> <DEDENT> def get_fft(self,total_time=False,sr=False, half_window=True,normalize = True,pad= 0,window=False): <NEW_LINE> <INDENT> from scipy import fftpack as fft <NEW_LINE> total_time = float(total_time) <NEW_LINE> n = self.__signal.size <NEW_LINE> if total_time: <NEW_LINE> <INDENT> timestep = total_time/n <NEW_LINE> <DEDENT> elif sr: <NEW_LINE> <INDENT> timestep = 1.0/sr <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> fourier = fft.fft(self.__signal) <NEW_LINE> fullfft = fourier <NEW_LINE> freq = fft.fftfreq(n, d=timestep) <NEW_LINE> fullfreq = freq <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> print('Please enter total_time or sample_rate') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if half_window: <NEW_LINE> <INDENT> half_n = int(n/2) <NEW_LINE> fourier = fourier[:half_n] <NEW_LINE> freq = freq[:half_n] <NEW_LINE> <DEDENT> if normalize: <NEW_LINE> <INDENT> fourier = 2.0/n * abs(fourier) <NEW_LINE> <DEDENT> fft_dict = {'freq':freq,'fourier':fourier,'fullfft':fullfft, 'fullfreq':fullfreq} <NEW_LINE> return fft_dict
Generates an object that represents the selected signal. Parameters ---------- signal : list Returns ------- signal: Class object that represents the signal Examples -------- >>> signal = signal(sig) Call the signal class to generate a signal object.
6259909ed8ef3951e32c8da8
class MarginLoss(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, nb_classes, beta=1.2, margin=0.2, nu=0.0, class_specific_beta=False, **kwargs): <NEW_LINE> <INDENT> super(MarginLoss, self).__init__() <NEW_LINE> self.nb_classes = nb_classes <NEW_LINE> self.class_specific_beta = class_specific_beta <NEW_LINE> if class_specific_beta: <NEW_LINE> <INDENT> assert nb_classes is not None <NEW_LINE> beta = torch.ones(nb_classes, dtype=torch.float32) * beta <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> beta = torch.tensor([beta], dtype=torch.float32) <NEW_LINE> <DEDENT> self.beta = torch.nn.Parameter(beta) <NEW_LINE> self.margin = margin <NEW_LINE> self.nu = nu <NEW_LINE> self.sampler = Sampler() <NEW_LINE> <DEDENT> def forward(self, E, T): <NEW_LINE> <INDENT> anchor_idx, anchors, positives, negatives = self.sampler(E, T) <NEW_LINE> anchor_classes = T[anchor_idx] <NEW_LINE> if anchor_classes is not None: <NEW_LINE> <INDENT> if self.class_specific_beta: <NEW_LINE> <INDENT> beta = self.beta[anchor_classes] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> beta = self.beta <NEW_LINE> <DEDENT> beta_regularization_loss = torch.norm(beta, p=1) * self.nu <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> beta = self.beta <NEW_LINE> beta_regularization_loss = 0.0 <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> d_ap = ((positives - anchors)**2).sum(dim=1) + 1e-8 <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> print(positives.shape, anchors.shape) <NEW_LINE> raise e <NEW_LINE> <DEDENT> d_ap = torch.sqrt(d_ap) <NEW_LINE> d_an = ((negatives - anchors)**2).sum(dim=1) + 1e-8 <NEW_LINE> d_an = torch.sqrt(d_an) <NEW_LINE> pos_loss = F.relu(d_ap - beta + self.margin) <NEW_LINE> neg_loss = F.relu(beta - d_an + self.margin) <NEW_LINE> pair_cnt = torch.sum((pos_loss > 0.0) + (neg_loss > 0.0)).type_as(pos_loss) <NEW_LINE> loss = torch.sum(pos_loss + neg_loss) <NEW_LINE> if pair_cnt > 0.0: <NEW_LINE> <INDENT> loss = (loss + beta_regularization_loss) / pair_cnt <NEW_LINE> <DEDENT> return loss
Margin-based loss. Parameters ---------- nb_classes: int Number of classes in the train dataset. Used to initialize class-specific boundaries beta. margin : float Margin between positive and negative pairs. nu : float Regularization parameter for beta. class_specific_beta : bool Whether class-specific boundaries beta are being used. Inputs: - anchors: sampled anchor embeddings. - positives: sampled positive embeddings. - negatives: sampled negative embeddings. - anchor_classes: labels of anchors. Used to get class-specific beta. Outputs: Loss value.
6259909e187af65679d2ab36
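An illustrative forward pass, assuming the module's Sampler (used internally for triplet mining) is importable alongside MarginLoss; the shapes and class count below are arbitrary.

import torch

criterion = MarginLoss(nb_classes=10, beta=1.2, margin=0.2)
E = torch.randn(32, 128)                # batch of 32 embeddings of dimension 128
T = torch.randint(0, 10, (32,))         # integer class label per embedding
loss = criterion(E, T)
loss.backward()                         # beta is an nn.Parameter, so it receives gradients too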
class FeedAppViewSet(CORSMixin, MarketplaceView, SlugOrIdMixin, ImageURLUploadMixin): <NEW_LINE> <INDENT> authentication_classes = [RestOAuthAuthentication, RestSharedSecretAuthentication, RestAnonymousAuthentication] <NEW_LINE> permission_classes = [AnyOf(AllowReadOnly, GroupPermission('Feed', 'Curate'))] <NEW_LINE> filter_backends = (OrderingFilter,) <NEW_LINE> queryset = FeedApp.objects.all() <NEW_LINE> cors_allowed_methods = ('get', 'delete', 'post', 'put', 'patch') <NEW_LINE> serializer_class = FeedAppSerializer <NEW_LINE> image_fields = (('background_image_upload_url', 'image_hash', ''),) <NEW_LINE> def list(self, request, *args, **kwargs): <NEW_LINE> <INDENT> page = self.paginate_queryset( self.filter_queryset(self.get_queryset())) <NEW_LINE> serializer = self.get_pagination_serializer(page) <NEW_LINE> return response.Response(serializer.data)
A viewset for the FeedApp class, which highlights a single app and some additional metadata (e.g. a review or a screenshot).
6259909eadb09d7d5dc0c3f6
class FunctionalTestCase(TestCase): <NEW_LINE> <INDENT> pass
Simple test case for functional tests
6259909e091ae35668706acc
class ExactSolver(Sampler): <NEW_LINE> <INDENT> properties = None <NEW_LINE> parameters = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.properties = {} <NEW_LINE> self.parameters = {} <NEW_LINE> <DEDENT> def sample(self, bqm: BinaryQuadraticModel, **kwargs) -> SampleSet: <NEW_LINE> <INDENT> kwargs = self.remove_unknown_kwargs(**kwargs) <NEW_LINE> if not len(bqm.variables): <NEW_LINE> <INDENT> return SampleSet.from_samples([], bqm.vartype, energy=[]) <NEW_LINE> <DEDENT> samples = _graycode(bqm) <NEW_LINE> if bqm.vartype is Vartype.SPIN: <NEW_LINE> <INDENT> samples = 2*samples - 1 <NEW_LINE> <DEDENT> return SampleSet.from_samples_bqm((samples, list(bqm.variables)), bqm)
A simple exact solver for testing and debugging code using your local CPU. Notes: This solver becomes slow for problems with 18 or more variables. Examples: This example solves a two-variable Ising model. >>> h = {'a': -0.5, 'b': 1.0} >>> J = {('a', 'b'): -1.5} >>> sampleset = dimod.ExactSolver().sample_ising(h, J) >>> print(sampleset) a b energy num_oc. 0 -1 -1 -2.0 1 2 +1 +1 -1.0 1 1 +1 -1 0.0 1 3 -1 +1 3.0 1 ['SPIN', 4 rows, 4 samples, 2 variables]
6259909e656771135c48af7e
class Task(object): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> def __init__(self, task_id, job_exe): <NEW_LINE> <INDENT> self._task_id = task_id <NEW_LINE> self._job_exe_id = job_exe.id <NEW_LINE> self._cpus = job_exe.cpus_scheduled <NEW_LINE> self._mem = job_exe.mem_scheduled <NEW_LINE> self._disk_in = job_exe.disk_in_scheduled <NEW_LINE> self._disk_out = job_exe.disk_out_scheduled <NEW_LINE> self._disk_total = job_exe.disk_total_scheduled <NEW_LINE> <DEDENT> def are_resources_enough(self, resources): <NEW_LINE> <INDENT> required_resources = self.get_resources() <NEW_LINE> enough_cpus = resources.cpus >= required_resources.cpus <NEW_LINE> enough_mem = resources.mem >= required_resources.mem <NEW_LINE> enough_disk = resources.disk >= required_resources.disk <NEW_LINE> return enough_cpus and enough_mem and enough_disk <NEW_LINE> <DEDENT> def get_id(self): <NEW_LINE> <INDENT> return self._task_id <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def get_resources(self): <NEW_LINE> <INDENT> raise NotImplementedError()
Abstract base class for a job execution task
6259909ed8ef3951e32c8dab
class ChordViz(BaseViz): <NEW_LINE> <INDENT> viz_type = "chord" <NEW_LINE> verbose_name = _("Directed Force Layout") <NEW_LINE> credits = '<a href="https://github.com/d3/d3-chord">Bostock</a>' <NEW_LINE> is_timeseries = False <NEW_LINE> def query_obj(self): <NEW_LINE> <INDENT> qry = super(ChordViz, self).query_obj() <NEW_LINE> fd = self.form_data <NEW_LINE> qry['groupby'] = [fd.get('groupby'), fd.get('columns')] <NEW_LINE> qry['metrics'] = [fd.get('metric')] <NEW_LINE> return qry <NEW_LINE> <DEDENT> def get_data(self, df): <NEW_LINE> <INDENT> df.columns = ['source', 'target', 'value'] <NEW_LINE> nodes = list(set(df['source']) | set(df['target'])) <NEW_LINE> matrix = {} <NEW_LINE> for source, target in product(nodes, nodes): <NEW_LINE> <INDENT> matrix[(source, target)] = 0 <NEW_LINE> <DEDENT> for source, target, value in df.to_records(index=False): <NEW_LINE> <INDENT> matrix[(source, target)] = value <NEW_LINE> <DEDENT> m = [[matrix[(n1, n2)] for n1 in nodes] for n2 in nodes] <NEW_LINE> return { 'nodes': list(nodes), 'matrix': m, }
A Chord diagram
6259909eadb09d7d5dc0c3fc
class Issue(db.Model): <NEW_LINE> <INDENT> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> car_id = db.Column(db.Integer, unique=False, nullable=False) <NEW_LINE> time = db.Column(db.DateTime, unique=False, nullable=False) <NEW_LINE> details = db.Column(db.String(128), unique=False, nullable=False) <NEW_LINE> resolved = db.Column(db.Boolean, unique=False, nullable=False) <NEW_LINE> car = None <NEW_LINE> def __init__(self, car_id, time, details): <NEW_LINE> <INDENT> self.car_id = car_id <NEW_LINE> self.time = time <NEW_LINE> self.details = details <NEW_LINE> self.resolved = False
Class to represent an issue for a car :param id: unique id of the issue :type id: int :param car_id: the id of the car that has the issue :type car_id: int :param time: the time (in UTC) that the issue was reported :type time: datetime :param details: details of the car's issue :type details: string :param resolved: indicates whether the issue has been resolved :type resolved: boolean :param car: variable to easily serialize an issue along with a car object (not stored in database), optional :type car: app.model.car.Car
6259909e283ffb24f3cf573a
class activation_keys(models.Model): <NEW_LINE> <INDENT> id_user = models.ForeignKey(User, null=False, related_name='%(class)s_id_user') <NEW_LINE> email = models.CharField(max_length=150, verbose_name="Email") <NEW_LINE> activation_key = models.CharField(max_length=150, verbose_name = "Activation_key") <NEW_LINE> date_generated = models.DateTimeField(auto_now=True) <NEW_LINE> is_expired = models.BooleanField(default=False) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return "%s "%(self.email)
Table needed to create a user account. It is used to validate the email address.
6259909fd8ef3951e32c8dad
class ColumnGroupRule: <NEW_LINE> <INDENT> def __init__(self, alias: str, schema: ColumnGroupSchema) -> None: <NEW_LINE> <INDENT> self.alias = alias <NEW_LINE> self.schema = schema <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> class_name = type(self).__name__ <NEW_LINE> return f"<{class_name}: alias={self.alias!r}, schema={self.schema!r}>" <NEW_LINE> <DEDENT> def member_names(self) -> Iterator[str]: <NEW_LINE> <INDENT> return yield_column_names(self.schema)
Defines a column group rule. Attributes: alias: Alias of the column group. schema: Member columns belonging to the column group. May be a sequence of member columns, or a mapping of column aliases to column names, or a combination of the two.
6259909f099cdd3c6367634b
class InputExecOrderField(Base): <NEW_LINE> <INDENT> _fields_ = [ ('BrokerID', ctypes.c_char * 11), ('InvestorID', ctypes.c_char * 13), ('InstrumentID', ctypes.c_char * 31), ('ExecOrderRef', ctypes.c_char * 13), ('UserID', ctypes.c_char * 16), ('Volume', ctypes.c_int), ('RequestID', ctypes.c_int), ('BusinessUnit', ctypes.c_char * 21), ('OffsetFlag', ctypes.c_char), ('HedgeFlag', ctypes.c_char), ('ActionType', ctypes.c_char), ('PosiDirection', ctypes.c_char), ('ReservePositionFlag', ctypes.c_char), ('CloseFlag', ctypes.c_char), ('ExchangeID', ctypes.c_char * 9), ('InvestUnitID', ctypes.c_char * 17), ('AccountID', ctypes.c_char * 13), ('CurrencyID', ctypes.c_char * 4), ('ClientID', ctypes.c_char * 11), ('IPAddress', ctypes.c_char * 16), ('MacAddress', ctypes.c_char * 21), ] <NEW_LINE> def __init__(self, BrokerID='', InvestorID='', InstrumentID='', ExecOrderRef='', UserID='', Volume=0, RequestID=0, BusinessUnit='', OffsetFlag='', HedgeFlag='', ActionType='', PosiDirection='', ReservePositionFlag='', CloseFlag='', ExchangeID='', InvestUnitID='', AccountID='', CurrencyID='', ClientID='', IPAddress='', MacAddress=''): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.BrokerID = self._to_bytes(BrokerID) <NEW_LINE> self.InvestorID = self._to_bytes(InvestorID) <NEW_LINE> self.InstrumentID = self._to_bytes(InstrumentID) <NEW_LINE> self.ExecOrderRef = self._to_bytes(ExecOrderRef) <NEW_LINE> self.UserID = self._to_bytes(UserID) <NEW_LINE> self.Volume = int(Volume) <NEW_LINE> self.RequestID = int(RequestID) <NEW_LINE> self.BusinessUnit = self._to_bytes(BusinessUnit) <NEW_LINE> self.OffsetFlag = self._to_bytes(OffsetFlag) <NEW_LINE> self.HedgeFlag = self._to_bytes(HedgeFlag) <NEW_LINE> self.ActionType = self._to_bytes(ActionType) <NEW_LINE> self.PosiDirection = self._to_bytes(PosiDirection) <NEW_LINE> self.ReservePositionFlag = self._to_bytes(ReservePositionFlag) <NEW_LINE> self.CloseFlag = self._to_bytes(CloseFlag) <NEW_LINE> self.ExchangeID = self._to_bytes(ExchangeID) <NEW_LINE> self.InvestUnitID = self._to_bytes(InvestUnitID) <NEW_LINE> self.AccountID = self._to_bytes(AccountID) <NEW_LINE> self.CurrencyID = self._to_bytes(CurrencyID) <NEW_LINE> self.ClientID = self._to_bytes(ClientID) <NEW_LINE> self.IPAddress = self._to_bytes(IPAddress) <NEW_LINE> self.MacAddress = self._to_bytes(MacAddress)
Input exercise declaration (执行宣告)
6259909f187af65679d2ab3b
class MapGradient(GradientProcessor): <NEW_LINE> <INDENT> def __init__(self, func, regex='.*'): <NEW_LINE> <INDENT> args = inspect.getargspec(func).args <NEW_LINE> arg_num = len(args) - inspect.ismethod(func) <NEW_LINE> assert arg_num in [1, 2], "The function must take 1 or 2 arguments! ({})".format(args) <NEW_LINE> if arg_num == 1: <NEW_LINE> <INDENT> self.func = lambda grad, var: func(grad) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.func = func <NEW_LINE> <DEDENT> if not regex.endswith('$'): <NEW_LINE> <INDENT> regex = regex + '$' <NEW_LINE> <DEDENT> self.regex = regex <NEW_LINE> super(MapGradient, self).__init__() <NEW_LINE> <DEDENT> def _process(self, grads): <NEW_LINE> <INDENT> ret = [] <NEW_LINE> matched = False <NEW_LINE> for grad, var in grads: <NEW_LINE> <INDENT> if re.match(self.regex, var.op.name): <NEW_LINE> <INDENT> matched = True <NEW_LINE> grad = self.func(grad, var) <NEW_LINE> if grad is not None: <NEW_LINE> <INDENT> ret.append((grad, var)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> ret.append((grad, var)) <NEW_LINE> <DEDENT> <DEDENT> if not matched: <NEW_LINE> <INDENT> logger.warn("[MapGradient] No match was found for regex {}.".format(self.regex)) <NEW_LINE> <DEDENT> return ret
Apply a function to every gradient whose variable name matches the regex. Keep the other gradients unchanged.
6259909f091ae35668706ad5
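A small graph-mode (TF1-style) sketch matching the API this processor was written against: gradients of variables whose name contains 'fc' are norm-clipped, all others pass through unchanged.

import tensorflow as tf

w = tf.get_variable('fc/weight', shape=[3], initializer=tf.zeros_initializer())
loss = tf.reduce_sum(tf.square(w - 1.0))
grads = tf.train.GradientDescentOptimizer(0.1).compute_gradients(loss)

clipper = MapGradient(lambda grad: tf.clip_by_norm(grad, 5.0), regex='.*fc.*')
processed = clipper._process(grads)     # list of (clipped_grad, variable) pairs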
class CdnWebApplicationFirewallPolicyPatchParameters(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(CdnWebApplicationFirewallPolicyPatchParameters, self).__init__(**kwargs) <NEW_LINE> self.tags = kwargs.get('tags', None)
Properties required to update a CdnWebApplicationFirewallPolicy. :param tags: A set of tags. CdnWebApplicationFirewallPolicy tags. :type tags: dict[str, str]
6259909f656771135c48af83
class TransferReassemblyErrorID(enum.Enum): <NEW_LINE> <INDENT> MISSED_START_OF_TRANSFER = enum.auto() <NEW_LINE> UNEXPECTED_TOGGLE_BIT = enum.auto() <NEW_LINE> UNEXPECTED_TRANSFER_ID = enum.auto() <NEW_LINE> TRANSFER_CRC_MISMATCH = enum.auto()
Transfer reassembly error codes. Used in the extended error statistics. See the UAVCAN specification for background info. We have ``ID`` in the name to make clear that this is not an exception type.
6259909f283ffb24f3cf5740
class Hidden(Base): <NEW_LINE> <INDENT> def __init__(self, n=100): <NEW_LINE> <INDENT> self.n = n <NEW_LINE> self.count = 0 <NEW_LINE> <DEDENT> def process(self, string): <NEW_LINE> <INDENT> all_count = self.count + len(string) <NEW_LINE> if all_count < self.n: <NEW_LINE> <INDENT> self.count = all_count <NEW_LINE> return <NEW_LINE> <DEDENT> for _ in xrange(int(all_count / self.n)): <NEW_LINE> <INDENT> self.write('.') <NEW_LINE> <DEDENT> self.count = all_count % self.n <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> self.write('.\n')
Hide the output and print a dot every N characters.
6259909f50812a4eaa621b1b
class TroubleshootingResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, 'code': {'key': 'code', 'type': 'str'}, 'results': {'key': 'results', 'type': '[TroubleshootingDetails]'}, } <NEW_LINE> def __init__( self, *, start_time: Optional[datetime.datetime] = None, end_time: Optional[datetime.datetime] = None, code: Optional[str] = None, results: Optional[List["TroubleshootingDetails"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(TroubleshootingResult, self).__init__(**kwargs) <NEW_LINE> self.start_time = start_time <NEW_LINE> self.end_time = end_time <NEW_LINE> self.code = code <NEW_LINE> self.results = results
Troubleshooting information gained from specified resource. :param start_time: The start time of the troubleshooting. :type start_time: ~datetime.datetime :param end_time: The end time of the troubleshooting. :type end_time: ~datetime.datetime :param code: The result code of the troubleshooting. :type code: str :param results: Information from troubleshooting. :type results: list[~azure.mgmt.network.v2018_11_01.models.TroubleshootingDetails]
6259909fc4546d3d9def81f1
class LEDLearnedPositionalEmbedding(nn.Embedding): <NEW_LINE> <INDENT> def __init__(self, num_embeddings: int, embedding_dim: int, padding_idx: int): <NEW_LINE> <INDENT> assert padding_idx is not None, "`padding_idx` should not be None, but of type int" <NEW_LINE> super().__init__(num_embeddings, embedding_dim, padding_idx=padding_idx) <NEW_LINE> <DEDENT> def forward(self, input_ids_shape: torch.Size, past_key_values_length: int = 0): <NEW_LINE> <INDENT> bsz, seq_len = input_ids_shape[:2] <NEW_LINE> positions = torch.arange( past_key_values_length, past_key_values_length + seq_len, dtype=torch.long, device=self.weight.device ) <NEW_LINE> return super().forward(positions)
This module learns positional embeddings up to a fixed maximum size.
6259909fadb09d7d5dc0c408
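A brief sketch showing that the embedding is queried by shape rather than by token ids; the sizes below are arbitrary.

import torch

emb = LEDLearnedPositionalEmbedding(num_embeddings=1024, embedding_dim=64, padding_idx=1)
input_ids = torch.ones(2, 10, dtype=torch.long)
pos = emb(input_ids.shape)                                   # embeddings for positions 0..9
pos_next = emb(input_ids.shape, past_key_values_length=10)   # continue from position 10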
class ClusterUser(UserBase): <NEW_LINE> <INDENT> USER_PREFIX = 'mcv-cluster-' <NEW_LINE> CAN_GENERATE = True <NEW_LINE> CLUSTER_USER = True <NEW_LINE> DISTRIBUTED = False <NEW_LINE> @Expose() <NEW_LINE> def is_superuser(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> @property <NEW_LINE> def allow_proxy_user(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_default_config(): <NEW_LINE> <INDENT> default_config = UserBase.get_default_config() <NEW_LINE> default_config['host'] = None <NEW_LINE> return default_config <NEW_LINE> <DEDENT> @property <NEW_LINE> def node(self): <NEW_LINE> <INDENT> return self._get_config()['host'] <NEW_LINE> <DEDENT> def update_host(self, host): <NEW_LINE> <INDENT> def update_config(config): <NEW_LINE> <INDENT> config['users'][self.get_username()]['host'] = host <NEW_LINE> <DEDENT> MCVirtConfig().update_config(update_config, 'Updated host for \'%s\'' % self.get_username())
User type for cluster daemon users.
6259909f091ae35668706add
class PornConfigureInfo(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ImgReviewInfo = None <NEW_LINE> self.AsrReviewInfo = None <NEW_LINE> self.OcrReviewInfo = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("ImgReviewInfo") is not None: <NEW_LINE> <INDENT> self.ImgReviewInfo = PornImgReviewTemplateInfo() <NEW_LINE> self.ImgReviewInfo._deserialize(params.get("ImgReviewInfo")) <NEW_LINE> <DEDENT> if params.get("AsrReviewInfo") is not None: <NEW_LINE> <INDENT> self.AsrReviewInfo = PornAsrReviewTemplateInfo() <NEW_LINE> self.AsrReviewInfo._deserialize(params.get("AsrReviewInfo")) <NEW_LINE> <DEDENT> if params.get("OcrReviewInfo") is not None: <NEW_LINE> <INDENT> self.OcrReviewInfo = PornOcrReviewTemplateInfo() <NEW_LINE> self.OcrReviewInfo._deserialize(params.get("OcrReviewInfo")) <NEW_LINE> <DEDENT> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
Control parameters for a porn-detection task (鉴黄任务)
6259909f099cdd3c63676350
class PrimarySlotError(Exception): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> self.message = message
Thrown when the key is not present in the default values database.
6259909f099cdd3c63676352
class LESSStylesView(BrowserView): <NEW_LINE> <INDENT> def registry(self): <NEW_LINE> <INDENT> return getToolByName(aq_inner(self.context), 'portal_less') <NEW_LINE> <DEDENT> def skinname(self): <NEW_LINE> <INDENT> return aq_inner(self.context).getCurrentSkinName() <NEW_LINE> <DEDENT> def styles(self): <NEW_LINE> <INDENT> registry = self.registry() <NEW_LINE> registry_url = registry.absolute_url() <NEW_LINE> context = aq_inner(self.context) <NEW_LINE> styles = registry.getEvaluatedResources(context) <NEW_LINE> skinname = url_quote(self.skinname()) <NEW_LINE> result = [] <NEW_LINE> for style in styles: <NEW_LINE> <INDENT> rendering = style.getRendering() <NEW_LINE> if style.isExternalResource(): <NEW_LINE> <INDENT> src = "%s" % style.getId() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> src = "%s/%s/%s" % (registry_url, skinname, style.getId()) <NEW_LINE> <DEDENT> if rendering == 'link': <NEW_LINE> <INDENT> data = {'rendering': rendering, 'media': style.getMedia(), 'rel': style.getRel(), 'title': style.getTitle(), 'conditionalcomment': style.getConditionalcomment(), 'src': src} <NEW_LINE> <DEDENT> elif rendering == 'import': <NEW_LINE> <INDENT> data = {'rendering': rendering, 'media': style.getMedia(), 'conditionalcomment': style.getConditionalcomment(), 'src': src} <NEW_LINE> <DEDENT> elif rendering == 'inline': <NEW_LINE> <INDENT> content = registry.getInlineResource(style.getId(), context) <NEW_LINE> data = {'rendering': rendering, 'media': style.getMedia(), 'conditionalcomment': style.getConditionalcomment(), 'content': content} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Unkown rendering method '%s' for style '%s'" % (rendering, style.getId())) <NEW_LINE> <DEDENT> result.append(data) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def isDevelopmentMode(self): <NEW_LINE> <INDENT> registry = queryUtility(IRegistry) <NEW_LINE> settings = registry.forInterface(ILESSCSSControlPanel, check=False) <NEW_LINE> return settings.enable_less_stylesheets <NEW_LINE> <DEDENT> def compiledCSSURL(self): <NEW_LINE> <INDENT> portal_state = getMultiAdapter((self.context, self.request), name=u'plone_portal_state') <NEW_LINE> return "%s/compiled_styles.css" % portal_state.portal_url()
Information for LESS style rendering.
6259909fadb09d7d5dc0c40e
class VideoList: <NEW_LINE> <INDENT> def __init__(self, path_response, list_id=None): <NEW_LINE> <INDENT> self.perpetual_range_selector = path_response.get('_perpetual_range_selector') <NEW_LINE> self.data = path_response <NEW_LINE> has_data = bool(path_response.get('lists')) <NEW_LINE> self.videos = OrderedDict() <NEW_LINE> self.artitem = None <NEW_LINE> self.contained_titles = None <NEW_LINE> self.videoids = None <NEW_LINE> if has_data: <NEW_LINE> <INDENT> self.videoid = common.VideoId( videoid=(list_id if list_id else next(iter(self.data['lists'])))) <NEW_LINE> self.videos = OrderedDict(resolve_refs(self.data['lists'][self.videoid.value], self.data)) <NEW_LINE> if self.videos: <NEW_LINE> <INDENT> self.artitem = list(self.videos.values())[0] <NEW_LINE> self.contained_titles = _get_titles(self.videos) <NEW_LINE> try: <NEW_LINE> <INDENT> self.videoids = _get_videoids(self.videos) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.videoids = None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return _check_sentinel(self.data['lists'][self.videoid.value][key]) <NEW_LINE> <DEDENT> def get(self, key, default=None): <NEW_LINE> <INDENT> return _check_sentinel(self.data['lists'][self.videoid.value].get(key, default))
A video list
6259909f099cdd3c63676353
class Configuration(SingletonMixin): <NEW_LINE> <INDENT> def __init__(self, fp, parser_dep=SafeConfigParser): <NEW_LINE> <INDENT> self.conf = parser_dep() <NEW_LINE> self.conf.readfp(fp) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_env(cls): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> filepath = os.environ['LOGGER_SETTINGS_FILE'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise ValueError('missing env variable LOGGER_SETTINGS_FILE') <NEW_LINE> <DEDENT> return cls.from_file(filepath) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_file(cls, filepath): <NEW_LINE> <INDENT> fp = open(filepath, 'rb') <NEW_LINE> return cls(fp) <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> return getattr(self.conf, attr) <NEW_LINE> <DEDENT> def items(self): <NEW_LINE> <INDENT> return [(section, dict(self.conf.items(section, raw=True))) for section in [section for section in self.conf.sections()]]
Acts as a proxy to the ConfigParser module
6259909fadb09d7d5dc0c410
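A minimal usage sketch with a hypothetical settings file; attribute access is delegated to the wrapped parser via __getattr__.

conf = Configuration.from_file('/etc/logger/settings.ini')   # hypothetical path
host = conf.get('server', 'host')        # delegated to the underlying ConfigParser
sections = dict(conf.items())            # e.g. {'server': {'host': ...}, ...}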
class RowTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> copy = copytext.Copy('etc/test_copy.xls') <NEW_LINE> sheet = copy['content'] <NEW_LINE> self.row = sheet['header_title'] <NEW_LINE> <DEDENT> def test_cell_by_value_repr(self): <NEW_LINE> <INDENT> cell = repr(self.row) <NEW_LINE> self.assertTrue(isinstance(cell, basestring)) <NEW_LINE> self.assertEqual(cell, 'Across-The-Top Header') <NEW_LINE> <DEDENT> def test_cell_by_index(self): <NEW_LINE> <INDENT> cell = self.row[1] <NEW_LINE> self.assertTrue(isinstance(cell, basestring)) <NEW_LINE> self.assertEqual(cell, 'Across-The-Top Header') <NEW_LINE> <DEDENT> def test_cell_by_item_name(self): <NEW_LINE> <INDENT> cell = self.row['value'] <NEW_LINE> self.assertTrue(isinstance(cell, basestring)) <NEW_LINE> self.assertEqual(cell, 'Across-The-Top Header') <NEW_LINE> <DEDENT> def test_cell_by_prop_name(self): <NEW_LINE> <INDENT> with self.assertRaises(AttributeError): <NEW_LINE> <INDENT> self.row.value <NEW_LINE> <DEDENT> <DEDENT> def test_column_does_not_exist(self): <NEW_LINE> <INDENT> error = self.row['foo'] <NEW_LINE> self.assertTrue(isinstance(error, copytext.Error)) <NEW_LINE> self.assertEquals(error._error, 'COPY.content.1.foo [column does not exist in sheet]') <NEW_LINE> <DEDENT> def test_column_index_outside_range(self): <NEW_LINE> <INDENT> error = self.row[2] <NEW_LINE> self.assertTrue(isinstance(error, copytext.Error)) <NEW_LINE> self.assertEquals(error._error, 'COPY.content.1.2 [column index outside range]')
Test the Row object.
6259909f50812a4eaa621b22
class LambdaPrinter(StrPrinter): <NEW_LINE> <INDENT> def _print_MatrixBase(self, expr): <NEW_LINE> <INDENT> return "%s(%s)" % (expr.__class__.__name__, self._print((expr.tolist()))) <NEW_LINE> <DEDENT> _print_SparseMatrix = _print_MutableSparseMatrix = _print_ImmutableSparseMatrix = _print_Matrix = _print_DenseMatrix = _print_MutableDenseMatrix = _print_ImmutableMatrix = _print_ImmutableDenseMatrix = _print_MatrixBase <NEW_LINE> def _print_Piecewise(self, expr): <NEW_LINE> <INDENT> result = [] <NEW_LINE> i = 0 <NEW_LINE> for arg in expr.args: <NEW_LINE> <INDENT> e = arg.expr <NEW_LINE> c = arg.cond <NEW_LINE> result.append('((') <NEW_LINE> result.append(self._print(e)) <NEW_LINE> result.append(') if (') <NEW_LINE> result.append(self._print(c)) <NEW_LINE> result.append(') else (') <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> result = result[:-1] <NEW_LINE> result.append(') else None)') <NEW_LINE> result.append(')'*(2*i - 2)) <NEW_LINE> return ''.join(result) <NEW_LINE> <DEDENT> def _print_And(self, expr): <NEW_LINE> <INDENT> result = ['('] <NEW_LINE> for arg in sorted(expr.args, key=default_sort_key): <NEW_LINE> <INDENT> result.extend(['(', self._print(arg), ')']) <NEW_LINE> result.append(' and ') <NEW_LINE> <DEDENT> result = result[:-1] <NEW_LINE> result.append(')') <NEW_LINE> return ''.join(result) <NEW_LINE> <DEDENT> def _print_Or(self, expr): <NEW_LINE> <INDENT> result = ['('] <NEW_LINE> for arg in sorted(expr.args, key=default_sort_key): <NEW_LINE> <INDENT> result.extend(['(', self._print(arg), ')']) <NEW_LINE> result.append(' or ') <NEW_LINE> <DEDENT> result = result[:-1] <NEW_LINE> result.append(')') <NEW_LINE> return ''.join(result) <NEW_LINE> <DEDENT> def _print_Not(self, expr): <NEW_LINE> <INDENT> result = ['(', 'not (', self._print(expr.args[0]), '))'] <NEW_LINE> return ''.join(result) <NEW_LINE> <DEDENT> def _print_BooleanTrue(self, expr): <NEW_LINE> <INDENT> return "True" <NEW_LINE> <DEDENT> def _print_BooleanFalse(self, expr): <NEW_LINE> <INDENT> return "False" <NEW_LINE> <DEDENT> def _print_ITE(self, expr): <NEW_LINE> <INDENT> result = [ '((', self._print(expr.args[1]), ') if (', self._print(expr.args[0]), ') else (', self._print(expr.args[2]), '))' ] <NEW_LINE> return ''.join(result)
This printer converts expressions into strings that can be used by lambdify.
6259909f099cdd3c63676355
class _VertexSitesMixin: <NEW_LINE> <INDENT> def _add_vertex_sites(self, box_geom_or_site): <NEW_LINE> <INDENT> offsets = ( (-half_length, half_length) for half_length in box_geom_or_site.size) <NEW_LINE> site_positions = np.vstack(itertools.product(*offsets)) <NEW_LINE> if box_geom_or_site.pos is not None: <NEW_LINE> <INDENT> site_positions += box_geom_or_site.pos <NEW_LINE> <DEDENT> self._vertices = [] <NEW_LINE> for i, pos in enumerate(site_positions): <NEW_LINE> <INDENT> site = box_geom_or_site.parent.add( 'site', name='vertex_' + str(i), pos=pos, type='sphere', size=[0.002], rgba=constants.RED, group=constants.TASK_SITE_GROUP) <NEW_LINE> self._vertices.append(site) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def vertices(self): <NEW_LINE> <INDENT> return self._vertices
Mixin class that adds sites corresponding to the vertices of a box.
6259909fd8ef3951e32c8db8
class ThundercoinTestNet(Thundercoin): <NEW_LINE> <INDENT> name = 'test-thundercoin' <NEW_LINE> seeds = ("testnet-seed.litecointools.com", "testnet-seed.ltc.xurious.com", "dnsseed.wemine-testnet.com") <NEW_LINE> port = 64547 <NEW_LINE> message_start = b'\xfc\xc1\xb7\xdc' <NEW_LINE> base58_prefixes = { 'PUBKEY_ADDR': 111, 'SCRIPT_ADDR': 196, 'SECRET_KEY': 239 }
Class with all the necessary Thundercoin testing network information based on https://github.com/VanIerselDev/Thundercoin/blob/master/src/net.cpp (date of access: 02/21/2018)
6259909f3617ad0b5ee0800d
class QueryImpl(object): <NEW_LINE> <INDENT> def __init__(self, test, hashval): <NEW_LINE> <INDENT> self._test = test <NEW_LINE> self.hashval = hashval <NEW_LINE> <DEDENT> def __call__(self, value): <NEW_LINE> <INDENT> return self._test(value) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.hashval) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'QueryImpl{}'.format(self.hashval) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.hashval == other.hashval <NEW_LINE> <DEDENT> def __and__(self, other): <NEW_LINE> <INDENT> return QueryImpl(lambda value: self(value) and other(value), ('and', frozenset([self.hashval, other.hashval]))) <NEW_LINE> <DEDENT> def __or__(self, other): <NEW_LINE> <INDENT> return QueryImpl(lambda value: self(value) or other(value), ('or', frozenset([self.hashval, other.hashval]))) <NEW_LINE> <DEDENT> def __invert__(self): <NEW_LINE> <INDENT> return QueryImpl(lambda value: not self(value), ('not', self.hashval))
A query implementation. This query implementation wraps a test function which is run when the query is evaluated by calling the object. Queries can be combined with logical and/or and modified with logical not.
6259909f50812a4eaa621b24
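An illustrative composition of hand-built tests; the hash values are whatever hashable description the caller chooses, which lets composed queries be compared and cached.

is_positive = QueryImpl(lambda v: v > 0, ('>', 0))
is_even = QueryImpl(lambda v: v % 2 == 0, ('mod2', 0))

q = (is_positive & is_even) | ~is_positive
q(4)     # True  -> positive and even
q(-3)    # True  -> not positive
q(3)     # False -> positive but odd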
class NullIterator(six.Iterator): <NEW_LINE> <INDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> raise StopIteration()
An empty iterator that doesn't give any results.
6259909fc4546d3d9def81f9
class CopyScreenDialog(BaseDialog): <NEW_LINE> <INDENT> expectedTitle = "Copy Screen" <NEW_LINE> locatorClass = CopyScreenDialogLocators <NEW_LINE> screenname_field = TextElement(*locatorClass.name_field) <NEW_LINE> def copyScreen(self, screen_name): <NEW_LINE> <INDENT> self.screenname_field = "" <NEW_LINE> self.screenname_field = screen_name <NEW_LINE> self.click_ok_button()
Copy Screen dialog action methods go here
6259909f3617ad0b5ee0800f
class JSON(Encode): <NEW_LINE> <INDENT> def __init__(self, sort_keys=False, ensure_ascii=False, indent=None): <NEW_LINE> <INDENT> self.sort_keys = sort_keys <NEW_LINE> self.ensure_ascii = ensure_ascii <NEW_LINE> self.indent = indent <NEW_LINE> <DEDENT> def handleDict(self, data): <NEW_LINE> <INDENT> return dumps(data, sort_keys=self.sort_keys, ensure_ascii=self.ensure_ascii, indent=self.indent) <NEW_LINE> <DEDENT> def handleCut(self, data): <NEW_LINE> <INDENT> return self.handleDict(dict(data)) <NEW_LINE> <DEDENT> handleList = handleDict
**Encode data into JSON format.** Convert a Python data structure into JSON format. Parameters: - sort_keys(bool)(False) | Sorts keys when True - ensure_ascii(bool)(False) | When True, makes sure all chars are valid ASCII - indent(int)(None) | The indentation used.
6259909fc4546d3d9def81fa
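A short usage sketch of the handlers, assuming the Encode base class needs no extra initialisation; handleList is an alias for handleDict, so both simply feed dumps with the configured options.

encoder = JSON(sort_keys=True, indent=2)
encoder.handleDict({'b': 2, 'a': 1})     # '{\n  "a": 1,\n  "b": 2\n}'
encoder.handleList([1, 2, 3])            # '[\n  1,\n  2,\n  3\n]'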
class TestAlignmentEnd(TestCase): <NEW_LINE> <INDENT> def testOffsetIndexError(self): <NEW_LINE> <INDENT> error = r'^string index out of range$' <NEW_LINE> self.assertRaisesRegex(IndexError, error, alignmentEnd, '', 4, 1) <NEW_LINE> <DEDENT> def testLengthTooLarge(self): <NEW_LINE> <INDENT> error = r'^string index out of range$' <NEW_LINE> self.assertRaisesRegex(IndexError, error, alignmentEnd, 'ACCG', 2, 5) <NEW_LINE> <DEDENT> def testSequenceTooShort(self): <NEW_LINE> <INDENT> error = r'^string index out of range$' <NEW_LINE> self.assertRaisesRegex( IndexError, error, alignmentEnd, 'CC--------T-', 2, 5) <NEW_LINE> <DEDENT> def testEmptyString(self): <NEW_LINE> <INDENT> self.assertEqual(0, alignmentEnd('', 0, 0)) <NEW_LINE> <DEDENT> def testNonZeroNoGaps(self): <NEW_LINE> <INDENT> self.assertEqual(3, alignmentEnd('ACCTA', 1, 2)) <NEW_LINE> <DEDENT> def testZeroWithOneGap(self): <NEW_LINE> <INDENT> self.assertEqual(3, alignmentEnd('A-CCTA', 0, 2)) <NEW_LINE> <DEDENT> def testZeroWithTwoGaps(self): <NEW_LINE> <INDENT> self.assertEqual(4, alignmentEnd('A--CCTA', 0, 2)) <NEW_LINE> <DEDENT> def testZeroWithTwoGapsNonContiguous(self): <NEW_LINE> <INDENT> self.assertEqual(5, alignmentEnd('A-C-CTA', 0, 3)) <NEW_LINE> <DEDENT> def testNonZeroWithTwoGapsNonContiguous(self): <NEW_LINE> <INDENT> self.assertEqual(7, alignmentEnd('TTA-C-CTA', 2, 3))
Test the alignmentEnd function.
6259909f187af65679d2ab47
class TunaSkeleton(metaclass=ABCMeta): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def setTunaFeatures(self, array=[]): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def getTunaFeatures(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def getTunaTuple(self): <NEW_LINE> <INDENT> pass
This TunaSkeleton class is a purely abstract class designed to demonstrate the inheritance capability of Python. Class: TunaSkeleton Extends: object Author: Nikolay Melnik Date created: 10/1/2018 Date last modified: 10/14/2018 Python Version: 3.7
6259909f3617ad0b5ee08011
class EmailField(TextField): <NEW_LINE> <INDENT> EMAIL_REGEXP = re.compile(r'^[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]+$', flags=re.IGNORECASE) <NEW_LINE> @classmethod <NEW_LINE> def serialize(cls, value, *args, **kwargs): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> return six.text_type(value) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def deserialize(cls, value, *args, **kwargs): <NEW_LINE> <INDENT> value = super(EmailField, cls).deserialize(value) <NEW_LINE> if value is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> result = cls.EMAIL_REGEXP.findall(value) <NEW_LINE> if not result: <NEW_LINE> <INDENT> raise ValueError("Can't be {}".format(cls.__name__)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return result[0]
Field class to represent e-mail addresses. Is not locale-aware (does not need to be)
6259909fd8ef3951e32c8dbc
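A sketch of the (de)serialisation behaviour, assuming the parent TextField.deserialize returns the text value unchanged.

EmailField.deserialize('user@example.com')   # 'user@example.com'
EmailField.serialize(None)                   # ''
EmailField.deserialize('not-an-email')       # raises ValueError("Can't be EmailField")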
class Converter: <NEW_LINE> <INDENT> def get_amount_for_save(self, amount, source_currency): <NEW_LINE> <INDENT> if source_currency == DEFAULT_CURRENCY: <NEW_LINE> <INDENT> return amount <NEW_LINE> <DEDENT> return amount * self.get_exhange_rate(DEFAULT_CURRENCY, source_currency) <NEW_LINE> <DEDENT> def get_exhange_rate(self, to_currency, from_currency): <NEW_LINE> <INDENT> rate = self._get_rate_from_cache(to_currency, from_currency) <NEW_LINE> if rate: <NEW_LINE> <INDENT> return rate <NEW_LINE> <DEDENT> return self._get_rate_from_api(to_currency, from_currency) <NEW_LINE> <DEDENT> def _get_rate_from_cache(self, to_currency, from_currency): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def _get_rate_from_api(self, to_currency, from_currency): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> rate = CurrencyAPI().get_exchange_rate(to_currency, from_currency) <NEW_LINE> self._update_in_cache(rate, to_currency, from_currency) <NEW_LINE> return rate <NEW_LINE> <DEDENT> except CurrencyAPI.CurrencyAPIError as error: <NEW_LINE> <INDENT> self._alarm_api_error(error.info) <NEW_LINE> raise Converter.ConverterError('api_error') <NEW_LINE> <DEDENT> <DEDENT> def _update_in_cache(self, rate, to_currency, from_currency): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _alarm_api_error(self, info): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class ConverterError(ValueError): <NEW_LINE> <INDENT> @property <NEW_LINE> def info(self): <NEW_LINE> <INDENT> return self.args[0]
Stub class for currency conversion. Also handles currency API monitoring and error alarming. Throws errors if the API can't be reached; transactions are then expected to fail
6259909f091ae35668706af3
@dataclass <NEW_LINE> class SiteInformation: <NEW_LINE> <INDENT> site_name: str = None <NEW_LINE> location: str = None <NEW_LINE> country: str = None <NEW_LINE> northernmost_latitude: float = None <NEW_LINE> southernmost_latitude: float = None <NEW_LINE> easternmost_longitude: float = None <NEW_LINE> westernmost_longitude: float = None <NEW_LINE> elevation: float = None
Proxy site information
625990a0187af65679d2ab4b
class DummySSHServer(test_edi_gateway.DummySSHServer): <NEW_LINE> <INDENT> root = None <NEW_LINE> def create_transport(self, sock): <NEW_LINE> <INDENT> transport = super().create_transport(sock) <NEW_LINE> transport.set_subsystem_handler('sftp', paramiko.SFTPServer, DummySFTPServer) <NEW_LINE> return transport
Dummy SSH server with SFTP support
625990a0091ae35668706af5
class StatTimeQuant(Base): <NEW_LINE> <INDENT> __tablename__ = 'stat_time_quant' <NEW_LINE> __table_args__ = ({'sqlite_autoincrement': True},) <NEW_LINE> id = Column(types.Integer, primary_key=True) <NEW_LINE> time = Column(types.Time, nullable=False, unique=True)
(Node-specific) A time quant (useful for statistics).
625990a0099cdd3c6367635e
class LinearProjectorGaussianPosterior: <NEW_LINE> <INDENT> stddev_A_mean = 1e-4 <NEW_LINE> stddev_A_std = 1e-2 <NEW_LINE> mean_A_std = -3.0 <NEW_LINE> stddev_b_std = 1e-1 <NEW_LINE> mean_b_std = -10.0 <NEW_LINE> def __init__(self, input_dim, output_dim): <NEW_LINE> <INDENT> A_shape = [1, output_dim, input_dim] <NEW_LINE> self.A_mean = tf.Variable( initial_value=tf.keras.initializers.RandomNormal(stddev=self.stddev_A_mean)(shape=A_shape)) <NEW_LINE> self._A_std = tf.Variable( initial_value=tf.keras.initializers.RandomNormal(mean=self.mean_A_std, stddev=self.stddev_A_std)( shape=A_shape)) <NEW_LINE> b_shape = [1, output_dim] <NEW_LINE> self.b_mean = tf.Variable(initial_value=tf.zeros(shape=b_shape)) <NEW_LINE> self._b_std = tf.Variable( initial_value=tf.keras.initializers.RandomNormal( mean=self.mean_b_std, stddev=self.stddev_b_std)(shape=b_shape)) <NEW_LINE> self.pdf_regularization_b = tfp.distributions.Normal( loc=0.0, scale=tf.math.softplus(self.mean_b_std) + self.stddev_b_std) <NEW_LINE> self.pdf_regularization_A = tfp.distributions.Normal( loc=0.0, scale=tf.math.softplus(self.mean_A_std) + self.stddev_A_std) <NEW_LINE> self.is_jacobian_singular = input_dim != output_dim <NEW_LINE> <DEDENT> def callVI(self, x, penalty=0.0, variables=[]): <NEW_LINE> <INDENT> y, log_jacobian, v = self._callVI(x) <NEW_LINE> penalty -= log_jacobian + self.entropy() <NEW_LINE> variables += v <NEW_LINE> return y, penalty, variables <NEW_LINE> <DEDENT> def _callVI(self, x): <NEW_LINE> <INDENT> A = self.A_mean + tf.math.softplus(self._A_std) * tf.random.normal(self.A_mean.shape) <NEW_LINE> b = self.b_mean + tf.math.softplus(self._b_std) * tf.random.normal(self.b_mean.shape) <NEW_LINE> y = tf.matmul(A, x[:, :, tf.newaxis])[:, :, 0] + b <NEW_LINE> if self.is_jacobian_singular: <NEW_LINE> <INDENT> log_jacobian = tf.zeros(1, dtype=tf.float32) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log_jacobian = tf.math.log(tf.math.abs(tf.linalg.det(A))) <NEW_LINE> <DEDENT> variables = [self.A_mean, self._A_std, self.b_mean, self._b_std] <NEW_LINE> return y, log_jacobian, variables <NEW_LINE> <DEDENT> def entropy(self): <NEW_LINE> <INDENT> entropy_A = tfp.distributions.Normal(self.A_mean, tf.math.softplus(self._A_std)).entropy() <NEW_LINE> entropy_b = tfp.distributions.Normal(self.b_mean, tf.math.softplus(self._b_std)).entropy() <NEW_LINE> entropy = tf.reduce_sum(entropy_A) + tf.reduce_sum(entropy_b) <NEW_LINE> return entropy
Same as LinearProjector, but uses VI with a diagonal Gaussian posterior distribution to estimate the affine projection parameters
625990a0099cdd3c6367635f
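A tiny forward sketch; the input and output dimensions are equal here so the log-Jacobian term is non-trivial.

import tensorflow as tf

proj = LinearProjectorGaussianPosterior(input_dim=4, output_dim=4)
x = tf.random.normal([8, 4])
y, penalty, variables = proj.callVI(x)   # penalty accumulates -(log|det A| + posterior entropy)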
class ServiceException(ABC, Exception): <NEW_LINE> <INDENT> error_message: str = _("Unexpected exception occurred.") <NEW_LINE> app_id: int = None <NEW_LINE> error_code: int = None <NEW_LINE> status_code: int = status.HTTP_400_BAD_REQUEST <NEW_LINE> def __init__(self, **error_message_format_kwargs) -> None: <NEW_LINE> <INDENT> assert self.error_message is not None <NEW_LINE> assert self.error_code is not None <NEW_LINE> assert self.app_id is not None <NEW_LINE> self._error_message_format_kwargs = error_message_format_kwargs <NEW_LINE> <DEDENT> def _get_error_message(self) -> str: <NEW_LINE> <INDENT> if self._error_message_format_kwargs: <NEW_LINE> <INDENT> return self.error_message.format(**self._error_message_format_kwargs) <NEW_LINE> <DEDENT> return self.error_message <NEW_LINE> <DEDENT> def _get_error_code(self) -> str: <NEW_LINE> <INDENT> return f"{self.app_id:02d}{self.error_code:03d}" <NEW_LINE> <DEDENT> def _get_detail(self, json_dump: bool = False) -> Union[dict, str]: <NEW_LINE> <INDENT> detail = dict(error_code=self._get_error_code(), error_message=str(self._get_error_message()),) <NEW_LINE> if json_dump: <NEW_LINE> <INDENT> detail = json.dumps(detail) <NEW_LINE> <DEDENT> return detail <NEW_LINE> <DEDENT> @property <NEW_LINE> def detail(self) -> dict: <NEW_LINE> <INDENT> return self._get_detail() <NEW_LINE> <DEDENT> @property <NEW_LINE> def json_dumped_detail(self) -> str: <NEW_LINE> <INDENT> return self._get_detail(json_dump=True)
Service exception.
625990a0adb09d7d5dc0c428
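A minimal subclass sketch with a hypothetical message and codes; the combined error code is the zero-padded app id followed by the error code.

class WalletNotFound(ServiceException):
    error_message = "Wallet {wallet_id} was not found."   # hypothetical message
    app_id = 3
    error_code = 42

exc = WalletNotFound(wallet_id=7)
exc.detail    # {'error_code': '03042', 'error_message': 'Wallet 7 was not found.'}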
class reCAPTCHA(CAPTCHABase): <NEW_LINE> <INDENT> _SERVER_API_URL = 'http://www.google.com/recaptcha/api' <NEW_LINE> _SERVER_ERROR_CODES = {'unknown': '#recaptcha:error-codes/unknown', 'invalid-site-public-key': '#recaptcha:error-codes/invalid-site-public-key', 'invalid-site-private-key': '#recaptcha:error-codes/invalid-site-private-key', 'invalid-request-cookie': '#recaptcha:error-codes/invalid-request-cookie', 'incorrect-captcha-sol': '#recaptcha:error-codes/incorrect-captcha-sol', 'verify-params-incorrect': '#recaptcha:error-codes/verify-params-incorrect', 'invalid-referrer': '#recaptcha:error-codes/invalid-referrer', 'recaptcha-not-reachable': '#recaptcha:error-codes/recaptcha-not-reachable', 'enqueue-failure': '#recaptcha:error-codes/enqueue-failure', 'captcha-sol-empty': '#recaptcha:error-codes/captcha-sol-empty'} <NEW_LINE> _RESPONSE_IS_INCORRECT_CODE = 'incorrect-captcha-sol' <NEW_LINE> _IMAGE_SIZE = (300, 57) <NEW_LINE> def getImageSource(self, key, *args): <NEW_LINE> <INDENT> challenge_regexp = args[0] if args[0] is not None and len(args[0]) else DEFAULT_RE_CAPTCHA_PATTERN <NEW_LINE> params = urllib.urlencode({'k': key}) <NEW_LINE> url = '%s/noscript?%s' % (self._SERVER_API_URL, params) <NEW_LINE> resp = None <NEW_LINE> challenge = None <NEW_LINE> imageUrl = None <NEW_LINE> start = time.time() <NEW_LINE> try: <NEW_LINE> <INDENT> resp = urllib.urlopen(url) <NEW_LINE> html = resp.read() <NEW_LINE> challenge = re.search(challenge_regexp, html, flags=re.DOTALL).group('challenge') <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> LOG_ERROR('client can not load or parse reCAPTCHA html') <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if resp is not None: <NEW_LINE> <INDENT> resp.close() <NEW_LINE> <DEDENT> <DEDENT> resp, data = (None, None) <NEW_LINE> if challenge: <NEW_LINE> <INDENT> url = '%s/image?c=%s' % (self._SERVER_API_URL, challenge) <NEW_LINE> try: <NEW_LINE> <INDENT> resp = urllib.urlopen(url) <NEW_LINE> contentType = resp.headers.get('content-type') <NEW_LINE> if contentType == 'image/jpeg': <NEW_LINE> <INDENT> data = resp.read() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> LOG_ERROR('Client can not load reCAPTCHA image. contentType = {0}, response code = {1}'.format(contentType, resp.code)) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> LOG_ERROR('client can not load reCAPTCHA image') <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if resp is not None: <NEW_LINE> <INDENT> resp.close() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> LOG_DEBUG('get image from web for %.02f seconds' % (time.time() - start)) <NEW_LINE> return (data, challenge)
Google reCAPTCHA API. See documentation http://code.google.com/apis/recaptcha/.
625990a0099cdd3c63676360
@call_signature() <NEW_LINE> class Element: <NEW_LINE> <INDENT> __type__: Any = NotImplemented <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> bound = self.__signature__.bind(*args, **kwargs) <NEW_LINE> bound.apply_defaults() <NEW_LINE> for name, val in bound.arguments.items(): <NEW_LINE> <INDENT> setattr(self, name, val) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> repr = f"<{self.__class__.__name__}" <NEW_LINE> for attr in self.__signature__.parameters: <NEW_LINE> <INDENT> val = getattr(self, attr) <NEW_LINE> repr += f" {attr}={val}" <NEW_LINE> <DEDENT> repr += ">" <NEW_LINE> return repr <NEW_LINE> <DEDENT> def __set_name__(self, owner, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def __get__(self, obj, objtype=None): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> return obj.__dict__[self.name] <NEW_LINE> <DEDENT> def __set__(self, obj, value) -> None: <NEW_LINE> <INDENT> obj.__dict__[self.name] = self.convert(value) <NEW_LINE> <DEDENT> def convert(self, value): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def enforce_required(self, value): <NEW_LINE> <INDENT> if value is None and self.required: <NEW_LINE> <INDENT> raise OFXSpecError(f"{self.__class__.__name__}: Value is required") <NEW_LINE> <DEDENT> return value
Python representation of OFX 'element', i.e. *ML leaf node containing text data. Pass validation parameters (e.g. maximum string length, decimal scale, required vs. optional, etc.) as arguments to __init__() when defining an Aggregate subclass. ``Element`` instances are bound to model classes (sundry ``Aggregate`` subclasses found in the ``ofxtools.models`` subpackage, as well as ``OFXHeaderV1``/``OFXHeaderV2`` classes found in the header module). Since these validators are class attributes, they are shared by all instances of a model class. Therefore ``Elements`` are implemented as data descriptors; they intercept calls to ``__get__`` and ``__set__``, which get passed as an arg the ``Aggregate`` instance whose attribute you're trying to read/write. We don't want to store the attribute value inside the ``Element`` instance, keyed by the ``Aggregate`` instance, because that will cause the long-persisting ``Element`` to keep strong references to an ``Aggregate`` instance that may have no other remaining references, thus screwing up our garbage collection & eating up memory. Instead, we stick the attribute value where it belongs (i.e. on the ``Aggregate`` instance), keyed by the ``Element`` instance (or even better, some proxy therefor). We'll need a reference to the ``Element`` instance as long as any instance of the ``Aggregate`` class remains alive, but the ``Aggregate`` instances can be garbage collected when no longer needed. A good introductory discussion of this use of descriptors is here: https://realpython.com/python-descriptors/#how-to-use-python-descriptors-properly Prior to setting the data value, each ``Element`` performs validation (using the arguments passed to ``__init__()``) and type conversion (using the logic implemented in ``convert()``).
625990a0adb09d7d5dc0c42a
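A standalone toy descriptor illustrating the storage strategy described above (the validator lives on the model class while the validated value sits in the owning instance's __dict__), with a hypothetical MaxLength check standing in for the real OFX validation.

class MaxLength:
    def __init__(self, length):
        self.length = length

    def __set_name__(self, owner, name):
        self.name = name

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        return obj.__dict__[self.name]      # the value lives on the instance

    def __set__(self, obj, value):
        if value is not None and len(value) > self.length:
            raise ValueError(f"{self.name}: longer than {self.length} characters")
        obj.__dict__[self.name] = value     # nothing is stored on the descriptor itself

class Account:
    acctid = MaxLength(22)

acct = Account()
acct.acctid = '123456789'
acct.acctid                                 # '123456789'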
class DummyFingerprintDataset(FingerprintDataset): <NEW_LINE> <INDENT> DATAS = { MetadataField.BROWSER_ID: [1, 2, 3, 2, 3], MetadataField.TIME_OF_COLLECT: pd.date_range(('2021-03-12'), periods=5, freq='H'), ATTRIBUTES[0].name: ['Firefox', 'Chrome', 'Edge', 'Chrome', 'Edge'], ATTRIBUTES[1].name: [60, 120, 90, 120, 90], ATTRIBUTES[2].name: [1, 1, 1, 1, 1] } <NEW_LINE> def _set_candidate_attributes(self): <NEW_LINE> <INDENT> self._candidate_attributes = AttributeSet(ATTRIBUTES) <NEW_LINE> <DEDENT> def _process_dataset(self): <NEW_LINE> <INDENT> self._dataframe = pd.DataFrame(self.DATAS) <NEW_LINE> self._dataframe[MetadataField.TIME_OF_COLLECT] = pd.to_datetime( self._dataframe[MetadataField.TIME_OF_COLLECT]) <NEW_LINE> self._dataframe.set_index( [MetadataField.BROWSER_ID, MetadataField.TIME_OF_COLLECT], inplace=True)
Dummy fingerprint class to define the required functions.
625990a0187af65679d2ab51
class MetadataItem(Model): <NEW_LINE> <INDENT> _validation = { 'name': {'required': True}, 'value': {'required': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, } <NEW_LINE> def __init__(self, *, name: str, value: str, **kwargs) -> None: <NEW_LINE> <INDENT> super(MetadataItem, self).__init__(**kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.value = value
A name-value pair associated with a Batch service resource. The Batch service does not assign any meaning to this metadata; it is solely for the use of user code. All required parameters must be populated in order to send to Azure. :param name: Required. :type name: str :param value: Required. :type value: str
625990a0d8ef3951e32c8dc6
class Configurator(object): <NEW_LINE> <INDENT> def deploy_cert(self, vhost, cert, key, cert_chain=None): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def choose_virtual_host(self, name): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def get_all_names(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def enable_redirect(self, ssl_vhost): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def enable_hsts(self, ssl_vhost): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def enable_ocsp_stapling(self, ssl_vhost): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def get_all_certs_keys(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def enable_site(self, vhost): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def save(self, title=None, temporary=False): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def revert_challenge_config(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def rollback_checkpoints(self, rollback=1): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def display_checkpoints(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def config_test(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def restart(self, quiet=False): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def perform(self, chall_type, tup): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> raise NotImplementedError()
Generic Let's Encrypt configurator. This class represents all possible webservers and configuration editors. This includes the generic webserver, which won't have configuration files at all but will instead create a new process to handle the DVSNI and other challenges.
625990a0091ae35668706b0b