code (string, lengths 4-4.48k) | docstring (string, lengths 1-6.45k) | _id (string, length 24) |
---|---|---|
class Scheduler(Resource): <NEW_LINE> <INDENT> rest_entity_path = "scheduler" <NEW_LINE> @staticmethod <NEW_LINE> def list(page = None, per_page = None): <NEW_LINE> <INDENT> conn = Qubole.agent() <NEW_LINE> url_path = Scheduler.rest_entity_path <NEW_LINE> page_attr = [] <NEW_LINE> if page is not None: <NEW_LINE> <INDENT> page_attr.append("page=%s" % page) <NEW_LINE> <DEDENT> if per_page is not None: <NEW_LINE> <INDENT> page_attr.append("per_page=%s" % per_page) <NEW_LINE> <DEDENT> if page_attr: <NEW_LINE> <INDENT> url_path = "%s?%s" % (Scheduler.rest_entity_path, "&".join(page_attr)) <NEW_LINE> <DEDENT> schedjson = conn.get(url_path) <NEW_LINE> schedlist = [] <NEW_LINE> for s in schedjson["schedules"]: <NEW_LINE> <INDENT> schedlist.append(Scheduler(s)) <NEW_LINE> <DEDENT> return schedlist <NEW_LINE> <DEDENT> def suspend(self): <NEW_LINE> <INDENT> conn = Qubole.agent() <NEW_LINE> data = {"status": "suspend"} <NEW_LINE> return conn.put(self.element_path(self.id), data) <NEW_LINE> <DEDENT> def resume(self): <NEW_LINE> <INDENT> conn = Qubole.agent() <NEW_LINE> data = {"status": "resume"} <NEW_LINE> return conn.put(self.element_path(self.id), data) <NEW_LINE> <DEDENT> def kill(self): <NEW_LINE> <INDENT> conn = Qubole.agent() <NEW_LINE> data = {"status": "kill"} <NEW_LINE> return conn.put(self.element_path(self.id), data) <NEW_LINE> <DEDENT> def list_instances(self, page=None, per_page=None): <NEW_LINE> <INDENT> conn = Qubole.agent() <NEW_LINE> url_path = self.element_path(self.id) + "/" + "instances" <NEW_LINE> page_attr = [] <NEW_LINE> if page is not None: <NEW_LINE> <INDENT> page_attr.append("page=%s" % page) <NEW_LINE> <DEDENT> if per_page is not None: <NEW_LINE> <INDENT> page_attr.append("per_page=%s" % per_page) <NEW_LINE> <DEDENT> if page_attr: <NEW_LINE> <INDENT> url_path = "%s/instances?%s" % (self.element_path(self.id), "&".join(page_attr)) <NEW_LINE> <DEDENT> cmdjson = conn.get(url_path) <NEW_LINE> cmdlist = [] <NEW_LINE> for cmd in cmdjson["commands"]: <NEW_LINE> <INDENT> cmdclass = globals()[cmd["command_type"]] <NEW_LINE> onecmd = cmdclass(cmd) <NEW_LINE> cmdlist.append(onecmd) <NEW_LINE> <DEDENT> return cmdlist <NEW_LINE> <DEDENT> def rerun(self, instance_id): <NEW_LINE> <INDENT> conn = Qubole.agent() <NEW_LINE> url_path = self.element_path(self.id) + "/instances/" + instance_id + "/rerun" <NEW_LINE> return conn.post(url_path)['status'] | qds_sdk.Schedule is the base Qubole Schedule class. | 6259908566673b3332c31f50 |
class PeprCoreSettings(Settings): <NEW_LINE> <INDENT> roles = {} <NEW_LINE> def validate(self, config): <NEW_LINE> <INDENT> if 'roles' in config: <NEW_LINE> <INDENT> roles = (role if issubclass(role, Role) else self.import_item(role) for role in config['roles']) <NEW_LINE> config['roles'] = { role.access: role for role in roles } <NEW_LINE> <DEDENT> return config | Settings for Pepr Core application. | 625990853317a56b869bf2ec |
class Cours(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = "cours" <NEW_LINE> <DEDENT> enseignant = models.ForeignKey('Enseignant', on_delete=models.PROTECT) <NEW_LINE> horaire = models.ForeignKey('Horaire', on_delete=models.PROTECT) <NEW_LINE> formation = models.ForeignKey('Formation', on_delete=models.PROTECT) <NEW_LINE> capacite = models.IntegerField(default=18) <NEW_LINE> ouverture = models.ForeignKey('Ouverture', on_delete=models.PROTECT) <NEW_LINE> barrette = models.ForeignKey('Barrette', on_delete=models.PROTECT) <NEW_LINE> invalide = models.BooleanField(default=False) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{} {} {} {} (max={})".format(self.ouverture.nom_session, self.horaire, abrege(self.enseignant, 10), abrege(self.formation, 70), self.capacite) <NEW_LINE> <DEDENT> @property <NEW_LINE> def estOuvert(self): <NEW_LINE> <INDENT> return self.ouverture.estActive(barrette=self.barrette) <NEW_LINE> <DEDENT> @property <NEW_LINE> def estVisibleAuxEleves(self): <NEW_LINE> <INDENT> return self.ouverture.estVisibleAuxEleves(barrette=self.barrette) <NEW_LINE> <DEDENT> @property <NEW_LINE> def n(self): <NEW_LINE> <INDENT> return len(Inscription.objects.filter(cours=self)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def estRecent(self): <NEW_LINE> <INDENT> return self.ouverture.estRecente(barrette=self.barrette) <NEW_LINE> <DEDENT> @property <NEW_LINE> def complet(self): <NEW_LINE> <INDENT> jauge=Inscription.objects.filter(cours=self).count() <NEW_LINE> return jauge >= self.capacite | Represents a course ("cours"), i.e. describes the complete
characteristics of an AP course: the teacher who gives it, the training
module ("granule de formation") the teacher will deliver, the start time,
the capacity (maximum number of students who can vote for this course),
and the opening/closing dates of the votes to enroll in it. | 62599085283ffb24f3cf53f1 |
class PolygonList(GUIStructure, PolygonList): <NEW_LINE> <INDENT> pass | Overloading PolygonList class | 6259908550812a4eaa62196d |
class VPNBrokerDomain(Element): <NEW_LINE> <INDENT> typeof = "vpn_broker_domain" <NEW_LINE> @classmethod <NEW_LINE> def create(cls, name, mac_address_prefix, file_ref, comment=None, **kw): <NEW_LINE> <INDENT> kw.update( name=name, comment=comment, mac_address_prefix=mac_address_prefix, file_ref=file_ref ) <NEW_LINE> return ElementCreator(cls, json=kw) | VPN Broker Domain element defines the virtual network that
contains the VPN Broker gateway and the VPN Broker members. | 62599085f9cc0f698b1c6074 |
class EasyModes(Pass): <NEW_LINE> <INDENT> name = "Easy addressing modes pass" <NEW_LINE> def visitMemory(self, node, env): <NEW_LINE> <INDENT> if Ops.opcodes[node.data[0]][Ops.modes.index("Relative")] is not None: <NEW_LINE> <INDENT> node.nodetype = "Relative" <NEW_LINE> return <NEW_LINE> <DEDENT> if Ops.opcodes[node.data[0]][Ops.modes.index("RelativeLong")] is not None: <NEW_LINE> <INDENT> node.nodetype = "RelativeLong" <NEW_LINE> return <NEW_LINE> <DEDENT> if node.data[1].hardcoded: <NEW_LINE> <INDENT> if not collapse_no_index(node, env): <NEW_LINE> <INDENT> node.nodetype = "Absolute" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def visitMemoryX(self, node, env): <NEW_LINE> <INDENT> if node.data[1].hardcoded: <NEW_LINE> <INDENT> if not collapse_x(node, env): <NEW_LINE> <INDENT> node.nodetype = "AbsoluteX" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def visitMemoryY(self, node, env): <NEW_LINE> <INDENT> if node.data[1].hardcoded: <NEW_LINE> <INDENT> if not collapse_y(node, env): <NEW_LINE> <INDENT> node.nodetype = "AbsoluteY" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def visitMemory2(self, node, env): <NEW_LINE> <INDENT> node.nodetype = "ZPRelative" <NEW_LINE> <DEDENT> def visitPointer(self, node, env): <NEW_LINE> <INDENT> if node.data[1].hardcoded: <NEW_LINE> <INDENT> if not collapse_no_index_ind(node, env): <NEW_LINE> <INDENT> node.nodetype = "Indirect" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def visitPointerX(self, node, env): <NEW_LINE> <INDENT> if node.data[1].hardcoded: <NEW_LINE> <INDENT> if not collapse_x_ind(node, env): <NEW_LINE> <INDENT> node.nodetype = "AbsIndX" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def visitPointerY(self, node, env): <NEW_LINE> <INDENT> if node.data[1].hardcoded: <NEW_LINE> <INDENT> if not collapse_y_ind(node, env): <NEW_LINE> <INDENT> node.nodetype = "AbsIndY" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def visitPointerSPY(self, node, env): <NEW_LINE> <INDENT> if node.data[1].hardcoded: <NEW_LINE> <INDENT> if not collapse_spy_ind(node, env): <NEW_LINE> <INDENT> node.nodetype = "AbsIndSPY" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def visitPointerZ(self, node, env): <NEW_LINE> <INDENT> if node.data[1].hardcoded: <NEW_LINE> <INDENT> if not collapse_z_ind(node, env): <NEW_LINE> <INDENT> node.nodetype = "AbsIndZ" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def visitUnknown(self, node, env): <NEW_LINE> <INDENT> pass | Assigns address modes to hardcoded and branch instructions | 6259908592d797404e389905 |
class ArdbInvXMLWriterTests(SutekhTest): <NEW_LINE> <INDENT> def test_deck_writer(self): <NEW_LINE> <INDENT> oPhysCardSet1 = make_set_1() <NEW_LINE> sCurDate = time.strftime('>%Y-%m-%d<', time.localtime()) <NEW_LINE> oWriter = WriteArdbInvXML() <NEW_LINE> sData = self._round_trip_obj(oWriter, CardSetWrapper(oPhysCardSet1)) <NEW_LINE> sData = sData.replace(sCurDate, '>DATE<') <NEW_LINE> self.assertEqual(sData, EXPECTED_1) | class for the ARDB inventory XML writer tests | 62599085a8370b77170f1f1e |
class ECKey(Key): <NEW_LINE> <INDENT> members = ["kty", "alg", "use", "kid", "crv", "x", "y", "d"] <NEW_LINE> longs = ['x', 'y', 'd'] <NEW_LINE> public_members = ["kty", "alg", "use", "kid", "crv", "x", "y"] <NEW_LINE> required = ['crv', 'key', 'x', 'y'] <NEW_LINE> def __init__(self, kty="EC", alg="", use="", kid="", key=None, crv="", x="", y="", d="", curve=None, **kwargs): <NEW_LINE> <INDENT> Key.__init__(self, kty, alg, use, kid, key, **kwargs) <NEW_LINE> self.crv = crv <NEW_LINE> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.d = d <NEW_LINE> self.curve = curve <NEW_LINE> if self.crv and not self.curve: <NEW_LINE> <INDENT> self.verify() <NEW_LINE> self.deserialize() <NEW_LINE> <DEDENT> elif self.key: <NEW_LINE> <INDENT> if not self.crv and not self.curve: <NEW_LINE> <INDENT> self.load_key(key) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def deserialize(self): <NEW_LINE> <INDENT> if not (self.x and self.y and self.crv): <NEW_LINE> <INDENT> DeSerializationNotPossible() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if not isinstance(self.x, six.integer_types): <NEW_LINE> <INDENT> self.x = deser(self.x) <NEW_LINE> <DEDENT> if not isinstance(self.y, six.integer_types): <NEW_LINE> <INDENT> self.y = deser(self.y) <NEW_LINE> <DEDENT> <DEDENT> except TypeError: <NEW_LINE> <INDENT> raise DeSerializationNotPossible() <NEW_LINE> <DEDENT> except ValueError as err: <NEW_LINE> <INDENT> raise DeSerializationNotPossible("%s" % err) <NEW_LINE> <DEDENT> self.curve = NISTEllipticCurve.by_name(self.crv) <NEW_LINE> if self.d: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if isinstance(self.d, six.string_types): <NEW_LINE> <INDENT> self.d = deser(self.d) <NEW_LINE> <DEDENT> <DEDENT> except ValueError as err: <NEW_LINE> <INDENT> raise DeSerializationNotPossible(str(err)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_key(self, private=False, **kwargs): <NEW_LINE> <INDENT> if private: <NEW_LINE> <INDENT> if self.d: <NEW_LINE> <INDENT> return self.d <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self.x and self.y: <NEW_LINE> <INDENT> return self.x, self.y <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def serialize(self, private=False): <NEW_LINE> <INDENT> if not self.crv and not self.curve: <NEW_LINE> <INDENT> raise SerializationNotPossible() <NEW_LINE> <DEDENT> res = self.common() <NEW_LINE> res.update({ "crv": self.curve.name(), "x": long_to_base64(self.x), "y": long_to_base64(self.y) }) <NEW_LINE> if private and self.d: <NEW_LINE> <INDENT> res["d"] = long_to_base64(self.d) <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> def load_key(self, key): <NEW_LINE> <INDENT> self.curve = key <NEW_LINE> self.d, (self.x, self.y) = key.key_pair() <NEW_LINE> return self <NEW_LINE> <DEDENT> def decryption_key(self): <NEW_LINE> <INDENT> return self.get_key(private=True) <NEW_LINE> <DEDENT> def encryption_key(self, private=False, **kwargs): <NEW_LINE> <INDENT> return self.get_key(private=private) | JSON Web key representation of a Elliptic curve key | 62599085d8ef3951e32c8c07 |
class UartAdvertisement(Advertisement): <NEW_LINE> <INDENT> uuid = '7d2fd14b-8897-48de-9719-15aa4edb5d57' <NEW_LINE> def __init__(self, bus, index, local_name): <NEW_LINE> <INDENT> Advertisement.__init__(self, bus, index, "peripheral") <NEW_LINE> self.add_service_uuid(self.uuid) <NEW_LINE> self.add_local_name(local_name) <NEW_LINE> self.include_tx_power = True | Generic UART advertisement. | 625990855fcc89381b266f06 |
class KirillovReshetikhinGenericCrystal(AffineCrystalFromClassical): <NEW_LINE> <INDENT> def __init__(self, cartan_type, r, s, dual=None): <NEW_LINE> <INDENT> Parent.__init__(self, category=KirillovReshetikhinCrystals()) <NEW_LINE> if dual is None: <NEW_LINE> <INDENT> self._cartan_type = cartan_type <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._cartan_type = CartanType(cartan_type).dual() <NEW_LINE> <DEDENT> self._r = r <NEW_LINE> self._s = s <NEW_LINE> self._dual = dual <NEW_LINE> AffineCrystalFromClassical.__init__(self, cartan_type, self.classical_decomposition(), KirillovReshetikhinCrystals()) <NEW_LINE> <DEDENT> def _repr_(self): <NEW_LINE> <INDENT> return "Kirillov-Reshetikhin crystal of type %s with (r,s)=(%d,%d)" % (self.cartan_type(), self.r(), self.s()) <NEW_LINE> <DEDENT> def _element_constructor_(self, *args, **options): <NEW_LINE> <INDENT> from sage.combinat.rigged_configurations.kr_tableaux import KirillovReshetikhinTableauxElement <NEW_LINE> if isinstance(args[0], KirillovReshetikhinTableauxElement): <NEW_LINE> <INDENT> elt = args[0] <NEW_LINE> if elt.cartan_type() != self.cartan_type() or elt.parent().r() != self._r or elt.parent().s() != self._s: <NEW_LINE> <INDENT> raise ValueError("the Kirillov-Reshetikhin tableau must have the same Cartan type and shape") <NEW_LINE> <DEDENT> to_hw = elt.to_classical_highest_weight() <NEW_LINE> rows = [] <NEW_LINE> letters = elt.parent().letters <NEW_LINE> for i, mult in sorted(to_hw[0].classical_weight()): <NEW_LINE> <INDENT> rows.append([letters(i+1)] * int(mult)) <NEW_LINE> <DEDENT> hw_elt = self(rows=rows) <NEW_LINE> f_str = reversed(to_hw[1]) <NEW_LINE> return hw_elt.f_string(f_str) <NEW_LINE> <DEDENT> return AffineCrystalFromClassical._element_constructor_(self, *args, **options) <NEW_LINE> <DEDENT> def module_generator(self): <NEW_LINE> <INDENT> R = self.weight_lattice_realization() <NEW_LINE> Lambda = R.fundamental_weights() <NEW_LINE> r = self.r() <NEW_LINE> s = self.s() <NEW_LINE> weight = s*Lambda[r] - s*Lambda[0] * Lambda[r].level() / Lambda[0].level() <NEW_LINE> return [b for b in self.module_generators if b.weight() == weight][0] <NEW_LINE> <DEDENT> def r(self): <NEW_LINE> <INDENT> return self._r <NEW_LINE> <DEDENT> def s(self): <NEW_LINE> <INDENT> return self._s <NEW_LINE> <DEDENT> @cached_method <NEW_LINE> def classically_highest_weight_vectors(self): <NEW_LINE> <INDENT> return tuple([self.retract(mg) for mg in self.classical_decomposition().module_generators]) <NEW_LINE> <DEDENT> def kirillov_reshetikhin_tableaux(self): <NEW_LINE> <INDENT> from sage.combinat.rigged_configurations.kr_tableaux import KirillovReshetikhinTableaux <NEW_LINE> return KirillovReshetikhinTableaux(self.cartan_type(), self._r, self._s) | Generic class for Kirillov-Reshetikhin crystal `B^{r,s}` of the given type.
Input is a Dynkin node ``r``, a positive integer ``s``, and a Cartan type
``cartan_type``. | 6259908566673b3332c31f52 |
class TokenPrinter: <NEW_LINE> <INDENT> currentCol, currentLine = 0, 1 <NEW_LINE> lastIdentifier = parameters = 0 <NEW_LINE> encoding = "utf-8" <NEW_LINE> def __init__(self, writer): <NEW_LINE> <INDENT> self.writer = writer <NEW_LINE> <DEDENT> def printtoken(self, type, token, sCoordinates, eCoordinates, line): <NEW_LINE> <INDENT> if hasattr(tokenize, "ENCODING") and type == tokenize.ENCODING: <NEW_LINE> <INDENT> self.encoding = token <NEW_LINE> return <NEW_LINE> <DEDENT> if not isinstance(token, bytes): <NEW_LINE> <INDENT> token = token.encode(self.encoding) <NEW_LINE> <DEDENT> (srow, scol) = sCoordinates <NEW_LINE> (erow, ecol) = eCoordinates <NEW_LINE> if self.currentLine < srow: <NEW_LINE> <INDENT> self.writer(b"\n" * (srow - self.currentLine)) <NEW_LINE> self.currentLine, self.currentCol = srow, 0 <NEW_LINE> <DEDENT> self.writer(b" " * (scol - self.currentCol)) <NEW_LINE> if self.lastIdentifier: <NEW_LINE> <INDENT> type = "identifier" <NEW_LINE> self.parameters = 1 <NEW_LINE> <DEDENT> elif type == tokenize.NAME: <NEW_LINE> <INDENT> if keyword.iskeyword(token): <NEW_LINE> <INDENT> type = "keyword" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.parameters: <NEW_LINE> <INDENT> type = "parameter" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> type = "variable" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> type = tokenize.tok_name.get(type) <NEW_LINE> assert type is not None <NEW_LINE> type = type.lower() <NEW_LINE> <DEDENT> self.writer(token, type) <NEW_LINE> self.currentCol = ecol <NEW_LINE> self.currentLine += token.count(b"\n") <NEW_LINE> if self.currentLine != erow: <NEW_LINE> <INDENT> self.currentCol = 0 <NEW_LINE> <DEDENT> self.lastIdentifier = token in (b"def", b"class") <NEW_LINE> if token == b":": <NEW_LINE> <INDENT> self.parameters = 0 | Format a stream of tokens and intermediate whitespace, for pretty-printing. | 625990854c3428357761be0d |
class Personal_Key_Dialog(MacMozyUIBase): <NEW_LINE> <INDENT> parent_match = {'AXRole': 'AXWindow', 'AXTitle': "Save"} <NEW_LINE> elements =[] <NEW_LINE> elements.append(Element("btn_cancel", {'AXRole': 'AXButton', 'AXTitle': 'Cancel'}, parent_matcher=parent_match)) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(Personal_Key_Dialog, self).__init__() | description: | 62599085656771135c48adda |
class GoslingAgentWrapper(GoslingAgent, SharedInfo): <NEW_LINE> <INDENT> def __init__(self, name: str, team: int, index: int): <NEW_LINE> <INDENT> self.flush: bool = None <NEW_LINE> self.stack: List = None <NEW_LINE> self.index = index <NEW_LINE> super(GoslingAgentWrapper, self).__init__(name, team, index) <NEW_LINE> self.initialize_agent() <NEW_LINE> <DEDENT> def get_field_info(self): <NEW_LINE> <INDENT> return self.world.field_info <NEW_LINE> <DEDENT> def update(self, routine) -> Tuple[Optional[List], bool, bool]: <NEW_LINE> <INDENT> self.controller.__init__() <NEW_LINE> self.flush = False <NEW_LINE> if not self.ready: <NEW_LINE> <INDENT> self.get_ready(self.world.packet) <NEW_LINE> <DEDENT> self.preprocess(self.world.packet) <NEW_LINE> self.stack = [routine] <NEW_LINE> self.stack[-1].run(self) <NEW_LINE> if len(self.stack) > 0: <NEW_LINE> <INDENT> return self.stack, len(self.stack) == 0, self.flush <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None, len(self.stack) == 0, self.flush <NEW_LINE> <DEDENT> <DEDENT> def flush_actions(self): <NEW_LINE> <INDENT> self.stack = [] <NEW_LINE> self.flush = True | "Gosling wrapper used to enable the use of Gosling routines together with the CCP model.
:param name: The name of the drone as given by RLBOT.
:param team: The team of the drone as given by RLBOT (0 for blue or 1 for orange).
:param index: The unique index of the drone as given by RLBOT. | 62599085f9cc0f698b1c6075 |
class _ITopicSubscriptionProxy: <NEW_LINE> <INDENT> __jsii_type__: typing.ClassVar[str] = "@aws-cdk/aws-sns.ITopicSubscription" <NEW_LINE> @jsii.member(jsii_name="bind") <NEW_LINE> def bind(self, topic: ITopic) -> "TopicSubscriptionConfig": <NEW_LINE> <INDENT> return jsii.invoke(self, "bind", [topic]) | Topic subscription. | 6259908563b5f9789fe86cbc |
class ZoneToHandlerAcceptBagCreateSet(CreateViewSet): <NEW_LINE> <INDENT> queryset = CustomerBag.objects.all() <NEW_LINE> serializer_class = ZoneToHandlerAcceptSerializer <NEW_LINE> def create(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.response = {} <NEW_LINE> self.response['result'] = [] <NEW_LINE> serializer = self.get_serializer(data=request.data) <NEW_LINE> if serializer.is_valid(raise_exception=True): <NEW_LINE> <INDENT> self.perform_create(serializer) <NEW_LINE> self.response['status'] = 200 <NEW_LINE> self.response['message'] = 'These bags are received from zoner' <NEW_LINE> return Response(self.response) <NEW_LINE> <DEDENT> return Response(serializer.errors, status=HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def perform_create(self, serializer): <NEW_LINE> <INDENT> now = datetime.datetime.now() <NEW_LINE> transaction = self.request.data.get('transaction', None) <NEW_LINE> location1 = self.request.data.get('zone_manager_qr_code', None) <NEW_LINE> location2 = self.request.data.get('handler_qr_code', None) <NEW_LINE> stored_location_id = self.request.data.get('stored_location_id', None) <NEW_LINE> zone_id = self.request.data.get('zone_id', None) <NEW_LINE> try: <NEW_LINE> <INDENT> for bag_id in stored_location_id: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> parent_obj = CustomerBag.objects.filter(status=0, stored_location_id=bag_id, transaction=0, parent_id=None).last() <NEW_LINE> child_obj = CustomerBag.objects.filter(status=0, parent_id=parent_obj) <NEW_LINE> CustomerObj = CustomerBag.objects.filter(stored_location_id=bag_id, parent_id=parent_obj).order_by( 'created_on').last() <NEW_LINE> if CustomerObj: <NEW_LINE> <INDENT> zone = CustomerObj.zone <NEW_LINE> <DEDENT> transaction_update_create(parent_obj, child_obj, bag_id, transaction, location1, location2, zone) <NEW_LINE> self.response['result'].append(bag_id) <NEW_LINE> send_message(receiver.user_id.username,parent_obj.customer_id,zone.name,CUSTOMER_REQ) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> receiver1 = ProfileDetails.objects.filter(qr_code=location1).last() <NEW_LINE> receiver2 = ProfileDetails.objects.filter(qr_code=location2).last() <NEW_LINE> mobile = receiver1.user_id.username <NEW_LINE> location1 = receiver1.user_id.first_name <NEW_LINE> location2 = receiver2.user_id.first_name <NEW_LINE> print("mobile=", mobile, "location1==",location1,"location2==",location2 ) <NEW_LINE> send_message(mobile,location1,location2,HANDLER_RECIEVER) <NEW_LINE> print("mobile=", mobile, "location1==",location2,"location2==",location1 ) <NEW_LINE> mobile = receiver2.user_id.username <NEW_LINE> send_message(mobile,location2,location1,ZONE_TRANSFER) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print ("handled exception", e) <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> except Location.DoesNotExist: <NEW_LINE> <INDENT> self.response['result'] = [] | Description: This class is used to create a Zone-Handler view set
When handler receives the bag from zoner ( Zone->Handler => receive )
Author : Sakeer P
Created At: 1st February 2018 | 625990855fcc89381b266f07 |
class command_grep(HoneyPotCommand): <NEW_LINE> <INDENT> def grep_get_contents(self, filename, match): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> contents = self.fs.file_contents(filename) <NEW_LINE> self.grep_application(contents, match) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.errorWrite("grep: {}: No such file or directory\n".format(filename)) <NEW_LINE> <DEDENT> <DEDENT> def grep_application(self, contents, match): <NEW_LINE> <INDENT> match = os.path.basename(match) <NEW_LINE> match = match.replace("\"", "") <NEW_LINE> contentsplit = contents.split('\n') <NEW_LINE> matches = re.compile(".*" + match + ".*") <NEW_LINE> for line in contentsplit: <NEW_LINE> <INDENT> if matches.match(line): <NEW_LINE> <INDENT> self.write(line + '\n') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def help(self): <NEW_LINE> <INDENT> self.write('usage: grep [-abcDEFGHhIiJLlmnOoPqRSsUVvwxZ] [-A num] [-B num] [-C[num]]\n') <NEW_LINE> self.write('\t[-e pattern] [-f file] [--binary-files=value] [--color=when]\n') <NEW_LINE> self.write('\t[--context[=num]] [--directories=action] [--label] [--line-buffered]\n') <NEW_LINE> self.write('\t[--null] [pattern] [file ...]\n') <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> if not self.args: <NEW_LINE> <INDENT> self.help() <NEW_LINE> self.exit() <NEW_LINE> return <NEW_LINE> <DEDENT> self.n = 10 <NEW_LINE> if self.args[0] == '>': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> optlist, args = getopt.getopt(self.args, 'abcDEFGHhIiJLlmnOoPqRSsUVvwxZA:B:C:e:f:') <NEW_LINE> <DEDENT> except getopt.GetoptError as err: <NEW_LINE> <INDENT> self.errorWrite("grep: invalid option -- {}\n".format(err.opt)) <NEW_LINE> self.help() <NEW_LINE> self.exit() <NEW_LINE> return <NEW_LINE> <DEDENT> for opt in optlist: <NEW_LINE> <INDENT> if opt == '-h': <NEW_LINE> <INDENT> self.help() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not self.input_data: <NEW_LINE> <INDENT> files = self.check_arguments("grep", args[1:]) <NEW_LINE> for pname in files: <NEW_LINE> <INDENT> self.grep_get_contents(pname, args[0]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.grep_application(self.input_data, args[0]) <NEW_LINE> <DEDENT> self.exit() <NEW_LINE> <DEDENT> def lineReceived(self, line): <NEW_LINE> <INDENT> log.msg(eventid='cowrie.command.input', realm='grep', input=line, format='INPUT (%(realm)s): %(input)s') <NEW_LINE> <DEDENT> def handle_CTRL_D(self): <NEW_LINE> <INDENT> self.exit() | grep command | 625990857cff6e4e811b7596 |
class HitCount(models.Model): <NEW_LINE> <INDENT> hits = models.PositiveIntegerField(default=0) <NEW_LINE> modified = models.DateTimeField(auto_now=True) <NEW_LINE> content_type = models.ForeignKey( ContentType, related_name="content_type_set_for_%(class)s", on_delete=models.CASCADE) <NEW_LINE> object_pk = models.TextField('object ID') <NEW_LINE> content_object = GenericForeignKey('content_type', 'object_pk') <NEW_LINE> objects = HitCountManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ('-hits',) <NEW_LINE> get_latest_by = "modified" <NEW_LINE> verbose_name = _("hit count") <NEW_LINE> verbose_name_plural = _("hit counts") <NEW_LINE> unique_together = ("content_type", "object_pk") <NEW_LINE> db_table = "hitcount_hit_count" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '%s' % self.content_object <NEW_LINE> <DEDENT> def increase(self): <NEW_LINE> <INDENT> self.hits = F('hits') + 1 <NEW_LINE> self.save() <NEW_LINE> <DEDENT> def decrease(self): <NEW_LINE> <INDENT> self.hits = F('hits') - 1 <NEW_LINE> self.save() <NEW_LINE> <DEDENT> def hits_in_last(self, **kwargs): <NEW_LINE> <INDENT> assert kwargs, "Must provide at least one timedelta arg (eg, days=1)" <NEW_LINE> period = timezone.now() - timedelta(**kwargs) <NEW_LINE> return self.hit_set.filter(created__gte=period).count() | Model that stores the hit totals for any content object. | 62599085283ffb24f3cf53f5 |
class InteractiveInterpreter: <NEW_LINE> <INDENT> def __init__(self, locals=None): <NEW_LINE> <INDENT> if locals is None: <NEW_LINE> <INDENT> locals = {"__name__": "__console__", "__doc__": None} <NEW_LINE> <DEDENT> self.locals = locals <NEW_LINE> self.compile = CommandCompiler() <NEW_LINE> <DEDENT> async def runsource(self, source, filename="<input>", symbol="single"): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> code = self.compile(source.decode(), filename, symbol) <NEW_LINE> <DEDENT> except (OverflowError, SyntaxError, ValueError): <NEW_LINE> <INDENT> await self.showsyntaxerror(filename) <NEW_LINE> return False <NEW_LINE> <DEDENT> if code is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> await self.runcode(code) <NEW_LINE> return False <NEW_LINE> <DEDENT> async def runcode(self, code): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with stdoutIO() as s: <NEW_LINE> <INDENT> exec(code, self.locals) <NEW_LINE> <DEDENT> await self.write(s.getvalue()) <NEW_LINE> <DEDENT> except SystemExit: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> await self.showtraceback() <NEW_LINE> <DEDENT> <DEDENT> async def showsyntaxerror(self, filename=None): <NEW_LINE> <INDENT> type, value, tb = sys.exc_info() <NEW_LINE> sys.last_type = type <NEW_LINE> sys.last_value = value <NEW_LINE> sys.last_traceback = tb <NEW_LINE> if filename and type is SyntaxError: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> msg, (dummy_filename, lineno, offset, line) = value.args <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = SyntaxError(msg, (filename, lineno, offset, line)) <NEW_LINE> sys.last_value = value <NEW_LINE> <DEDENT> <DEDENT> if sys.excepthook is sys.__excepthook__: <NEW_LINE> <INDENT> lines = traceback.format_exception_only(type, value) <NEW_LINE> await self.write(''.join(lines)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sys.excepthook(type, value, tb) <NEW_LINE> <DEDENT> <DEDENT> async def showtraceback(self): <NEW_LINE> <INDENT> sys.last_type, sys.last_value, last_tb = ei = sys.exc_info() <NEW_LINE> sys.last_traceback = last_tb <NEW_LINE> try: <NEW_LINE> <INDENT> lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next) <NEW_LINE> if sys.excepthook is sys.__excepthook__: <NEW_LINE> <INDENT> await self.write(''.join(lines)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sys.excepthook(ei[0], ei[1], last_tb) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> last_tb = ei = None <NEW_LINE> <DEDENT> <DEDENT> async def write(self, data): <NEW_LINE> <INDENT> self.writer.write(data.encode()) <NEW_LINE> await self.writer.drain() | Base class for InteractiveConsole.
This class deals with parsing and interpreter state (the user's
namespace); it doesn't deal with input buffering or prompting or
input file naming (the filename is always passed in explicitly). | 6259908523849d37ff852c0f |
class DMM(Module): <NEW_LINE> <INDENT> def __init__(self, dim: int, n_components: int) -> None: <NEW_LINE> <INDENT> super(DMM, self).__init__() <NEW_LINE> self._dim = dim <NEW_LINE> self._n_components = n_components <NEW_LINE> mixture_logits = torch.zeros((n_components,), dtype=torch.float) <NEW_LINE> self.mixture_logits = Parameter(mixture_logits) <NEW_LINE> self.log_alphas = ParameterList() <NEW_LINE> for _ in range(n_components): <NEW_LINE> <INDENT> log_alpha = Parameter(torch.randn(dim, dtype=torch.float)/3) <NEW_LINE> self.log_alphas.append(log_alpha) <NEW_LINE> <DEDENT> <DEDENT> @overrides <NEW_LINE> def forward(self, observed_data: torch.FloatTensor): <NEW_LINE> <INDENT> batch_size = observed_data.size()[0] <NEW_LINE> prior_log_probs = F.log_softmax(self.mixture_logits, dim=-1) <NEW_LINE> membership_log_probs = torch.empty(size=(batch_size, self._n_components), requires_grad=False) <NEW_LINE> for i in range(self._n_components): <NEW_LINE> <INDENT> membership_log_probs[:, i] = prior_log_probs[i] + log_p(observed_data, self.log_alphas[i]) <NEW_LINE> <DEDENT> denom = torch.logsumexp(membership_log_probs, dim=1) <NEW_LINE> denom = denom.unsqueeze(1) <NEW_LINE> membership_log_probs -= denom <NEW_LINE> membership_probs = membership_log_probs.exp().detach() <NEW_LINE> nll = torch.empty(size=(batch_size,), requires_grad=False) <NEW_LINE> for i in range(self._n_components): <NEW_LINE> <INDENT> log_likelihood = log_p(observed_data, self.log_alphas[i]) + prior_log_probs[i] <NEW_LINE> nll[:,] -= membership_probs[:, i] * log_likelihood <NEW_LINE> <DEDENT> return nll, membership_probs | Dirichlet Mixture Model
Parameters
==========
dim : ``int``
Dimension of the observed data.
n_components : ``int``
Number of mixture components. | 6259908599fddb7c1ca63b85 |
class StatusView(Resource, ParcelOrder): <NEW_LINE> <INDENT> @jwt_required <NEW_LINE> def put(self, parcel_id): <NEW_LINE> <INDENT> user_id = get_jwt_identity() <NEW_LINE> user = UserModel().get_user_by_id(user_id) <NEW_LINE> parcel = ParcelOrder().get_parcel_by_id(parcel_id) <NEW_LINE> if user and parcel: <NEW_LINE> <INDENT> if user[4] == "admin": <NEW_LINE> <INDENT> status = ParcelOrder().change_status(parcel_id) <NEW_LINE> return make_response(jsonify(status), 200) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return make_response(jsonify({ "message": "You have no access rights to change the status of parcel order"}), 403) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return {"message": "user or parcel does not exist"}, 404 | docstring for StatusView. | 62599085a8370b77170f1f22 |
class SecretSetParameters(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'value': {'required': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'content_type': {'key': 'contentType', 'type': 'str'}, 'secret_attributes': {'key': 'attributes', 'type': 'SecretAttributes'}, } <NEW_LINE> def __init__( self, *, value: str, tags: Optional[Dict[str, str]] = None, content_type: Optional[str] = None, secret_attributes: Optional["SecretAttributes"] = None, **kwargs ): <NEW_LINE> <INDENT> super(SecretSetParameters, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.tags = tags <NEW_LINE> self.content_type = content_type <NEW_LINE> self.secret_attributes = secret_attributes | The secret set parameters.
All required parameters must be populated in order to send to Azure.
:ivar value: Required. The value of the secret.
:vartype value: str
:ivar tags: A set of tags. Application specific metadata in the form of key-value pairs.
:vartype tags: dict[str, str]
:ivar content_type: Type of the secret value such as a password.
:vartype content_type: str
:ivar secret_attributes: The secret management attributes.
:vartype secret_attributes: ~azure.keyvault.v2016_10_01.models.SecretAttributes | 6259908563b5f9789fe86cbe |
class MicrosoftBingSearch(object): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.header = api_request_header <NEW_LINE> self.img_search_url = img_search_url <NEW_LINE> self.current_query = "" <NEW_LINE> self.current_offset = 0 <NEW_LINE> self.query_threshold = kwargs.get('query_threshold') <NEW_LINE> if not self.query_threshold: <NEW_LINE> <INDENT> self.query_threshold = 1000 <NEW_LINE> <DEDENT> self._number_of_queries = 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def number_of_queries(self): <NEW_LINE> <INDENT> return self._number_of_queries <NEW_LINE> <DEDENT> @number_of_queries.setter <NEW_LINE> def number_of_queries(self, value): <NEW_LINE> <INDENT> if value > self.query_threshold: <NEW_LINE> <INDENT> raise RuntimeError("This instance has made {} queries. No more queries allowed".format(self.query_threshold)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._number_of_queries = value <NEW_LINE> <DEDENT> <DEDENT> def get_img_search_response(self, query='', count=1, offset=0, safeSearch=None): <NEW_LINE> <INDENT> params = { 'q': query, 'count': count, 'offset': offset, } <NEW_LINE> if safeSearch: <NEW_LINE> <INDENT> params['safeSearch'] = safeSearch <NEW_LINE> <DEDENT> self.number_of_queries += 1 <NEW_LINE> r = requests.get(self.img_search_url, params=params, headers=self.header) <NEW_LINE> if r.status_code == 200: <NEW_LINE> <INDENT> return r.json() <NEW_LINE> <DEDENT> elif r.status_code == 403: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif r.status_code == 429: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def get_single_img_url(self, query): <NEW_LINE> <INDENT> if not query: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> if query == self.current_query: <NEW_LINE> <INDENT> self.current_offset += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.current_offset = 0 <NEW_LINE> self.current_query = query <NEW_LINE> <DEDENT> r = self.get_img_search_response(query, offset=self.current_offset) <NEW_LINE> if r['value']: <NEW_LINE> <INDENT> bing_url = r['value'][0]['contentUrl'] <NEW_LINE> imgr = requests.get(bing_url) <NEW_LINE> if imgr.status_code == 200: <NEW_LINE> <INDENT> return imgr.url <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> <DEDENT> def get_multiple_img_url(self, query, count=10): <NEW_LINE> <INDENT> if not query: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> r = self.get_img_search_response(query, count=count) <NEW_LINE> if r['value']: <NEW_LINE> <INDENT> urls = [] <NEW_LINE> for item in r['value']: <NEW_LINE> <INDENT> bing_url = item['contentUrl'] <NEW_LINE> imgr = requests.get(bing_url) <NEW_LINE> if imgr.status_code == 200: <NEW_LINE> <INDENT> urls.append(imgr.url) <NEW_LINE> <DEDENT> <DEDENT> return urls <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [] | Interact with Microsoft Bing Search API | 6259908544b2445a339b7707 |
class DatePicker(TextInput): <NEW_LINE> <INDENT> pHint_x = NumericProperty(0.9) <NEW_LINE> pHint_y = NumericProperty(0.9) <NEW_LINE> pHint = ReferenceListProperty(pHint_x ,pHint_y) <NEW_LINE> format = StringProperty("%Y/%m/%d") <NEW_LINE> def __init__(self, touch_switch=False, *args, **kwargs): <NEW_LINE> <INDENT> super(DatePicker, self).__init__(*args, **kwargs) <NEW_LINE> self.touch_switch = touch_switch <NEW_LINE> self.init_ui() <NEW_LINE> <DEDENT> def init_ui(self): <NEW_LINE> <INDENT> if not self.text: <NEW_LINE> <INDENT> self.text = datetime.now().strftime(self.format) <NEW_LINE> <DEDENT> dt = datetime.strptime(self.text, self.format) <NEW_LINE> self.cal = CalendarWidget(as_popup=True, touch_switch=self.touch_switch, active_date=[dt.day, dt.month, dt.year]) <NEW_LINE> self.popup = Popup(content=self.cal, on_dismiss=self.update_value, title="") <NEW_LINE> self.cal.parent_popup = self.popup <NEW_LINE> self.bind(focus=self.show_popup) <NEW_LINE> <DEDENT> def show_popup(self, isnt, val): <NEW_LINE> <INDENT> self.popup.size_hint=self.pHint <NEW_LINE> if val: <NEW_LINE> <INDENT> Window.release_all_keyboards() <NEW_LINE> self.popup.open() <NEW_LINE> <DEDENT> <DEDENT> def update_value(self, inst): <NEW_LINE> <INDENT> dt = self.cal.active_date <NEW_LINE> self.text = datetime(dt[2], dt[1], dt[0]).strftime(self.format) <NEW_LINE> self.focus = False | Date picker is a textinput, if it focused shows popup with calendar
which allows you to define the popup dimensions using pHint_x, pHint_y,
and the pHint lists. The `format` property defines how the date is
formatted in string using strftime() and strptime().
For example in kv:
DatePicker:
pHint: 0.7,0.4
text: "2017-03-23"
format: "%Y-%m-%s"
would result in a size_hint of 0.7,0.4 being used to create the popup | 625990857cff6e4e811b7598 |
class IPsecEnforcerNotification(object): <NEW_LINE> <INDENT> process_fqdn = 'localhost' <NEW_LINE> process_port = 8082 <NEW_LINE> @classmethod <NEW_LINE> def listener(cls): <NEW_LINE> <INDENT> LOG.info("IPsec Enforcer Notification agent started") <NEW_LINE> ipsec_enforcer_socket = Listener((cls.process_fqdn, cls.process_port)) <NEW_LINE> while True: <NEW_LINE> <INDENT> conn = ipsec_enforcer_socket.accept() <NEW_LINE> data = conn.recv() <NEW_LINE> notification_type = data.pop('notification_type') <NEW_LINE> if notification_type == 'REGISTRATION': <NEW_LINE> <INDENT> IPsecEnforcerRegistrationNotification.start(**data) <NEW_LINE> <DEDENT> elif notification_type == 'DEREGISTRATION': <NEW_LINE> <INDENT> IPsecEnforcerRegistrationNotification.start(**data) <NEW_LINE> <DEDENT> elif notification_type == 'CONFIG_UPDATE': <NEW_LINE> <INDENT> IPsecConfigUpdateNotification.record_update(**data) <NEW_LINE> <DEDENT> elif notification_type == 'CONFIG_DELETE': <NEW_LINE> <INDENT> IPsecConfigUpdateNotification.vpnbind_record_delete(**data) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def client(cls, resource, record=None, record_update=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> client = Client((cls.process_fqdn, cls.process_port)) <NEW_LINE> <DEDENT> except socket_error: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if record_update is not None: <NEW_LINE> <INDENT> notification_type = 'CONFIG_UPDATE' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if resource not in VPN_BIND: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> notification_type = 'CONFIG_DELETE' <NEW_LINE> <DEDENT> notification_message = { 'notification_type': notification_type, } <NEW_LINE> if resource is not None: <NEW_LINE> <INDENT> notification_message.update({'resource': resource}) <NEW_LINE> <DEDENT> if record is not None: <NEW_LINE> <INDENT> notification_message.update({'record': record}) <NEW_LINE> <DEDENT> if record_update is not None: <NEW_LINE> <INDENT> notification_message.update({'record_update': record_update}) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> client.send(notification_message) <NEW_LINE> <DEDENT> except socket_error: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def client_ipsecenforcer_register(cls, ipsecenforcer_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> client = Client((cls.process_fqdn, cls.process_port)) <NEW_LINE> <DEDENT> except socket_error: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> mapping_records = ( IPsecEnforcerInfo.get_ipsecenforcer_to_vpnendpoint_map( ipsecenforcer_id) ) <NEW_LINE> notification_message = { 'notification_type': 'REGISTRATION', 'ipsecenforcer_id': ipsecenforcer_id, 'mapping_records': mapping_records } <NEW_LINE> try: <NEW_LINE> <INDENT> client.send(notification_message) <NEW_LINE> <DEDENT> except socket_error: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def client_ipsecenforcer_deregister(cls, ipsecenforcer_id, mapping_records): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> client = Client((cls.process_fqdn, cls.process_port)) <NEW_LINE> <DEDENT> except socket_error: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> notification_message = { 'notification_type': 'DEREGISTRATION', 'ipsecenforcer_id': ipsecenforcer_id, 'mapping_records': mapping_records } <NEW_LINE> try: <NEW_LINE> <INDENT> client.send(notification_message) <NEW_LINE> <DEDENT> except socket_error: <NEW_LINE> <INDENT> return | This notification module notifies the IPSec enforcer about
any new peer IPsec Enforcer registrations or configuration updates, and
runs as an independent background process.
This process is started by a custom command. The command name is
'ipsecenforcernotify'. This custom command is defined in 'management'
module. | 62599085091ae35668706797 |
class User(Model): <NEW_LINE> <INDENT> id = IDField() <NEW_LINE> query_id = TextField() <NEW_LINE> name = TextField() <NEW_LINE> contacts_statistics = MapField() <NEW_LINE> @staticmethod <NEW_LINE> def statistics_to_chart(data): <NEW_LINE> <INDENT> data = data['contacts_statistics'] <NEW_LINE> chart_data = {} <NEW_LINE> for key, item in data.items(): <NEW_LINE> <INDENT> print(key, item) <NEW_LINE> chart_data[key] = {"label": [], "data": []} <NEW_LINE> for label, value in item.items(): <NEW_LINE> <INDENT> chart_data[key]["label"].append(label) <NEW_LINE> chart_data[key]["data"].append(value) <NEW_LINE> <DEDENT> <DEDENT> return chart_data | User Class Model, extending from ```fireo.models.Model``` package.
Attributes
----------
id : str
User unique id composed by subject id given by google auth.
name : str
User display name.
contacts_statistics : dict
dictionary containing the statistics about contacts, in other words,
contacts per domain, contacts per City Address, contacts per organization | 625990855fc7496912d49016 |
class MultiDict: <NEW_LINE> <INDENT> def __init__(self, keylen): <NEW_LINE> <INDENT> if keylen < 10 or keylen > 26: <NEW_LINE> <INDENT> raise Exception('Error: unknown key length: {0:d}'.format(keylen)) <NEW_LINE> <DEDENT> self._dict = defaultdict(list) <NEW_LINE> self._keylen = keylen <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self._dict[self.key2hash(key, self._keylen)] <NEW_LINE> <DEDENT> def __setitem__(self, key, val): <NEW_LINE> <INDENT> self.add(key, val) <NEW_LINE> <DEDENT> def add(self, key, val): <NEW_LINE> <INDENT> h = MultiDict.key2hash(key, self._keylen) <NEW_LINE> self._dict[h].append(val) <NEW_LINE> <DEDENT> def itemcount(self): <NEW_LINE> <INDENT> return sum([len(val) for val in self._dict.values()]) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def key2hash(key, keylen): <NEW_LINE> <INDENT> primes = PRIMES_26 <NEW_LINE> return reduce(mul, [p * n for (p, n) in zip(primes[-keylen:], key) if n > 0], 1) <NEW_LINE> <DEDENT> def print(self, header): <NEW_LINE> <INDENT> max_display_len = 1 <NEW_LINE> print('{0:s} (#k={1:d}, #v={2:d}):'.format( header, len(list(self._dict.keys())), self.itemcount() )) <NEW_LINE> for key in self._dict.keys(): <NEW_LINE> <INDENT> val = self._dict[key] <NEW_LINE> ellipsis = ' ... ({0:d})'.format(len(val)) <NEW_LINE> print(' key={0:s}, val={1:s}{2:s}'.format( key, val[0:max_display_len].__str__(), ellipsis if len(val) > max_display_len else '' )) <NEW_LINE> <DEDENT> <DEDENT> def test_sample_value(self): <NEW_LINE> <INDENT> return list(self._dict.values())[0] | A dict in which the values are lists. | 62599085ec188e330fdfa402 |
class TurtleOpenLoop(TurtleKinematics): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> TurtleKinematics.__init__(self) <NEW_LINE> self.turtle_pose = Pose(x=0, y=0, theta=0) <NEW_LINE> <DEDENT> def move_untill(self, time, vel, ang): <NEW_LINE> <INDENT> now = rospy.get_time() <NEW_LINE> wait_time = now + time <NEW_LINE> rate = rospy.Rate(10) <NEW_LINE> while now < wait_time: <NEW_LINE> <INDENT> self.move_general(vel, ang) <NEW_LINE> now = rospy.get_time() <NEW_LINE> rate.sleep() <NEW_LINE> <DEDENT> self.stop() <NEW_LINE> <DEDENT> def go_to_point_relative(self, x, y, vel_lin=0.5, vel_ang=0.5): <NEW_LINE> <INDENT> t_x = x/vel_lin <NEW_LINE> t_y = y/vel_lin <NEW_LINE> t_rot = 0.5*math.pi/vel_ang <NEW_LINE> self.move_untill(t_x, vel_lin, 0) <NEW_LINE> self.move_untill(t_rot, 0, vel_ang) <NEW_LINE> self.move_untill(t_y, vel_lin, 0) <NEW_LINE> <DEDENT> def go_to_point_absolute(self, x, y, vel_lin=0.5, vel_ang=0.5): <NEW_LINE> <INDENT> pass | Class for open-loop control of the turtle.
The turtle's movement should follow classical kinematics:
- Displacement = Velocity x Time | 625990854a966d76dd5f0a3c |
@pytest.mark.crypto <NEW_LINE> class TestAes128Decrypt(object): <NEW_LINE> <INDENT> decrypted = ( (b'f\x00\xfd\x0f\x1av\n\x92\x1a\xb1|\xb4\xe0\xa7\xdd^', b'\x49\x00\x00\x00\x00\x00\x9f\x29\x28\x01\x00\x00\x00\x00\x00\x05'), (b'\xcf\x19c`\x19uP.9]?\xa40\x00\xa91', b')(\x01\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00'), (b'M4\xe1\x85\xf3t:\x8b\xb7>\xa6jL\xe8\xc2\xa3', b'I\x00\x00\x00\x00\x00\x9f\x00\x00\x00\x00\x00\x00\x00\x00\x00'), (b'\x7f5\x91\xd3o\xd5\x17\xa3{m\xe9\xe0\xdf\x93Kz', b'\x2b\x7e\x15\x16\x28\xae\xd2\xa6\xab\xf7\x15\x88\x09\xcf\x4f\x3c'), ) <NEW_LINE> @pytest.mark.parametrize('ciphertext, expected_plaintext', decrypted) <NEW_LINE> def test_decrypt_known_messages(self, default_test_key, ciphertext, expected_plaintext): <NEW_LINE> <INDENT> assert utils.aes128_decrypt(default_test_key, ciphertext) == expected_plaintext | Tests the implementation of the AES 128 decryption.
lorawan.utils.aes128_decrypt decrypts a message using AES128. | 62599085aad79263cf430311 |
class AccountHandler(Handler): <NEW_LINE> <INDENT> def render_front(self, error=""): <NEW_LINE> <INDENT> user = db.GqlQuery("SELECT * FROM User") <NEW_LINE> self.render("account.html", user=user) <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> self.render_front() | Handler subclass for the "/account" extension. | 6259908560cbc95b06365b17 |
class Topology(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'created_date_time': {'readonly': True}, 'last_modified': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'}, 'last_modified': {'key': 'lastModified', 'type': 'iso-8601'}, 'resources': {'key': 'resources', 'type': '[TopologyResource]'}, } <NEW_LINE> def __init__( self, *, resources: Optional[List["TopologyResource"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(Topology, self).__init__(**kwargs) <NEW_LINE> self.id = None <NEW_LINE> self.created_date_time = None <NEW_LINE> self.last_modified = None <NEW_LINE> self.resources = resources | Topology of the specified resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: GUID representing the operation id.
:vartype id: str
:ivar created_date_time: The datetime when the topology was initially created for the resource
group.
:vartype created_date_time: ~datetime.datetime
:ivar last_modified: The datetime when the topology was last modified.
:vartype last_modified: ~datetime.datetime
:param resources: A list of topology resources.
:type resources: list[~azure.mgmt.network.v2020_03_01.models.TopologyResource] | 625990857b180e01f3e49e10 |
class UnbindCmqDeadLetterRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.SourceQueueName = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.SourceQueueName = params.get("SourceQueueName") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set)) | UnbindCmqDeadLetter request parameter structure
| 62599085167d2b6e312b8342 |
class CustomBuild(build_ext): <NEW_LINE> <INDENT> def run(self) -> None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> subprocess.run(['wget', '--version'], check=True) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> raise RuntimeError( "Wget must be installed to build pyquest " ) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> subprocess.run(['tar', '--version'], check=True) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> raise RuntimeError( "Tar must be installed to build pyquest: " ) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> subprocess.run(['cmake', '--version'], check=True) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> raise RuntimeError( "Cmake must be installed to build pyquest" ) <NEW_LINE> <DEDENT> for ext in self.extensions: <NEW_LINE> <INDENT> self.build_extension(ext) <NEW_LINE> <DEDENT> <DEDENT> def build_extension(self, ext: Extension) -> None: <NEW_LINE> <INDENT> if not ext.name == 'questlib': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> old_path = os.getcwd() <NEW_LINE> print('__file__', __file__) <NEW_LINE> src_path = os.path.dirname(os.path.realpath(__file__)) <NEW_LINE> print('src_path', src_path) <NEW_LINE> os.chdir(src_path) <NEW_LINE> QuEST_release_link = 'https://github.com/QuEST-Kit/QuEST/archive/v3.2.0.tar.gz' <NEW_LINE> if not os.path.exists(os.path.join(src_path, 'QuEST')): <NEW_LINE> <INDENT> print('quest_path', os.path.join(src_path, 'QuEST/')) <NEW_LINE> os.makedirs(os.path.join(src_path, 'QuEST/')) <NEW_LINE> subprocess.run(['wget', QuEST_release_link, '-O', os.path.join(src_path, 'QuEST.tar.gz')], check=True) <NEW_LINE> subprocess.run(['tar', '-xzvf', os.path.join(src_path, 'QuEST.tar.gz'), '-C', os.path.join(src_path, 'QuEST/'), '--strip-components=1'], check=True) <NEW_LINE> <DEDENT> if platform.system() == 'Darwin': <NEW_LINE> <INDENT> if 'CFLAGS' in os.environ.keys(): <NEW_LINE> <INDENT> os.environ['CFLAGS'] = os.environ['CFLAGS'] + " -Wno-implicit-function-declaration" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> os.environ['CFLAGS'] = "-Wno-implicit-function-declaration" <NEW_LINE> <DEDENT> args_for_cmake = ['-DMULTITHREADED=0', "-DCMAKE_POLICY_DEFAULT_CMP0012=NEW"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> args_for_cmake = ["-DCMAKE_POLICY_DEFAULT_CMP0012=NEW"] <NEW_LINE> <DEDENT> os.chdir(os.path.join(src_path, 'pyquest_cffi/questlib/')) <NEW_LINE> run_command_make = ['cmake', os.path.join(src_path, 'QuEST/QuEST/')] + args_for_cmake <NEW_LINE> run_command_build = ['cmake', '--build', '.'] <NEW_LINE> subprocess.run(run_command_make, check=True) <NEW_LINE> subprocess.run(run_command_build, check=True) <NEW_LINE> from build_quest import build_quest_so <NEW_LINE> build_quest_so() <NEW_LINE> print(os.listdir('.')) <NEW_LINE> os.chdir(old_path) | Custom C builder | 62599085e1aae11d1e7cf5bf |
class QueryFilterAggregation(QueryAggregation): <NEW_LINE> <INDENT> def __init__(self, type: str, match: str, matching_results: int, *, aggregations: List['QueryAggregation'] = None) -> None: <NEW_LINE> <INDENT> self.type = type <NEW_LINE> self.match = match <NEW_LINE> self.matching_results = matching_results <NEW_LINE> self.aggregations = aggregations <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, _dict: Dict) -> 'QueryFilterAggregation': <NEW_LINE> <INDENT> args = {} <NEW_LINE> if 'type' in _dict: <NEW_LINE> <INDENT> args['type'] = _dict.get('type') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'type\' not present in QueryFilterAggregation JSON' ) <NEW_LINE> <DEDENT> if 'match' in _dict: <NEW_LINE> <INDENT> args['match'] = _dict.get('match') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'match\' not present in QueryFilterAggregation JSON' ) <NEW_LINE> <DEDENT> if 'matching_results' in _dict: <NEW_LINE> <INDENT> args['matching_results'] = _dict.get('matching_results') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'matching_results\' not present in QueryFilterAggregation JSON' ) <NEW_LINE> <DEDENT> if 'aggregations' in _dict: <NEW_LINE> <INDENT> args['aggregations'] = [ QueryAggregation.from_dict(x) for x in _dict.get('aggregations') ] <NEW_LINE> <DEDENT> return cls(**args) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_dict(cls, _dict): <NEW_LINE> <INDENT> return cls.from_dict(_dict) <NEW_LINE> <DEDENT> def to_dict(self) -> Dict: <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'type') and self.type is not None: <NEW_LINE> <INDENT> _dict['type'] = self.type <NEW_LINE> <DEDENT> if hasattr(self, 'match') and self.match is not None: <NEW_LINE> <INDENT> _dict['match'] = self.match <NEW_LINE> <DEDENT> if hasattr(self, 'matching_results') and self.matching_results is not None: <NEW_LINE> <INDENT> _dict['matching_results'] = self.matching_results <NEW_LINE> <DEDENT> if hasattr(self, 'aggregations') and self.aggregations is not None: <NEW_LINE> <INDENT> _dict['aggregations'] = [x.to_dict() for x in self.aggregations] <NEW_LINE> <DEDENT> return _dict <NEW_LINE> <DEDENT> def _to_dict(self): <NEW_LINE> <INDENT> return self.to_dict() <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return json.dumps(self.to_dict(), indent=2) <NEW_LINE> <DEDENT> def __eq__(self, other: 'QueryFilterAggregation') -> bool: <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other: 'QueryFilterAggregation') -> bool: <NEW_LINE> <INDENT> return not self == other | A modifier that will narrow down the document set of the sub aggregations it precedes.
:attr str match: The filter written in Discovery Query Language syntax applied
to the documents before sub aggregations are run.
:attr int matching_results: Number of documents matching the filter.
:attr List[QueryAggregation] aggregations: (optional) An array of sub
aggregations. | 625990857047854f46340f0d |
class LibraryPackage(models.Model): <NEW_LINE> <INDENT> full_title = models.CharField(max_length=100) <NEW_LINE> description = models.TextField(null=True, blank=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.full_title | Model represents the library price package
Example: Free, Premium, Mixed (Some premium & some free breakdowns) | 625990855fdd1c0f98e5fad8 |
class GrowlMESender(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> import gntp.notifier <NEW_LINE> self.growl = gntp.notifier.GrowlNotifier( applicationName = "MacroServer", notifications = ["Key Modifier","Debug"], defaultNotifications = ["Key Modifier"], ) <NEW_LINE> self.growl.register() <NEW_LINE> self.sendStartUpMessage() <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> logging.warning('gntp not installed') <NEW_LINE> <DEDENT> <DEDENT> def sendStartUpMessage(self): <NEW_LINE> <INDENT> self.growl.notify( noteType = "Key Modifier", title = "MacroServer Mac started up..", description = "Will Wade", icon = "http://example.com/icon.png", sticky = False, priority = -1, ) <NEW_LINE> <DEDENT> def sendMessage(self, modifier, state): <NEW_LINE> <INDENT> if state: <NEW_LINE> <INDENT> msg = 'On' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = 'Off' <NEW_LINE> <DEDENT> self.growl.notify( noteType = "Key Modifier", title = modifier+" has been turned "+msg, description = modifier+" has been turned "+msg, icon = "http://example.com/icon.png", sticky = False, priority = -1, ) | https://github.com/kfdm/gntp
NB: You need to pip install --upgrade gntp, and then http://www.canebas.org/LWC/Tutorials/Growl_networking/ | 625990853617ad0b5ee07ca9 |
class MyghtyTemplatePlugin(object): <NEW_LINE> <INDENT> extension = "myt" <NEW_LINE> def __init__(self, extra_vars_func=None, options=None): <NEW_LINE> <INDENT> if options is None: <NEW_LINE> <INDENT> options = {} <NEW_LINE> <DEDENT> myt_opts = {} <NEW_LINE> for k, v in options.iteritems(): <NEW_LINE> <INDENT> if k.startswith('myghty.'): <NEW_LINE> <INDENT> myt_opts[k[7:]] = v <NEW_LINE> <DEDENT> <DEDENT> import myghty.interp <NEW_LINE> self.extra_vars = extra_vars_func <NEW_LINE> self.interpreter = myghty.interp.Interpreter(**myt_opts) <NEW_LINE> <DEDENT> def load_template(self, template_path): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def render(self, info, format="html", fragment=False, template=None, output_encoding=None, encoding_errors=None, disable_unicode=None): <NEW_LINE> <INDENT> buf = StringIO() <NEW_LINE> global_args = info.pop('_global_args') <NEW_LINE> if self.extra_vars: <NEW_LINE> <INDENT> global_args.update(self.extra_vars()) <NEW_LINE> <DEDENT> optional_args = {} <NEW_LINE> if fragment: <NEW_LINE> <INDENT> optional_args['disable_wrapping'] = True <NEW_LINE> <DEDENT> if output_encoding: <NEW_LINE> <INDENT> optional_args['output_encoding'] = output_encoding <NEW_LINE> <DEDENT> if encoding_errors: <NEW_LINE> <INDENT> optional_args['encoding_errors'] = encoding_errors <NEW_LINE> <DEDENT> if disable_unicode: <NEW_LINE> <INDENT> optional_args['disable_unicode'] = disable_unicode <NEW_LINE> <DEDENT> self.interpreter.execute(template, request_args=info, global_args=global_args, out_buffer=buf, **optional_args) <NEW_LINE> return buf.getvalue() | Myghty Template Plugin
This Myghty Template Plugin varies from the official BuffetMyghty in that
it will properly populate all the default Myghty variables needed and
render fragments. | 62599085aad79263cf430313 |
class MatchTUNNELID(MatchField): <NEW_LINE> <INDENT> name = 'tun_id' <NEW_LINE> oxm_field = OxmOfbMatchField.OFPXMT_OFB_TUNNEL_ID <NEW_LINE> def as_of_tlv(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = int(self.value) <NEW_LINE> mask = None <NEW_LINE> oxm_hasmask = False <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> value, mask = map(int, self.value.split('/')) <NEW_LINE> oxm_hasmask = True <NEW_LINE> <DEDENT> value_bytes = value.to_bytes(8, 'big') <NEW_LINE> if mask: <NEW_LINE> <INDENT> value_bytes += mask.to_bytes(8, 'big') <NEW_LINE> <DEDENT> return OxmTLV(oxm_field=self.oxm_field, oxm_hasmask=oxm_hasmask, oxm_value=value_bytes) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_of_tlv(cls, tlv): <NEW_LINE> <INDENT> value = int.from_bytes(tlv.oxm_value[:8], 'big') <NEW_LINE> if tlv.oxm_hasmask: <NEW_LINE> <INDENT> tunnel_mask = int.from_bytes(tlv.oxm_value[8:], 'big') <NEW_LINE> value = f'{value}/{tunnel_mask}' <NEW_LINE> <DEDENT> return cls(value) | Match for tunnel id. | 6259908550812a4eaa621971 |
class AlphaBetaAgent(MultiAgentSearchAgent): <NEW_LINE> <INDENT> def getAction(self, gameState): <NEW_LINE> <INDENT> action,value = self.maximizer(gameState, self.depth, -100000000, 100000000) <NEW_LINE> return action <NEW_LINE> <DEDENT> def maximizer(self,gameState,depth, alpha, betha): <NEW_LINE> <INDENT> if gameState.isWin() or gameState.isLose() or depth==0: <NEW_LINE> <INDENT> return None, self.evaluationFunction(gameState) <NEW_LINE> <DEDENT> value = -100000000 <NEW_LINE> for action in gameState.getLegalActions(0): <NEW_LINE> <INDENT> a,v = self.minimizer(gameState.generateSuccessor(0,action), depth, 1, alpha, betha) <NEW_LINE> if v > value: <NEW_LINE> <INDENT> baction = action <NEW_LINE> value = v <NEW_LINE> <DEDENT> if value > betha: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> alpha = max(alpha, value) <NEW_LINE> <DEDENT> return baction, value <NEW_LINE> <DEDENT> def minimizer(self, gameState, depth, ghost, alpha, betha): <NEW_LINE> <INDENT> if gameState.isWin() or gameState.isLose() or depth==0: <NEW_LINE> <INDENT> return None, self.evaluationFunction(gameState) <NEW_LINE> <DEDENT> value = 100000000 <NEW_LINE> for action in gameState.getLegalActions(ghost): <NEW_LINE> <INDENT> if ghost == gameState.getNumAgents()-1: <NEW_LINE> <INDENT> a,v = self.maximizer(gameState.generateSuccessor(ghost,action), depth - 1, alpha, betha) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> a,v = self.minimizer(gameState.generateSuccessor(ghost,action), depth, ghost + 1, alpha, betha) <NEW_LINE> <DEDENT> if v < value: <NEW_LINE> <INDENT> baction = action <NEW_LINE> value = v <NEW_LINE> <DEDENT> if value < alpha: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> betha = min(betha, value) <NEW_LINE> <DEDENT> return baction, value | Your minimax agent with alpha-beta pruning (question 3) | 62599085f9cc0f698b1c6078 |
class CleanupPlannerNode: <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> rospy.init_node("cleanup_planner_node") <NEW_LINE> self.pioneer = pioneer_wrapper.PioneerWrapper() <NEW_LINE> rospy.Subscriber("robot_pose", Pose2D, self.robot_pose_callback) <NEW_LINE> rospy.Subscriber("goal_pose", Pose2D, self.goal_pose_callback) <NEW_LINE> rospy.Subscriber("cleanup_objects", CleanupObjectArray, self.object_callback) <NEW_LINE> rospy.Subscriber("cleanup_zones", CleanupZoneArray, self.cleanup_zone_callback) <NEW_LINE> self.control = CleanupControl(self) <NEW_LINE> self.control.setPrintFunction(rospy.loginfo) <NEW_LINE> rospy.on_shutdown(self.shutdown_hook) <NEW_LINE> r = rospy.Rate(30) <NEW_LINE> while not rospy.is_shutdown(): <NEW_LINE> <INDENT> self.control.run() <NEW_LINE> r.sleep() <NEW_LINE> <DEDENT> <DEDENT> def robot_pose_callback(self, msg): <NEW_LINE> <INDENT> self.control.robot_pose.x = msg.x <NEW_LINE> self.control.robot_pose.y = msg.y <NEW_LINE> self.control.robot_pose.theta = msg.theta <NEW_LINE> <DEDENT> def goal_pose_callback(self, msg): <NEW_LINE> <INDENT> if (msg.x != self.control.user_goal_pose.x or msg.y != self.control.user_goal_pose.y): <NEW_LINE> <INDENT> self.control.user_goal_pose.x = msg.x <NEW_LINE> self.control.user_goal_pose.y = msg.y <NEW_LINE> self.control.user_goal_pose.theta = msg.theta <NEW_LINE> rospy.loginfo("Updated robot goal pose") <NEW_LINE> self.control.switchTo('visit_objects') <NEW_LINE> <DEDENT> if msg.x == -1337: <NEW_LINE> <INDENT> self.control.switchTo('stop_robot') <NEW_LINE> rospy.loginfo("Removed robot goal pose") <NEW_LINE> <DEDENT> <DEDENT> def object_callback(self, msg): <NEW_LINE> <INDENT> self.control.cleanup_objects = msg.objects <NEW_LINE> <DEDENT> def cleanup_zone_callback(self, msg): <NEW_LINE> <INDENT> self.control.set_cleanup_zones(msg.zones) <NEW_LINE> <DEDENT> def shutdown_hook(self): <NEW_LINE> <INDENT> rospy.logdebug("Storing gripper on shutdown!") <NEW_LINE> self.pioneer.store_gripper() <NEW_LINE> rospy.sleep(2.0) | ROS node that spins a FSA to control for the cleanup task | 62599085e1aae11d1e7cf5c0 |
class Summary: <NEW_LINE> <INDENT> def __init__(self, debug_logger: logging.Logger): <NEW_LINE> <INDENT> self.return_codes = [] <NEW_LINE> self.return_codes_frequencies = {} <NEW_LINE> self.return_codes_iterations = {int: []} <NEW_LINE> self.debug_logger = debug_logger <NEW_LINE> <DEDENT> def add_return_code(self, return_code: int): <NEW_LINE> <INDENT> self.return_codes.append(return_code) <NEW_LINE> <DEDENT> def print_summary(self): <NEW_LINE> <INDENT> self.__make_analysis() <NEW_LINE> self.debug_logger.debug('Printing summary of return codes') <NEW_LINE> print('Summary:') <NEW_LINE> for key in self.return_codes_frequencies: <NEW_LINE> <INDENT> print(f'Return code: {key};' f' Frequency: {self.return_codes_frequencies[key]};' f' Iterations: {self.return_codes_iterations[key]}') <NEW_LINE> <DEDENT> <DEDENT> def get_most_frequent(self): <NEW_LINE> <INDENT> return max(self.return_codes_frequencies, key=self.return_codes_frequencies.get) if self.return_codes_frequencies else 1 <NEW_LINE> <DEDENT> def summarize_and_exit(self): <NEW_LINE> <INDENT> self.print_summary() <NEW_LINE> sys.exit(self.get_most_frequent()) <NEW_LINE> <DEDENT> def __make_analysis(self): <NEW_LINE> <INDENT> self.debug_logger.debug('Analyzing frequency of return codes') <NEW_LINE> for index in range(len(self.return_codes)): <NEW_LINE> <INDENT> current_return_code = self.return_codes[index] <NEW_LINE> self.__aggregate_frequency(current_return_code) <NEW_LINE> self.__add_iteration(current_return_code, index) <NEW_LINE> <DEDENT> <DEDENT> def __aggregate_frequency(self, return_code): <NEW_LINE> <INDENT> if return_code in self.return_codes_frequencies: <NEW_LINE> <INDENT> self.return_codes_frequencies[return_code] += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.return_codes_frequencies[return_code] = 1 <NEW_LINE> <DEDENT> <DEDENT> def __add_iteration(self, return_code, index): <NEW_LINE> <INDENT> if return_code in self.return_codes_iterations: <NEW_LINE> <INDENT> self.return_codes_iterations[return_code].append(index) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.return_codes_iterations[return_code] = [index] | Class holding functionality of summary of the
runner session.
Return codes of each command are aggregated.
A summary of frequencies and iterations
can be printed and most frequent return code returned. | 6259908571ff763f4b5e9307 |
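A minimal usage sketch for the Summary row above, assuming its flattened code has been restored to a normal class definition; the logger name and the return codes are illustrative.

    import logging

    logger = logging.getLogger("runner")       # hypothetical logger name
    summary = Summary(debug_logger=logger)     # class from the row above
    for code in (0, 0, 1, 0):                  # pretend return codes of four command runs
        summary.add_return_code(code)
    summary.print_summary()                    # prints frequency and iteration indices per code
    print(summary.get_most_frequent())         # -> 0, the dominant return code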
class _SpinnakerRequestReadDataRequest(object): <NEW_LINE> <INDENT> def __init__(self, channel, region_id, start_address, space_to_be_read): <NEW_LINE> <INDENT> if not isinstance(channel, list): <NEW_LINE> <INDENT> self._channel = [channel] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._channel = channel <NEW_LINE> <DEDENT> if not isinstance(region_id, list): <NEW_LINE> <INDENT> self._region_id = [region_id] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._region_id = region_id <NEW_LINE> <DEDENT> if not isinstance(start_address, list): <NEW_LINE> <INDENT> self._start_address = [start_address] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._start_address = start_address <NEW_LINE> <DEDENT> if not isinstance(space_to_be_read, list): <NEW_LINE> <INDENT> self._space_to_be_read = [space_to_be_read] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._space_to_be_read = space_to_be_read <NEW_LINE> <DEDENT> <DEDENT> def channel(self, request_id): <NEW_LINE> <INDENT> if len(self._channel) > request_id: <NEW_LINE> <INDENT> return self._channel[request_id] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> SpinnmanInvalidParameterTypeException( "request_id", "integer", "channel request needs to be" "comprised between 0 and {0:d}; current value: " "{1:d}".format(len(self._channel) - 1, request_id)) <NEW_LINE> <DEDENT> <DEDENT> def region_id(self, request_id): <NEW_LINE> <INDENT> if len(self._region_id) > request_id: <NEW_LINE> <INDENT> return self._region_id[request_id] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> SpinnmanInvalidParameterTypeException( "request_id", "integer", "region id request needs to be" "comprised between 0 and {0:d}; current value: " "{1:d}".format(len(self._region_id) - 1, request_id)) <NEW_LINE> <DEDENT> <DEDENT> def start_address(self, request_id): <NEW_LINE> <INDENT> if len(self._start_address) > request_id: <NEW_LINE> <INDENT> return self._start_address[request_id] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> SpinnmanInvalidParameterTypeException( "request_id", "integer", "start address request needs to be" "comprised between 0 and {0:d}; current value: " "{1:d}".format(len(self._start_address) - 1, request_id)) <NEW_LINE> <DEDENT> <DEDENT> def space_to_be_read(self, request_id): <NEW_LINE> <INDENT> if len(self._space_to_be_read) > request_id: <NEW_LINE> <INDENT> return self._space_to_be_read[request_id] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> SpinnmanInvalidParameterTypeException( "request_id", "integer", "space to be read request needs to be" "comprised between 0 and {0:d}; current value: " "{1:d}".format(len(self._space_to_be_read) - 1, request_id)) | Contains a set of requests which refer to the channels used
| 625990857cff6e4e811b759c |
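A brief usage sketch, assuming the class above is defined in scope; the constructor promotes scalar arguments to one-element lists, so the indexed getters work for a single request. The numeric values are illustrative.

    req = _SpinnakerRequestReadDataRequest(channel=1, region_id=2,
                                           start_address=0x1000, space_to_be_read=256)
    print(req.channel(0))           # -> 1
    print(req.start_address(0))     # -> 4096
    print(req.space_to_be_read(0))  # -> 256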
class BaseSerializer(object): <NEW_LINE> <INDENT> def __init__(self, activity_class): <NEW_LINE> <INDENT> self.activity_class = activity_class <NEW_LINE> <DEDENT> def check_type(self, data): <NEW_LINE> <INDENT> if not isinstance(data, Activity): <NEW_LINE> <INDENT> raise ValueError('we only know how to dump activities') <NEW_LINE> <DEDENT> <DEDENT> def loads(self, serialized_activity): <NEW_LINE> <INDENT> activity = serialized_activity <NEW_LINE> return activity <NEW_LINE> <DEDENT> def dumps(self, activity): <NEW_LINE> <INDENT> self.check_type(activity) <NEW_LINE> return activity | The base serializer class, only defines the signature for
loads and dumps
It serializes Activity objects | 625990855fc7496912d49018 |
class ComponentTestGitRepository(GitRepository): <NEW_LINE> <INDENT> def submodule_status(self): <NEW_LINE> <INDENT> out, err, ret = self._git_inout('submodule', ['status'], capture_stderr=True) <NEW_LINE> if ret: <NEW_LINE> <INDENT> raise GitRepositoryError("Cannot get submodule status: %s" % err.strip()) <NEW_LINE> <DEDENT> submodules = {} <NEW_LINE> for line in out.decode().splitlines(): <NEW_LINE> <INDENT> module = line.strip() <NEW_LINE> status = module[0] <NEW_LINE> if status == '-': <NEW_LINE> <INDENT> sha1, path = module[1:].rsplit(' ', 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> commitpath = module[1:].rsplit(' ', 1)[0] <NEW_LINE> sha1, path = commitpath.split(' ', 1) <NEW_LINE> <DEDENT> submodules[path] = (status, sha1) <NEW_LINE> <DEDENT> return submodules <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def check_testdata(cls, data): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> repo = cls('.') <NEW_LINE> <DEDENT> except GitRepositoryError: <NEW_LINE> <INDENT> raise SkipTest("Skipping '%s', since this is not a git checkout." % __name__) <NEW_LINE> <DEDENT> submodules = repo.submodule_status() <NEW_LINE> try: <NEW_LINE> <INDENT> status = submodules[data] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise SkipTest("Skipping '%s', testdata directory not a known " "submodule." % __name__) <NEW_LINE> <DEDENT> if status[0] == '-': <NEW_LINE> <INDENT> raise SkipTest("Skipping '%s', testdata directory not initialized. " "Consider doing 'git submodule update'" % __name__) <NEW_LINE> <DEDENT> <DEDENT> def ls_tree(self, treeish): <NEW_LINE> <INDENT> objs = self.list_tree(treeish, True) <NEW_LINE> blobs = [obj[3] for obj in objs if obj[1] == 'blob'] <NEW_LINE> return set(blobs) <NEW_LINE> <DEDENT> def get_head_author_subject(self): <NEW_LINE> <INDENT> out, err, ret = self._git_inout('format-patch', ['-1', '--stdout', '--subject-prefix='], capture_stderr=True) <NEW_LINE> if ret: <NEW_LINE> <INDENT> raise GitRepositoryError("Cannot get head author/subject: %s" % err.strip()) <NEW_LINE> <DEDENT> output = out.decode('utf-8') <NEW_LINE> for line in output.split('\n'): <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> if not line: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if line.startswith('From:'): <NEW_LINE> <INDENT> author = line.replace('From:', '').strip() <NEW_LINE> <DEDENT> elif line.startswith('Subject:'): <NEW_LINE> <INDENT> subject = line.replace('Subject:', '').strip() <NEW_LINE> <DEDENT> <DEDENT> return author, subject | Git repository class for component tests | 62599085283ffb24f3cf53fb |
class Constraint(object): <NEW_LINE> <INDENT> def __call__(self, w): <NEW_LINE> <INDENT> return w <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> return {} | Constraint template | 62599085dc8b845886d55115 |
class SequencingChemistry (DataEntityType): <NEW_LINE> <INDENT> _TypeDefinition = None <NEW_LINE> _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY <NEW_LINE> _Abstract = False <NEW_LINE> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'SequencingChemistry') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/tmp/tmpHOdumhxsds/PacBioBaseDataModel.xsd', 473, 1) <NEW_LINE> _ElementMap = DataEntityType._ElementMap.copy() <NEW_LINE> _AttributeMap = DataEntityType._AttributeMap.copy() <NEW_LINE> __DyeSet = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'DyeSet'), 'DyeSet', '__httppacificbiosciences_comPacBioBaseDataModel_xsd_SequencingChemistry_httppacificbiosciences_comPacBioBaseDataModel_xsdDyeSet', False, pyxb.utils.utility.Location('/tmp/tmpHOdumhxsds/PacBioBaseDataModel.xsd', 480, 5), ) <NEW_LINE> DyeSet = property(__DyeSet.value, __DyeSet.set, None, None) <NEW_LINE> _ElementMap.update({ __DyeSet.name() : __DyeSet }) <NEW_LINE> _AttributeMap.update({ }) | A container for a set of analogs | 62599085d486a94d0ba2db12 |
class UserViewSet(viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> queryset = User.objects.all() <NEW_LINE> serializer_class = UserSerializer <NEW_LINE> lookup_field = "username" | This viewset automatically provides `list` and `detail` actions. | 6259908544b2445a339b770a |
class AeffServicePar: <NEW_LINE> <INDENT> def __init__(self,ebins,czbins,aeff_egy_par,aeff_coszen_par,**params): <NEW_LINE> <INDENT> logging.info('Initializing AeffServicePar...') <NEW_LINE> self.ebins = ebins <NEW_LINE> self.czbins = czbins <NEW_LINE> aeff_coszen_par_str = from_json(find_resource(aeff_coszen_par)) <NEW_LINE> aeff2d_nc = self.get_aeff_flavor('NC',aeff_egy_par,aeff_coszen_par_str) <NEW_LINE> aeff2d_nc_bar = self.get_aeff_flavor('NC_bar',aeff_egy_par,aeff_coszen_par_str) <NEW_LINE> self.aeff_dict = {} <NEW_LINE> logging.info("Creating effective area parametric dict...") <NEW_LINE> for flavor in ['nue','nue_bar','numu','numu_bar','nutau','nutau_bar']: <NEW_LINE> <INDENT> flavor_dict = {} <NEW_LINE> logging.debug("Working on %s effective areas"%flavor) <NEW_LINE> aeff2d = self.get_aeff_flavor(flavor,aeff_egy_par,aeff_coszen_par_str) <NEW_LINE> flavor_dict['cc'] = aeff2d <NEW_LINE> flavor_dict['nc'] = aeff2d_nc_bar if 'bar' in flavor else aeff2d_nc <NEW_LINE> self.aeff_dict[flavor] = flavor_dict <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def get_aeff_flavor(self,flavor,aeff_egy_par,aeff_coszen_par): <NEW_LINE> <INDENT> aeff_file = aeff_egy_par[flavor] <NEW_LINE> aeff_arr = np.loadtxt(open_resource(aeff_file)).T <NEW_LINE> aeff_func = interp1d(aeff_arr[0], aeff_arr[1], kind='linear', bounds_error=False, fill_value=0) <NEW_LINE> czcen = get_bin_centers(self.czbins) <NEW_LINE> ecen = get_bin_centers(self.ebins) <NEW_LINE> aeff1d = aeff_func(ecen) <NEW_LINE> if aeff1d[-1] == 0.0: aeff1d[-1] = aeff1d[-2] <NEW_LINE> aeff2d = np.reshape(np.repeat(aeff1d, len(czcen)), (len(ecen), len(czcen))) <NEW_LINE> cz_dep = eval(aeff_coszen_par[flavor.strip('_bar')])(czcen) <NEW_LINE> cz_dep *= len(cz_dep)/np.sum(cz_dep) <NEW_LINE> return (aeff2d*cz_dep) <NEW_LINE> <DEDENT> def get_aeff(self,*kwargs): <NEW_LINE> <INDENT> return self.aeff_dict | Takes a .json file with the names of .dat files, and
creates a dictionary of the 2D effective area in terms of energy
and coszen, for each flavor (nue,nue_bar,numu,...) and interaction
type (CC, NC)
The final aeff dict for each flavor is in units of [m^2] in each
energy/coszen bin. | 625990857c178a314d78e998 |
class ColorLevelFormatter(logging.Formatter, object): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> colors = kwargs.pop('colors', None) <NEW_LINE> super(ColorLevelFormatter, self).__init__(*args, **kwargs) <NEW_LINE> if colors is not None: <NEW_LINE> <INDENT> self._level_color_mapping = dict(colors) <NEW_LINE> <DEDENT> <DEDENT> _level_color_mapping = { logging.DEBUG : 'magenta', logging.INFO : 'green', logging.WARN : 'yellow', logging.ERROR : 'red' } <NEW_LINE> def get_color(self, levelno): <NEW_LINE> <INDENT> if not hasattr(self, '_color_levels'): <NEW_LINE> <INDENT> self._color_levels = list(sorted(self._level_color_mapping.keys())) <NEW_LINE> <DEDENT> color = None <NEW_LINE> for color_level in self._color_levels: <NEW_LINE> <INDENT> if levelno >= color_level: <NEW_LINE> <INDENT> color = self._level_color_mapping[color_level] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return color <NEW_LINE> <DEDENT> def format(self, record): <NEW_LINE> <INDENT> color = self.get_color(record.levelno) <NEW_LINE> if color is not None: <NEW_LINE> <INDENT> record.clevelname = color_text(record.levelname, color) <NEW_LINE> record.cstart, record.cstop = color_text("=", color).split("=") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> record.clevelname = record.levelname <NEW_LINE> record.cstart, record.cstop = color_text("=", 'default').split("=") <NEW_LINE> <DEDENT> record.dstart, record.dstop = color_text("=", 'default').split("=") <NEW_LINE> return super(ColorLevelFormatter, self).format(record) | A formatter for colors. | 62599085ad47b63b2c5a93ae |
class BlogTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = Client() <NEW_LINE> <DEDENT> def _test_get(self, url, status_code, active_page=None, **kwargs): <NEW_LINE> <INDENT> response = self.client.get(url, **kwargs) <NEW_LINE> self.assertEqual(response.status_code, status_code) <NEW_LINE> if active_page: <NEW_LINE> <INDENT> self.assertEqual(response.context['activepage'], active_page) <NEW_LINE> <DEDENT> <DEDENT> def test_achievements(self): <NEW_LINE> <INDENT> self._test_get('/achievements/', 200, 'Achievements') <NEW_LINE> <DEDENT> def test_activities(self): <NEW_LINE> <INDENT> self._test_get('/activities/', 200, 'Activities') | Test blog app | 6259908566673b3332c31f5c |
class RandomSampleCrop(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.sample_options = ( None, ) <NEW_LINE> <DEDENT> def __call__(self, image): <NEW_LINE> <INDENT> height, width, _ = image.shape <NEW_LINE> while True: <NEW_LINE> <INDENT> mode = random.choice(self.sample_options) <NEW_LINE> if mode is None: <NEW_LINE> <INDENT> return image <NEW_LINE> <DEDENT> for _ in range(50): <NEW_LINE> <INDENT> current_image = image <NEW_LINE> w = random.uniform(0.5 * width, width) <NEW_LINE> h = random.uniform(0.5 * height, height) <NEW_LINE> if h / w < 0.5 or h / w > 2: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> left = random.uniform(width - w) <NEW_LINE> top = random.uniform(height - h) <NEW_LINE> rect = np.array([int(left), int(top), int(left+w), int(top+h)]) <NEW_LINE> current_image = current_image[rect[1]:rect[3], rect[0]:rect[2], :] <NEW_LINE> return current_image | Crop
Arguments:
img (Image): the image being input during training
boxes (Tensor): the original bounding boxes in pt form
labels (Tensor): the class labels for each bbox
mode (float tuple): the min and max jaccard overlaps
Return:
(img, boxes, classes)
img (Image): the cropped image
boxes (Tensor): the adjusted bounding boxes in pt form
labels (Tensor): the class labels for each bbox | 625990854c3428357761be17 |
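A short sketch of applying the transform above to a dummy image, assuming the class (and its numpy/random imports) is available; with only the None sampling option configured, the input image is returned unchanged.

    import numpy as np

    crop = RandomSampleCrop()
    image = np.zeros((120, 160, 3), dtype=np.float32)   # dummy H x W x C image
    out = crop(image)                                    # None mode: pass-through
    assert out.shape == (120, 160, 3)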
class Checksum(ChecksumBase): <NEW_LINE> <INDENT> _check_result = None <NEW_LINE> _check_result_littleendian = None <NEW_LINE> def __init__(self, width, initvalue=0, byteorder='big'): <NEW_LINE> <INDENT> super(Checksum, self).__init__(initvalue, byteorder) <NEW_LINE> width = int(width) <NEW_LINE> if width <= 0 or width % 8 != 0: <NEW_LINE> <INDENT> raise ValueError("width must be postive and a multiple of 8") <NEW_LINE> <DEDENT> self._width = width <NEW_LINE> self._mask = (1 << width) - 1 | General additive checksum.
Args:
width (int): bit width of checksum. Must be positive and a multiple of 8.
initvalue (int): Initial value. If None then the default value for the class is used.
byteorder ('big' or 'little'): byte order (endianness) used when reading the input bytes. | 6259908550812a4eaa621973 |
class CheckForStrUnicodeExc(BaseASTChecker): <NEW_LINE> <INDENT> CHECK_DESC = ('N325 str() and unicode() cannot be used on an ' 'exception. Remove or use six.text_type()') <NEW_LINE> def __init__(self, tree, filename): <NEW_LINE> <INDENT> super(CheckForStrUnicodeExc, self).__init__(tree, filename) <NEW_LINE> self.name = [] <NEW_LINE> self.already_checked = [] <NEW_LINE> <DEDENT> def visit_TryExcept(self, node): <NEW_LINE> <INDENT> for handler in node.handlers: <NEW_LINE> <INDENT> if handler.name: <NEW_LINE> <INDENT> self.name.append(handler.name.id) <NEW_LINE> super(CheckForStrUnicodeExc, self).generic_visit(node) <NEW_LINE> self.name = self.name[:-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(CheckForStrUnicodeExc, self).generic_visit(node) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def visit_ExceptHandler(self, node): <NEW_LINE> <INDENT> if node.name: <NEW_LINE> <INDENT> self.name.append(node.name) <NEW_LINE> super(CheckForStrUnicodeExc, self).generic_visit(node) <NEW_LINE> self.name = self.name[:-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(CheckForStrUnicodeExc, self).generic_visit(node) <NEW_LINE> <DEDENT> <DEDENT> def visit_Call(self, node): <NEW_LINE> <INDENT> if self._check_call_names(node, ['str', 'unicode']): <NEW_LINE> <INDENT> if node not in self.already_checked: <NEW_LINE> <INDENT> self.already_checked.append(node) <NEW_LINE> if isinstance(node.args[0], ast.Name): <NEW_LINE> <INDENT> if node.args[0].id in self.name: <NEW_LINE> <INDENT> self.add_error(node.args[0]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> super(CheckForStrUnicodeExc, self).generic_visit(node) | Checks for the use of str() or unicode() on an exception.
This currently only handles the case where str() or unicode()
is used in the scope of an exception handler. If the exception
is passed into a function, returned from an assertRaises, or
used on an exception created in the same scope, this does not
catch it. | 62599085283ffb24f3cf53fd |
class FPSDisplay: <NEW_LINE> <INDENT> update_period = 0.25 <NEW_LINE> def __init__(self, window): <NEW_LINE> <INDENT> from time import time <NEW_LINE> from pyglet.text import Label <NEW_LINE> self.label = Label('', x=10, y=10, font_size=24, bold=True, color=(127, 127, 127, 127)) <NEW_LINE> self.window = window <NEW_LINE> self._window_flip = window.flip <NEW_LINE> window.flip = self._hook_flip <NEW_LINE> self.time = 0.0 <NEW_LINE> self.last_time = time() <NEW_LINE> self.count = 0 <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> from time import time <NEW_LINE> t = time() <NEW_LINE> self.count += 1 <NEW_LINE> self.time += t - self.last_time <NEW_LINE> self.last_time = t <NEW_LINE> if self.time >= self.update_period: <NEW_LINE> <INDENT> self.set_fps(self.count / self.time) <NEW_LINE> self.time %= self.update_period <NEW_LINE> self.count = 0 <NEW_LINE> <DEDENT> <DEDENT> def set_fps(self, fps): <NEW_LINE> <INDENT> self.label.text = '%.2f' % fps <NEW_LINE> <DEDENT> def draw(self): <NEW_LINE> <INDENT> gl.glMatrixMode(gl.GL_MODELVIEW) <NEW_LINE> gl.glPushMatrix() <NEW_LINE> gl.glLoadIdentity() <NEW_LINE> gl.glMatrixMode(gl.GL_PROJECTION) <NEW_LINE> gl.glPushMatrix() <NEW_LINE> gl.glLoadIdentity() <NEW_LINE> gl.glOrtho(0, self.window.width, 0, self.window.height, -1, 1) <NEW_LINE> self.label.draw() <NEW_LINE> gl.glPopMatrix() <NEW_LINE> gl.glMatrixMode(gl.GL_MODELVIEW) <NEW_LINE> gl.glPopMatrix() <NEW_LINE> <DEDENT> def _hook_flip(self): <NEW_LINE> <INDENT> self.update() <NEW_LINE> self._window_flip() | Display of a window's framerate.
This is a convenience class to aid in profiling and debugging. Typical
usage is to create an `FPSDisplay` for each window, and draw the display
at the end of the windows' :py:meth:`~pyglet.window.Window.on_draw` event handler::
window = pyglet.window.Window()
fps_display = FPSDisplay(window)
@window.event
def on_draw():
# ... perform ordinary window drawing operations ...
fps_display.draw()
The style and position of the display can be modified via the :py:func:`~pyglet.text.Label`
attribute. Different text can be substituted by overriding the
`set_fps` method. The display can be set to update more or less often
by setting the `update_period` attribute. Note: setting the `update_period`
to a value smaller than your Window refresh rate will cause inaccurate readings.
:Ivariables:
`label` : Label
The text label displaying the framerate. | 6259908599fddb7c1ca63b89 |
class RandomSummarizer(AbstractSummarizer): <NEW_LINE> <INDENT> def __call__(self, document, sentences_count): <NEW_LINE> <INDENT> sentences = document.sentences <NEW_LINE> ratings = self._get_random_ratings(sentences) <NEW_LINE> return self._get_best_sentences(sentences, sentences_count, ratings) <NEW_LINE> <DEDENT> def _get_random_ratings(self, sentences): <NEW_LINE> <INDENT> ratings = list(range(len(sentences))) <NEW_LINE> random.shuffle(ratings) <NEW_LINE> return dict((s, r) for s, r in zip(sentences, ratings)) | Summarizer that picks sentences randomly. | 62599085167d2b6e312b8345 |
class InvalidChecksumError(TChannelError): <NEW_LINE> <INDENT> pass | Represents an invalid checksum type in the message. | 625990854428ac0f6e65a08b
class MobileRedirectMiddleware(object): <NEW_LINE> <INDENT> def process_request(self, request): <NEW_LINE> <INDENT> domain = request.META.get('HTTP_HOST', '') <NEW_LINE> mobile_domain = settings.OPPS_DOMAIN_MOBILE <NEW_LINE> current_cookie = request.COOKIES.get('template_mode', None) <NEW_LINE> template_mode = request.GET.get('template_mode', None) <NEW_LINE> settings.TEMPLATE_DIRS = settings.TEMPLATE_DIRS_WEB <NEW_LINE> is_mobile_domain = domain == mobile_domain <NEW_LINE> if not template_mode and not current_cookie: <NEW_LINE> <INDENT> if is_mobile_domain: <NEW_LINE> <INDENT> template_mode = u'mobile' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> if is_mobile_domain and template_mode == u'desktop': <NEW_LINE> <INDENT> prot = settings.OPPS_PROTOCOL_WEB <NEW_LINE> web_domain = settings.OPPS_DOMAIN_WEB <NEW_LINE> url = u"{}://{}/?template_mode=desktop".format(prot, web_domain) <NEW_LINE> return HttpResponseRedirect(url) <NEW_LINE> <DEDENT> elif not is_mobile_domain and template_mode == u'mobile': <NEW_LINE> <INDENT> prot = settings.OPPS_PROTOCOL_MOBILE <NEW_LINE> url = u"{}://{}/?template_mode=mobile".format(prot, mobile_domain) <NEW_LINE> settings.CACHE_MIDDLEWARE_KEY_PREFIX = u"opps_site-{}-{}".format( settings.SITE_ID, random.getrandbits(32)) <NEW_LINE> return HttpResponseRedirect(url) <NEW_LINE> <DEDENT> request._resp_cookies = SimpleCookie() <NEW_LINE> request.set_cookie = MethodType(_set_cookie, request, HttpRequest) <NEW_LINE> request.delete_cookie = MethodType( _delete_cookie, request, HttpRequest ) <NEW_LINE> if template_mode: <NEW_LINE> <INDENT> request.set_cookie('template_mode', template_mode) <NEW_LINE> current_cookie = template_mode <NEW_LINE> <DEDENT> if current_cookie and current_cookie.strip().lower() == u"mobile": <NEW_LINE> <INDENT> settings.TEMPLATE_DIRS = settings.TEMPLATE_DIRS_MOBILE <NEW_LINE> <DEDENT> <DEDENT> def process_response(self, request, response): <NEW_LINE> <INDENT> if hasattr(request, '_resp_cookies') and request._resp_cookies: <NEW_LINE> <INDENT> response.cookies.update(request._resp_cookies) <NEW_LINE> <DEDENT> return response | Allows setting and deleting of cookies from requests in exactly the same
way as responses.
request.set_cookie('name', 'value')
The set_cookie and delete_cookie are exactly the same as the ones built
into the Django HttpResponse class.
http://docs.djangoproject.com/en/dev/ref/request-response
/#django.http.HttpResponse.set_cookie | 625990852c8b7c6e89bd5343 |
class Experience(models.Model): <NEW_LINE> <INDENT> id = models.UUIDField(primary_key=True, editable=False, default=uuid.uuid4) <NEW_LINE> place = models.CharField('work place', max_length=120) <NEW_LINE> title = models.CharField(max_length=120) <NEW_LINE> description = models.TextField(blank=True) <NEW_LINE> start = models.IntegerField('start year') <NEW_LINE> end = models.IntegerField('end date', blank=True, null=True) <NEW_LINE> still_working = models.BooleanField(default=False) <NEW_LINE> is_published = models.BooleanField(default=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ('-still_working', '-end', '-start') <NEW_LINE> verbose_name = 'Work Experience' <NEW_LINE> verbose_name_plural = 'Work Experiences' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title | Work experience history. | 6259908555399d3f05628072 |
class FrozenLakeQlearningBrain( Brain ): <NEW_LINE> <INDENT> def __init__( self, n_states, n_actions, epsilon = 0.5, gamma = 0.9, learning_rate = 0.1 ): <NEW_LINE> <INDENT> super().__init__( n_states, n_actions ) <NEW_LINE> self._epsilon = epsilon <NEW_LINE> self._gamma = gamma <NEW_LINE> self._learning_rate = learning_rate <NEW_LINE> self._q_function = np.random.rand( n_states, n_actions ) <NEW_LINE> return <NEW_LINE> <DEDENT> def print( self, str ): <NEW_LINE> <INDENT> print( "----------------------------------" ) <NEW_LINE> print( "FrozenLakeQlearningBrain" ) <NEW_LINE> print( self ) <NEW_LINE> print( str ) <NEW_LINE> print( "_n_states : \n", self._n_states ) <NEW_LINE> print( "_n_actions : \n", self._n_actions ) <NEW_LINE> print( "_epsilon : \n", self._epsilon ) <NEW_LINE> print( "_gamma : \n", self._gamma ) <NEW_LINE> print( "_learning_rate : \n", self._learning_rate ) <NEW_LINE> print( "_q_function : \n", self._q_function ) <NEW_LINE> print( "----------------------------------" ) <NEW_LINE> return <NEW_LINE> <DEDENT> def get_q_function( self ): <NEW_LINE> <INDENT> return self._q_function <NEW_LINE> <DEDENT> def decay_epsilon( self ): <NEW_LINE> <INDENT> self._epsilon = self._epsilon / 2.0 <NEW_LINE> return <NEW_LINE> <DEDENT> def action( self, state ): <NEW_LINE> <INDENT> if( self._epsilon >= np.random.rand() ): <NEW_LINE> <INDENT> action = np.random.randint( self._n_actions ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> action = np.nanargmax( self._q_function[state, :] ) <NEW_LINE> <DEDENT> if( action == np.nan ): <NEW_LINE> <INDENT> action = 0 <NEW_LINE> <DEDENT> return action <NEW_LINE> <DEDENT> def update_q_function( self, state, action, next_state, reward ): <NEW_LINE> <INDENT> self._q_function[ state, action ] += self._learning_rate * ( reward + self._gamma * np.nanmax( self._q_function[ next_state, : ] ) - self._q_function[ state, action ] ) <NEW_LINE> return self._q_function | FrozenLake の Brain
- The path-selection logic is Q-learning
[public]
[protected] variable names are prefixed with a single underscore _
_q_function : list<float> / shape = [n_states, n_actions]
the state-action value function Q(s,a),
stored as a table with states s as rows and actions a as columns
_epsilon : <float> the ε value of the ε-greedy method
_gamma : <float> the discount factor γ of the discounted return
_learning_rate : <float> the learning rate
[private] variable names are prefixed with a double underscore __ (Python convention) | 62599085bf627c535bcb3031
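A self-contained sketch of the same tabular update the class above performs, written with plain numpy so it runs without the external Brain base class; the table size and the transition values are assumptions.

    import numpy as np

    n_states, n_actions = 16, 4                       # FrozenLake-sized table (assumption)
    lr, gamma, epsilon = 0.1, 0.9, 0.5
    q = np.random.rand(n_states, n_actions)

    state, action, next_state, reward = 0, 1, 4, 0.0  # one hypothetical transition
    # Q(s,a) <- Q(s,a) + lr * (r + gamma * max_a' Q(s',a') - Q(s,a))
    q[state, action] += lr * (reward + gamma * np.nanmax(q[next_state, :]) - q[state, action])

    # epsilon-greedy choice for the next step
    next_action = (np.random.randint(n_actions) if epsilon >= np.random.rand()
                   else int(np.nanargmax(q[state, :])))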
class QueryBuilder: <NEW_LINE> <INDENT> def __init__(self, dois = None): <NEW_LINE> <INDENT> self.__dois = dois <NEW_LINE> <DEDENT> @property <NEW_LINE> def dois(self): <NEW_LINE> <INDENT> return self.__dois <NEW_LINE> <DEDENT> @dois.setter <NEW_LINE> def dois(self, dois): <NEW_LINE> <INDENT> self.__dois = dois <NEW_LINE> <DEDENT> def build_query(self): <NEW_LINE> <INDENT> return 'search publications where {0} return publications [doi + title + open_access]' .format(self.__build_doi_section()) <NEW_LINE> <DEDENT> def __build_doi_section(self): <NEW_LINE> <INDENT> doi_section = 'doi="{0}"'.format(self.__dois[0]) <NEW_LINE> if len(self.__dois) > 1: <NEW_LINE> <INDENT> remaining_dois = self.__dois[1:len(self.__dois)] <NEW_LINE> for doi in remaining_dois: <NEW_LINE> <INDENT> doi_section = doi_section + ' or doi="{0}"'.format(doi) <NEW_LINE> <DEDENT> <DEDENT> return doi_section | Creates the query string to send to Dimensions, using their Domain Specific Language.
You can test the DSL here: https://app.dimensions.ai/dsl .
It is documented here: http://docs.dimensions.ai/dsl/1.5.0/
At present, for the bare-minimum release, the query searches publications by DOI, using an
or modifier to search for multiple publications. Only the doi, title and open_access fields are
returned. Only the open_access field is required for our initial research, but we shall use
the other two for testing / reconciliation purposes.
This class could become a place where the Dimensions API client could be comprehensively extended,
behaviours added to search different entities, in different ways, and return all sorts of different
fields. Hence finding a pattern (like a Decorator or something) to make it easier to extend in
future might be a worthwhile exercise in the next release beyond the bare minimum. | 6259908523849d37ff852c19 |
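A usage sketch, assuming the QueryBuilder class above is defined; the DOIs are illustrative and the printed query is a single line, wrapped here for readability.

    qb = QueryBuilder(dois=["10.1000/182", "10.1000/183"])
    print(qb.build_query())
    # search publications where doi="10.1000/182" or doi="10.1000/183"
    #     return publications [doi + title + open_access]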
class MockRecvServer(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, port): <NEW_LINE> <INDENT> self._sock = socket.socket() <NEW_LINE> self._sock.bind(('localhost', port)) <NEW_LINE> self._buf = BytesIO() <NEW_LINE> threading.Thread.__init__(self) <NEW_LINE> self.start() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> s = self._sock <NEW_LINE> s.listen(1) <NEW_LINE> con, _ = s.accept() <NEW_LINE> while True: <NEW_LINE> <INDENT> d = con.recv(4096) <NEW_LINE> if not d: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> self._buf.write(d) <NEW_LINE> <DEDENT> con.close() <NEW_LINE> s.close() <NEW_LINE> self._sock = None <NEW_LINE> <DEDENT> def wait(self): <NEW_LINE> <INDENT> while self._sock: <NEW_LINE> <INDENT> time.sleep(0.1) <NEW_LINE> <DEDENT> <DEDENT> def get_recieved(self): <NEW_LINE> <INDENT> self.wait() <NEW_LINE> self._buf.seek(0) <NEW_LINE> return list(Unpacker(self._buf, encoding='utf-8')) | Single threaded server accepts one connection and recv until EOF. | 62599085167d2b6e312b8346 |
class Unpool3DLayer(lasagne.layers.Layer): <NEW_LINE> <INDENT> def __init__(self, incoming, scale_factor, **kwargs): <NEW_LINE> <INDENT> super(Unpool3DLayer, self).__init__(incoming, **kwargs) <NEW_LINE> self.scale_factor = (scale_factor, scale_factor, scale_factor) <NEW_LINE> <DEDENT> def get_output_shape_for(self, input_shape): <NEW_LINE> <INDENT> output_shape = list(input_shape) <NEW_LINE> if output_shape[2] is not None: <NEW_LINE> <INDENT> output_shape[2] *= self.scale_factor[0] <NEW_LINE> <DEDENT> if output_shape[3] is not None: <NEW_LINE> <INDENT> output_shape[3] *= self.scale_factor[1] <NEW_LINE> <DEDENT> if output_shape[4] is not None: <NEW_LINE> <INDENT> output_shape[4] *= self.scale_factor[2] <NEW_LINE> <DEDENT> <DEDENT> def get_output_for(self, input, **kwargs): <NEW_LINE> <INDENT> a = self.scale_factor[0] <NEW_LINE> s0,s1,s2,_,_ = input.shape <NEW_LINE> upscaled = T.zeros(shape=(s0,s1,s2*a,s2*a,s2*a), dtype=theano.config.floatX) <NEW_LINE> indices = [x * a + a/2 for x in T.mgrid[0:s2,0:s2,0:s2]] <NEW_LINE> return T.set_subtensor(upscaled[:,:,indices[0],indices[1],indices[2]], input) | please use the lasagne new API
Note that this implementation: [1,2] --> [0,1,0,2], however the lasagne new Unpool3DLayer_new API get [1,0,2,0]
based on the Upscale2DLayer
------------
usages:
>>> x = T.tensor5()
>>> p = T.iscalar() # integer
>>> l_in = InputLayer((None,)*5, input_var=x)
>>> l1 = Unpool3DLayer(l_in, scale_factor=p)
>>> y = get_output(l1)
>>> f = theano.function([x, p], y)
>>> x_test = np.reshape(np.arange(2*2*2).astype('float32'), (1,1,2,2,2))
>>> f(x_test, 2) # doctest: +ELLIPSIS
array([[[[[ 0., 0., 0., 0.],
[ 0., 0., 0., 0.],
[ 0., 0., 0., 0.],
[ 0., 0., 0., 0.]],
<BLANKLINE>
[[ 0., 0., 0., 0.],
[ 0., 0., 0., 1.],
[ 0., 0., 0., 0.],
[ 0., 2., 0., 3.]],
<BLANKLINE>
[[ 0., 0., 0., 0.],
[ 0., 0., 0., 0.],
[ 0., 0., 0., 0.],
[ 0., 0., 0., 0.]],
<BLANKLINE>
[[ 0., 0., 0., 0.],
[ 0., 4., 0., 5.],
[ 0., 0., 0., 0.],
[ 0., 6., 0., 7.]]]]], dtype=float32) | 625990852c8b7c6e89bd5345 |
class ShowIsisSegmentRoutingPrefixSidMapSchema(MetaParser): <NEW_LINE> <INDENT> schema = { 'process_id': { Any(): { 'policy': { Any(): { 'sid': { Any(): { 'prefix': str, 'range': int, Optional('flags'): str, }, }, 'number_of_mapping_entries': int, }, } }, } } | Schema for:
* show isis segment-routing prefix-sid-map active-policy
* show isis segment-routing prefix-sid-map backup-policy | 62599085283ffb24f3cf5400 |
class AttrDict(dict): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(AttrDict, self).__init__(*args, **kwargs) <NEW_LINE> self.__dict__ = self <NEW_LINE> for key, value in self.__dict__.items(): <NEW_LINE> <INDENT> if isinstance(value, dict): <NEW_LINE> <INDENT> self.__dict__[key] = AttrDict(value) <NEW_LINE> <DEDENT> elif isinstance(value, (list, tuple)): <NEW_LINE> <INDENT> if isinstance(value[0], dict): <NEW_LINE> <INDENT> self.__dict__[key] = [AttrDict(item) for item in value] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__dict__[key] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def yaml(self): <NEW_LINE> <INDENT> yaml_dict = {} <NEW_LINE> for key, value in self.__dict__.items(): <NEW_LINE> <INDENT> if isinstance(value, AttrDict): <NEW_LINE> <INDENT> yaml_dict[key] = value.yaml() <NEW_LINE> <DEDENT> elif isinstance(value, list): <NEW_LINE> <INDENT> if isinstance(value[0], AttrDict): <NEW_LINE> <INDENT> new_l = [] <NEW_LINE> for item in value: <NEW_LINE> <INDENT> new_l.append(item.yaml()) <NEW_LINE> <DEDENT> yaml_dict[key] = new_l <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yaml_dict[key] = value <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> yaml_dict[key] = value <NEW_LINE> <DEDENT> <DEDENT> return yaml_dict <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> ret_str = [] <NEW_LINE> for key, value in self.__dict__.items(): <NEW_LINE> <INDENT> if isinstance(value, AttrDict): <NEW_LINE> <INDENT> ret_str.append('{}:'.format(key)) <NEW_LINE> child_ret_str = value.__repr__().split('\n') <NEW_LINE> for item in child_ret_str: <NEW_LINE> <INDENT> ret_str.append(' ' + item) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(value, list): <NEW_LINE> <INDENT> if isinstance(value[0], AttrDict): <NEW_LINE> <INDENT> ret_str.append('{}:'.format(key)) <NEW_LINE> for item in value: <NEW_LINE> <INDENT> child_ret_str = item.__repr__().split('\n') <NEW_LINE> for item in child_ret_str: <NEW_LINE> <INDENT> ret_str.append(' ' + item) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> ret_str.append('{}: {}'.format(key, value)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> ret_str.append('{}: {}'.format(key, value)) <NEW_LINE> <DEDENT> <DEDENT> return '\n'.join(ret_str) | Dict as attribute trick. | 62599085d486a94d0ba2db16 |
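A minimal sketch, assuming the AttrDict class above is defined; the configuration keys are made up for illustration.

    cfg = AttrDict({"model": {"depth": 50, "heads": [{"name": "cls"}]}, "lr": 0.1})
    print(cfg.model.depth)           # 50 -- nested dicts become attribute access
    print(cfg.model.heads[0].name)   # "cls" -- dicts inside lists are wrapped too
    print(cfg.yaml())                # converts back to plain nested dicts/lists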
class Payment(models.Model): <NEW_LINE> <INDENT> customer=models.ForeignKey('Customer',verbose_name='缴费学员',related_name='payment',on_delete=models.CASCADE) <NEW_LINE> course=models.ForeignKey('Course',verbose_name='所报课程',related_name='payment',on_delete=models.CASCADE) <NEW_LINE> amount=models.PositiveIntegerField(verbose_name='数额',default=500) <NEW_LINE> consultant=models.ForeignKey('UserProfile',verbose_name='课程顾问',related_name='payment',on_delete=models.CASCADE) <NEW_LINE> date=models.DateTimeField("交款日期",auto_now_add=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '%s %s'%(self.customer,self.amount) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name='缴费记录' <NEW_LINE> verbose_name_plural='缴费记录' | Payment record | 625990855fcc89381b266f0d
class Graph: <NEW_LINE> <INDENT> def __init__(self, **kw_args): <NEW_LINE> <INDENT> super().__init__(**kw_args) <NEW_LINE> self._vertices = set() <NEW_LINE> self._pred = dict() <NEW_LINE> self._succ = dict() <NEW_LINE> <DEDENT> def vertices(self): <NEW_LINE> <INDENT> return iter(self._vertices) <NEW_LINE> <DEDENT> def succ(self, vertex): <NEW_LINE> <INDENT> return iter(self._succ[vertex]) <NEW_LINE> <DEDENT> def pred(self, vertex): <NEW_LINE> <INDENT> return iter(self._pred[vertex]) <NEW_LINE> <DEDENT> def add(self, vertex): <NEW_LINE> <INDENT> if vertex in self._vertices: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._vertices.add(vertex) <NEW_LINE> self._pred[vertex] = set() <NEW_LINE> self._succ[vertex] = list() <NEW_LINE> <DEDENT> def remove(self, vertex): <NEW_LINE> <INDENT> removed = set() <NEW_LINE> for pred in self.pred(vertex): <NEW_LINE> <INDENT> removed.add(pred) <NEW_LINE> <DEDENT> for pred in removed: <NEW_LINE> <INDENT> self.disconnect(pred, vertex) <NEW_LINE> <DEDENT> removed = set() <NEW_LINE> for succ in self.succ(vertex): <NEW_LINE> <INDENT> removed.add(succ) <NEW_LINE> <DEDENT> for succ in removed: <NEW_LINE> <INDENT> self.disconnect(vertex, succ) <NEW_LINE> <DEDENT> self._vertices.remove(vertex) <NEW_LINE> del self._pred[vertex] <NEW_LINE> del self._succ[vertex] <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self._vertices.clear() <NEW_LINE> self._pred.clear() <NEW_LINE> self._succ.clear() <NEW_LINE> <DEDENT> def connect(self, pred, succ): <NEW_LINE> <INDENT> if succ not in self._succ[pred]: <NEW_LINE> <INDENT> self._succ[pred].append(succ) <NEW_LINE> <DEDENT> self._pred[succ].add(pred) <NEW_LINE> <DEDENT> def disconnect(self, pred, succ): <NEW_LINE> <INDENT> self._succ[pred].remove(succ) <NEW_LINE> self._pred[succ].remove(pred) <NEW_LINE> <DEDENT> def in_degree(self, vertex): <NEW_LINE> <INDENT> return len(self._pred[vertex]) <NEW_LINE> <DEDENT> def out_degree(self, vertex): <NEW_LINE> <INDENT> return len(self._succ[vertex]) <NEW_LINE> <DEDENT> def num_vertices(self): <NEW_LINE> <INDENT> return len(self._vertices) <NEW_LINE> <DEDENT> gp_vertices = vertices <NEW_LINE> gp_succ = succ <NEW_LINE> gp_add = add <NEW_LINE> gp_remove = remove <NEW_LINE> gp_connect = connect <NEW_LINE> gp_disconnect = disconnect <NEW_LINE> gp_pred = pred <NEW_LINE> gp_clear = clear <NEW_LINE> gp_in_degree = in_degree <NEW_LINE> gp_out_degree = out_degree <NEW_LINE> gp_num_vertices = num_vertices | Basic graph class.
This is an implementation of the graph protocol defined at
http://pycog.codiecodemonkey.com/graph-protocol.html. | 62599085aad79263cf43031a |
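A short usage sketch, assuming the Graph class above is defined; vertex labels are arbitrary strings.

    g = Graph()
    for v in ("a", "b", "c"):
        g.add(v)
    g.connect("a", "b")
    g.connect("a", "c")
    print(list(g.succ("a")))   # ['b', 'c'] -- successors keep insertion order
    print(g.in_degree("b"))    # 1
    g.remove("a")              # also drops the edges a->b and a->c
    print(g.num_vertices())    # 2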
class Manager(object, metaclass=abc.ABCMeta): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> verify_attribute(self, "options", dict) <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def run(self, options, *args, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError() | Base class of all managers
| 62599085099cdd3c636761aa |
class TestDeleteCategory: <NEW_LINE> <INDENT> def test_not_authenticated(self, client, mock_category): <NEW_LINE> <INDENT> assert client.delete(f'/api/category/{mock_category.id}/delete').status_code == 401 <NEW_LINE> <DEDENT> def test_unauthorized(self, client, fake_auth, mock_category): <NEW_LINE> <INDENT> student = User.find_by_canvas_user_id(8765432) <NEW_LINE> fake_auth.login(student.id) <NEW_LINE> assert client.delete(f'/api/category/{mock_category.id}/delete').status_code == 401 <NEW_LINE> <DEDENT> def test_delete_group(self, client, fake_auth, mock_category): <NEW_LINE> <INDENT> teacher = User.find_by_canvas_user_id(9876543) <NEW_LINE> fake_auth.login(teacher.id) <NEW_LINE> assert client.delete(f'/api/category/{mock_category.id}/delete').status_code == 200 <NEW_LINE> assert client.get(f'/api/category/{mock_category.id}').status_code == 404 | Delete Category API. | 625990857c178a314d78e99a |
class Losses: <NEW_LINE> <INDENT> losses = [] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.losses = [] <NEW_LINE> <DEDENT> def addLoss(self, name, formula, **kwargs): <NEW_LINE> <INDENT> self.losses.append(Compound(name=name, formula=formula)) <NEW_LINE> <DEDENT> def hasLoss(self): <NEW_LINE> <INDENT> return True if len(self.losses) > 0 else False <NEW_LINE> <DEDENT> def getLosses(self): <NEW_LINE> <INDENT> for x in self.losses: <NEW_LINE> <INDENT> yield x | An abstract class containing attributes and methods dealing with neutral
losses of a compound.
Attributes:
losses (list): List of Compounds describing a neutral loss of a part of
the molecule.
Arguments:
None | 62599085aad79263cf43031b |
class ClientSSHTransportDHGroupExchangeBaseCase(ClientSSHTransportBaseCase): <NEW_LINE> <INDENT> def test_KEXINIT_groupexchange(self): <NEW_LINE> <INDENT> self.proto.supportedKeyExchanges = [self.kexAlgorithm] <NEW_LINE> self.proto.dataReceived(self.transport.value()) <NEW_LINE> self.assertEqual(self.packets, [( transport.MSG_KEX_DH_GEX_REQUEST, b'\x00\x00\x04\x00\x00\x00\x08\x00\x00\x00\x20\x00')]) <NEW_LINE> <DEDENT> def test_KEX_DH_GEX_GROUP(self): <NEW_LINE> <INDENT> self.test_KEXINIT_groupexchange() <NEW_LINE> self.proto.ssh_KEX_DH_GEX_GROUP( b'\x00\x00\x00\x01\x0f\x00\x00\x00\x01\x02') <NEW_LINE> self.assertEqual(self.proto.p, 15) <NEW_LINE> self.assertEqual(self.proto.g, 2) <NEW_LINE> self.assertEqual(common.MP(self.proto.x)[5:], b'\x99' * 40) <NEW_LINE> self.assertEqual(self.proto.e, common.MP(pow(2, self.proto.x, 15))) <NEW_LINE> self.assertEqual(self.packets[1:], [(transport.MSG_KEX_DH_GEX_INIT, self.proto.e)]) <NEW_LINE> <DEDENT> def test_KEX_DH_GEX_REPLY(self): <NEW_LINE> <INDENT> self.test_KEX_DH_GEX_GROUP() <NEW_LINE> sharedSecret = common._MPpow(3, self.proto.x, self.proto.p) <NEW_LINE> h = self.hashProcessor() <NEW_LINE> h.update(common.NS(self.proto.ourVersionString) * 2) <NEW_LINE> h.update(common.NS(self.proto.ourKexInitPayload) * 2) <NEW_LINE> h.update(common.NS(self.blob)) <NEW_LINE> h.update(b'\x00\x00\x04\x00\x00\x00\x08\x00\x00\x00\x20\x00') <NEW_LINE> h.update(b'\x00\x00\x00\x01\x0f\x00\x00\x00\x01\x02') <NEW_LINE> h.update(self.proto.e) <NEW_LINE> h.update(b'\x00\x00\x00\x01\x03') <NEW_LINE> h.update(sharedSecret) <NEW_LINE> exchangeHash = h.digest() <NEW_LINE> def _cbTestKEX_DH_GEX_REPLY(value): <NEW_LINE> <INDENT> self.assertIs(value, None) <NEW_LINE> self.assertEqual(self.calledVerifyHostKey, True) <NEW_LINE> self.assertEqual(self.proto.sessionID, exchangeHash) <NEW_LINE> <DEDENT> signature = self.privObj.sign(exchangeHash) <NEW_LINE> d = self.proto.ssh_KEX_DH_GEX_REPLY( common.NS(self.blob) + b'\x00\x00\x00\x01\x03' + common.NS(signature)) <NEW_LINE> d.addCallback(_cbTestKEX_DH_GEX_REPLY) <NEW_LINE> return d <NEW_LINE> <DEDENT> def test_disconnectGEX_REPLYBadSignature(self): <NEW_LINE> <INDENT> self.test_KEX_DH_GEX_REPLY() <NEW_LINE> self.proto._continueGEX_REPLY(None, self.blob, 3, b"bad signature") <NEW_LINE> self.checkDisconnected(transport.DISCONNECT_KEY_EXCHANGE_FAILED) | Diffie-Hellman group exchange tests for SSHClientTransport. | 62599085283ffb24f3cf5401 |
class Velocity: <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> self.value = value <NEW_LINE> <DEDENT> def __call__(self, oldValue, dt, index=None): <NEW_LINE> <INDENT> if index is None: <NEW_LINE> <INDENT> return oldValue + self.value * dt <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return oldValue + self.value[index] * dt <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, i): <NEW_LINE> <INDENT> return self.value[i] <NEW_LINE> <DEDENT> def __setitem__(self, i, v): <NEW_LINE> <INDENT> self.value[i] = v | A class that can be used as an integration function for Value and Vector.
The given velocity should match the data type and number of elements.
The velocity vector contents can be accessed transparently from this class. | 625990854527f215b58eb750 |
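A minimal sketch, assuming the Velocity class above is defined; the numbers are illustrative.

    v = Velocity(2.0)                  # constant scalar velocity
    print(v(oldValue=1.0, dt=0.5))     # 1.0 + 2.0 * 0.5 -> 2.0

    vec = Velocity([1.0, -1.0])        # per-component velocities for a two-element vector
    print(vec(5.0, 0.1, index=1))      # 5.0 + (-1.0) * 0.1 -> 4.9
    print(vec[0])                      # components are readable through indexing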
class Order: <NEW_LINE> <INDENT> def __init__(self, user_name: str, city: str, postoffice: int, items: list, vehicle=None): <NEW_LINE> <INDENT> self.orderId = randint(100000000, 999999999) <NEW_LINE> self.user_name = user_name <NEW_LINE> self.location = Location(city, postoffice) <NEW_LINE> self.items = items <NEW_LINE> self.vehicle = vehicle <NEW_LINE> print(f"Your order number is {self.orderId}.") <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"Order: {[item.__str__() for item in self.items]}" <NEW_LINE> <DEDENT> def calculateAmount(self): <NEW_LINE> <INDENT> return sum(item.price for item in self.items) <NEW_LINE> <DEDENT> def assignVehicle(self, vehicle: Vehicle): <NEW_LINE> <INDENT> self.vehicle = vehicle | Order class which contains all info about order and user | 6259908597e22403b383ca59 |
class Project(models.GitHubCore): <NEW_LINE> <INDENT> CUSTOM_HEADERS = {"Accept": "application/vnd.github.inertia-preview+json"} <NEW_LINE> def _update_attributes(self, project): <NEW_LINE> <INDENT> self._api = project["url"] <NEW_LINE> self.body = project["body"] <NEW_LINE> self.created_at = self._strptime(project["created_at"]) <NEW_LINE> self.creator = users.ShortUser(project["creator"], self) <NEW_LINE> self.id = project["id"] <NEW_LINE> self.name = project["name"] <NEW_LINE> self.number = project["number"] <NEW_LINE> self.owner_url = project["owner_url"] <NEW_LINE> self.updated_at = self._strptime(project["updated_at"]) <NEW_LINE> <DEDENT> def _repr(self): <NEW_LINE> <INDENT> return "<Project [#{0}]>".format(self.id) <NEW_LINE> <DEDENT> def column(self, id): <NEW_LINE> <INDENT> url = self._build_url("projects", "columns", str(id)) <NEW_LINE> json = self._json(self._get(url, headers=Project.CUSTOM_HEADERS), 200) <NEW_LINE> return self._instance_or_null(ProjectColumn, json) <NEW_LINE> <DEDENT> def columns(self, number=-1, etag=None): <NEW_LINE> <INDENT> url = self._build_url("projects", str(self.id), "columns") <NEW_LINE> return self._iter( int(number), url, ProjectColumn, headers=Project.CUSTOM_HEADERS, etag=etag, ) <NEW_LINE> <DEDENT> @requires_auth <NEW_LINE> def create_column(self, name): <NEW_LINE> <INDENT> url = self._build_url("columns", base_url=self._api) <NEW_LINE> json = None <NEW_LINE> if name: <NEW_LINE> <INDENT> json = self._json( self._post( url, data={"name": name}, headers=Project.CUSTOM_HEADERS ), 201, ) <NEW_LINE> <DEDENT> return self._instance_or_null(ProjectColumn, json) <NEW_LINE> <DEDENT> @requires_auth <NEW_LINE> def delete(self): <NEW_LINE> <INDENT> return self._boolean( self._delete(self._api, headers=self.CUSTOM_HEADERS), 204, 404 ) <NEW_LINE> <DEDENT> @requires_auth <NEW_LINE> def update(self, name=None, body=None): <NEW_LINE> <INDENT> data = {"name": name, "body": body} <NEW_LINE> json = None <NEW_LINE> self._remove_none(data) <NEW_LINE> if data: <NEW_LINE> <INDENT> json = self._json( self._patch( self._api, data=dumps(data), headers=self.CUSTOM_HEADERS ), 200, ) <NEW_LINE> <DEDENT> if json: <NEW_LINE> <INDENT> self._update_attributes(json) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False | Object representing a single project from the API.
See http://developer.github.com/v3/projects/ for more details.
.. attribute:: body
The Markdown formatted text describing the project.
.. attribute:: created_at
A :class:`~datetime.datetime` representing the date and time when
this project was created.
.. attribute:: creator
A :class:`~github3.users.ShortUser` instance representing the user who
created this project.
.. attribute:: id
The unique identifier for this project on GitHub.
.. attribute:: name
The name given to this project.
.. attribute:: number
The repository-local identifier of this project.
.. attribute:: owner_url
The URL of the resource in the API of the owning resource - either
a repository or an organization.
.. attribute:: updated_at
A :class:`~datetime.datetime` representing the date and time when
this project was last updated. | 625990852c8b7c6e89bd5347 |
class SearchArgs(ma.Schema): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> strict = True <NEW_LINE> <DEDENT> name = Str(missing=None, strict=True) <NEW_LINE> exact = Bool(missing=False, strict=True, required=False) <NEW_LINE> @validates_schema(pass_original=True) <NEW_LINE> def check_unknown_fields(self, data, original_data): <NEW_LINE> <INDENT> unknown = set(original_data) - set(self.fields) <NEW_LINE> if len(unknown) > 0: <NEW_LINE> <INDENT> raise BadRequestKeyError("Unknown field {}".format(unknown)) | Read in arguments for searching | 62599085dc8b845886d5511b |
class Folder(models.Model, Serializer): <NEW_LINE> <INDENT> title = models.CharField(max_length=80) <NEW_LINE> user = models.ForeignKey( User, related_name='folders', on_delete=models.CASCADE ) <NEW_LINE> parent = models.ForeignKey( 'self', related_name='subfolders', null=True, blank=True, on_delete=models.CASCADE ) <NEW_LINE> created_at = models.DateTimeField(auto_now_add=True) <NEW_LINE> updated_at = models.DateTimeField(auto_now=True, null=True) <NEW_LINE> dict_fields = ['id', 'title', 'parent_id', 'created_at', 'updated_at'] <NEW_LINE> def clean(self): <NEW_LINE> <INDENT> if self.title == '': <NEW_LINE> <INDENT> raise ValidationError('title cannot be empty') <NEW_LINE> <DEDENT> if len(self.title) > 80: <NEW_LINE> <INDENT> raise ValidationError('title is too long') <NEW_LINE> <DEDENT> <DEDENT> def full_save(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.full_clean() <NEW_LINE> <DEDENT> except ValidationError as e: <NEW_LINE> <INDENT> raise BadInput(', '.join(e.messages)) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.save() <NEW_LINE> <DEDENT> except IntegrityError: <NEW_LINE> <INDENT> raise BadInput('failed to save the object') <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Folder ID%d' % self.id | Folder is a container for notepads or other folders | 625990857cff6e4e811b75a4 |
class Square: <NEW_LINE> <INDENT> def __init__(self, size=0): <NEW_LINE> <INDENT> if isinstance(size, int): <NEW_LINE> <INDENT> if size < 0: <NEW_LINE> <INDENT> raise ValueError("size must be >= 0") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__size = size <NEW_LINE> <DEDENT> <DEDENT> elif not isinstance(size, int): <NEW_LINE> <INDENT> raise TypeError("size must be an integer") | Square class defines a square by its size, a private instance attribute | 62599085aad79263cf43031c
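A usage sketch, assuming the Square class above is defined, showing the two validation paths.

    s = Square(3)              # valid: the private __size attribute is set to 3
    try:
        Square(-1)             # negative sizes are rejected
    except ValueError as exc:
        print(exc)             # size must be >= 0
    try:
        Square("3")            # non-integers are rejected
    except TypeError as exc:
        print(exc)             # size must be an integer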
class TestCreateLock(unittest.TestCase): <NEW_LINE> <INDENT> def test_create_lock(self): <NEW_LINE> <INDENT> xml = creator.create_lock() <NEW_LINE> self.assertEqual(xml, '<lockinfo xmlns="DAV:"><lockscope>' '<exclusive /></lockscope><locktype><write />' '</locktype></lockinfo>') <NEW_LINE> <DEDENT> def test_create_illegal_scope(self): <NEW_LINE> <INDENT> self.assertRaises( ValueError, creator.create_lock, scope="everything" ) <NEW_LINE> <DEDENT> def test_create_lock_owner(self): <NEW_LINE> <INDENT> xml = creator.create_lock(owner="me") <NEW_LINE> self.assertEqual(xml, '<lockinfo xmlns="DAV:"><lockscope><exclusive />' '</lockscope><locktype><write /></locktype>' '<owner>me</owner></lockinfo>') <NEW_LINE> <DEDENT> def test_create_lock_owner_element(self): <NEW_LINE> <INDENT> owner = Element("name") <NEW_LINE> owner.text = "me" <NEW_LINE> xml = creator.create_lock(owner=owner) <NEW_LINE> self.assertEqual(xml, '<lockinfo xmlns="DAV:"><lockscope><exclusive />' '</lockscope><locktype><write /></locktype>' '<owner><name>me</name></owner></lockinfo>') | Test creator.create_lock function. | 62599085be7bc26dc9252c07 |
class App: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.root = build_root_window() <NEW_LINE> self.time_label = tk.Label(text="", font=( "Mono", 18), bg="black", fg="orange") <NEW_LINE> self.time_label.pack() <NEW_LINE> Hovertip(self.time_label, date.today()) <NEW_LINE> self.root.geometry("+20+20") <NEW_LINE> self.root.bind("<Escape>", lambda x: self.root.destroy()) <NEW_LINE> self.update_time() <NEW_LINE> self.root.mainloop() <NEW_LINE> <DEDENT> def update_time(self): <NEW_LINE> <INDENT> self.time_label.configure(text=time.strftime("%H:%M:%S")) <NEW_LINE> self.root.after(1000, self.update_time) | Application object for the clock. | 62599085aad79263cf43031d |
class TFChainTypeFactory: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._transaction_factory = TransactionFactory() <NEW_LINE> self._fulfillment_factory = FulfillmentFactory() <NEW_LINE> self._condition_factory = ConditionFactory() <NEW_LINE> self._threebot_types_factory = ThreeBotTypesFactory() <NEW_LINE> <DEDENT> @property <NEW_LINE> def transactions(self): <NEW_LINE> <INDENT> return self._transaction_factory <NEW_LINE> <DEDENT> @property <NEW_LINE> def fulfillments(self): <NEW_LINE> <INDENT> return self._fulfillment_factory <NEW_LINE> <DEDENT> @property <NEW_LINE> def conditions(self): <NEW_LINE> <INDENT> return self._condition_factory <NEW_LINE> <DEDENT> @property <NEW_LINE> def threebot(self): <NEW_LINE> <INDENT> return self._threebot_types_factory <NEW_LINE> <DEDENT> def currency_new(self, value=0): <NEW_LINE> <INDENT> return Currency(value=value) <NEW_LINE> <DEDENT> def blockstake_new(self, value=0): <NEW_LINE> <INDENT> return Blockstake(value=value) <NEW_LINE> <DEDENT> def hash_new(self, value=None): <NEW_LINE> <INDENT> return Hash(value=value) <NEW_LINE> <DEDENT> def binary_data_new(self, value=None, fixed_size=None, strencoding=None): <NEW_LINE> <INDENT> return BinaryData(value=value, fixed_size=fixed_size, strencoding=strencoding) <NEW_LINE> <DEDENT> def public_key_new(self, hash=None): <NEW_LINE> <INDENT> if not hash: <NEW_LINE> <INDENT> return PublicKey() <NEW_LINE> <DEDENT> return PublicKey(specifier=PublicKeySpecifier.ED25519, hash=hash) <NEW_LINE> <DEDENT> def public_key_from_json(self, obj): <NEW_LINE> <INDENT> return PublicKey.from_json(obj) <NEW_LINE> <DEDENT> def merkle_tree_new(self): <NEW_LINE> <INDENT> return Tree(hash_func=lambda o: bytes.fromhex(blake2_string(o))) | TFChain Types Factory class | 62599085a8370b77170f1f2f |
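A minimal usage sketch for the factory row above; it assumes TFChainTypeFactory and the Currency/Blockstake/Hash value types it wraps are importable from the same TFChain SDK module, and it only calls methods defined in the row.

# Hypothetical usage sketch; assumes the class and value types above come from the TFChain SDK.
types = TFChainTypeFactory()
amount = types.currency_new(value=100)     # Currency wrapper around the value 100
stake = types.blockstake_new(value=5)      # Blockstake wrapper
digest = types.hash_new()                  # empty Hash placeholder
tx_factory = types.transactions            # cached TransactionFactory instance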
class UserChangeForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ['username', 'email'] | Form for updating user data. Needed only so that we do not keep
seeing the constant "password field is not filled in" errors when updating a
user's data. | 62599085283ffb24f3cf5404
class TestOuterString(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testOuterString(self): <NEW_LINE> <INDENT> pass | OuterString unit test stubs | 62599085d8ef3951e32c8c10 |
class WorkspaceForm(BorgModelForm): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(WorkspaceForm, self).__init__(*args, **kwargs) <NEW_LINE> if 'instance' in kwargs and kwargs['instance'] and kwargs['instance'].pk: <NEW_LINE> <INDENT> self.fields['name'].widget.attrs['readonly'] = True <NEW_LINE> self.fields['publish_channel'].widget = self.fields['publish_channel'].widget.widget <NEW_LINE> self.fields['publish_channel'].widget.attrs['readonly'] = True <NEW_LINE> <DEDENT> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = Workspace <NEW_LINE> fields = "__all__" <NEW_LINE> widgets = { 'publish_channel': BorgSelect(), } | A form for Workspace Model | 625990855fcc89381b266f0f |
class GenericIdentityProvider(IdentityProvider): <NEW_LINE> <INDENT> def fetch_pubkey(self, euid): <NEW_LINE> <INDENT> eics = EICs.fetch(euid, PUBLIC_ID_SYMKEY, self.storage_providers) | Implements an access provider solely tasked with unlocking
identities. | 62599085d486a94d0ba2db1a |
class Terminal: <NEW_LINE> <INDENT> def __init__(self, top_x, top_y, width, height, font_path, font_color, font_size, background_color, destination): <NEW_LINE> <INDENT> self.top_x = top_x <NEW_LINE> self.top_y = top_y <NEW_LINE> self.width = width <NEW_LINE> self.height = height <NEW_LINE> self.font_path = font_path <NEW_LINE> self.font_color = font_color <NEW_LINE> self.font_size = font_size <NEW_LINE> self.background_color = background_color <NEW_LINE> self.destination = destination <NEW_LINE> self.terminal_rect = [self.top_x, self.top_y, self.width, self.height] <NEW_LINE> self.score_content_pos = [self.top_x + 5, self.top_y + 5] <NEW_LINE> self.lives_content_pos = [self.top_x + self.width - 80, self.top_y + 5] <NEW_LINE> self.font = None <NEW_LINE> self.is_updated = True <NEW_LINE> self.score_content = 'SCORE: 0' <NEW_LINE> self.lives_content = 'LIVES: 3' <NEW_LINE> self.game_over_content = '' <NEW_LINE> self.load_font() <NEW_LINE> <DEDENT> def load_font(self): <NEW_LINE> <INDENT> self.font = pygame.font.Font(self.font_path, self.font_size) <NEW_LINE> <DEDENT> def render_text(self, text): <NEW_LINE> <INDENT> return self.font.render(text, True, self.font_color) <NEW_LINE> <DEDENT> def show(self): <NEW_LINE> <INDENT> if self.is_updated: <NEW_LINE> <INDENT> pygame.draw.rect(self.destination, self.background_color, self.terminal_rect) <NEW_LINE> if self.game_over_content: <NEW_LINE> <INDENT> self.destination.blit(self.render_text(self.game_over_content), self.score_content_pos) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.destination.blit(self.render_text(self.score_content), self.score_content_pos) <NEW_LINE> self.destination.blit(self.render_text(self.lives_content), self.lives_content_pos) <NEW_LINE> <DEDENT> pygame.display.update(self.terminal_rect) <NEW_LINE> self.is_updated = False <NEW_LINE> <DEDENT> <DEDENT> def update_score(self, score): <NEW_LINE> <INDENT> self.score_content = 'SCORE: ' + str(score) <NEW_LINE> self.is_updated = True <NEW_LINE> <DEDENT> def update_lives(self, num_of_lives): <NEW_LINE> <INDENT> self.lives_content = 'LIVES: ' + str(num_of_lives) <NEW_LINE> self.is_updated = True <NEW_LINE> <DEDENT> def create_game_over_message(self, score): <NEW_LINE> <INDENT> self.game_over_content = ('Game is over! Your score is {}.' + '(Ctrl + N for new Game).').format(score) <NEW_LINE> self.is_updated = True <NEW_LINE> <DEDENT> def reset_terminal(self): <NEW_LINE> <INDENT> self.score_content = 'SCORE: 0' <NEW_LINE> self.lives_content = 'LIVES: 3' <NEW_LINE> self.game_over_content = '' <NEW_LINE> self.is_updated = True | This class is responsible for the functionality of the message terminal
on the bottom of the play screen (or anywhere you put it). | 62599085ad47b63b2c5a93b6 |
class TestAmenity(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.amenity = Amenity() <NEW_LINE> cls.amenity.name = "Breakfast" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def teardown(cls): <NEW_LINE> <INDENT> del cls.amenity <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.remove("file.json") <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def test_pep8_Amenity(self): <NEW_LINE> <INDENT> style = pep8.StyleGuide(quiet=True) <NEW_LINE> p = style.check_files(['models/amenity.py']) <NEW_LINE> self.assertEqual(p.total_errors, 0, "fix pep8") <NEW_LINE> <DEDENT> def test_checking_for_docstring_Amenity(self): <NEW_LINE> <INDENT> self.assertIsNotNone(Amenity.__doc__) <NEW_LINE> <DEDENT> def test_attributes_Amenity(self): <NEW_LINE> <INDENT> self.assertTrue('id' in self.amenity.__dict__) <NEW_LINE> self.assertTrue('created_at' in self.amenity.__dict__) <NEW_LINE> self.assertTrue('updated_at' in self.amenity.__dict__) <NEW_LINE> self.assertTrue('name' in self.amenity.__dict__) <NEW_LINE> <DEDENT> def test_is_subclass_Amenity(self): <NEW_LINE> <INDENT> self.assertTrue(issubclass(self.amenity.__class__, BaseModel), True) <NEW_LINE> <DEDENT> def test_attribute_types_Amenity(self): <NEW_LINE> <INDENT> self.assertEqual(type(self.amenity.name), str) <NEW_LINE> <DEDENT> @unittest.skipIf(os.getenv("HBNB_TYPE_STORAGE") == "db", "Skip if db storage is enabled") <NEW_LINE> def test_save_Amenity(self): <NEW_LINE> <INDENT> self.amenity.save() <NEW_LINE> self.assertNotEqual(self.amenity.created_at, self.amenity.updated_at) <NEW_LINE> <DEDENT> def test_to_dict_Amenity(self): <NEW_LINE> <INDENT> self.assertEqual('to_dict' in dir(self.amenity), True) | this will test the Amenity class | 625990854a966d76dd5f0a4a |
class TodoSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Todo <NEW_LINE> fields = ( 'id', 'task', 'description', ) <NEW_LINE> read_only_fields = ('id',) | Serialize a todo | 62599085adb09d7d5dc0c0c0
class PCA(BaseLearner, Transformer): <NEW_LINE> <INDENT> def __init__(self, n_components=0.95, tolerance=1.0e-9, max_iterations=1000): <NEW_LINE> <INDENT> BaseLearner.__init__(self) <NEW_LINE> Transformer.__init__(self) <NEW_LINE> self._n_components = n_components <NEW_LINE> self._tolerance = tolerance <NEW_LINE> self._max_iterations = max_iterations <NEW_LINE> <DEDENT> def _train(self, X, y=None): <NEW_LINE> <INDENT> self._X = X <NEW_LINE> self._n = len(X) <NEW_LINE> self._m = len(X[0]) <NEW_LINE> if self._m > 20: <NEW_LINE> <INDENT> raise NotImplementedError("Jacobi decomposition can be unstable with N>20 symmetric matrices!") <NEW_LINE> <DEDENT> if isinstance(self._n_components, float): <NEW_LINE> <INDENT> self._n_components = int(round(self._n_components * self._m)) <NEW_LINE> <DEDENT> self._X_means = mean(self._X, axis=0) <NEW_LINE> X_whitened = subtract(self._X, self._X_means) <NEW_LINE> cov = covariance(X_whitened) <NEW_LINE> self._v, self._w = eigen(cov, self.tolerance, self.max_iterations, normalise=False, sort=True) <NEW_LINE> self._feat_vect = [[self._w[row][column] for column in range(self.n_components)] for row in range(self._m)] <NEW_LINE> return self <NEW_LINE> <DEDENT> def _transform(self, X): <NEW_LINE> <INDENT> X_whitened = subtract(X, self._X_means) <NEW_LINE> return transpose(dot_product(transpose(self._feat_vect), transpose(X_whitened))) <NEW_LINE> <DEDENT> def _inverse(self, X): <NEW_LINE> <INDENT> return add(dot_product(X, transpose(self.eigenvectors)), self._X_means) <NEW_LINE> <DEDENT> @property <NEW_LINE> def tolerance(self): <NEW_LINE> <INDENT> return self._tolerance <NEW_LINE> <DEDENT> @property <NEW_LINE> def max_iterations(self): <NEW_LINE> <INDENT> return self._max_iterations <NEW_LINE> <DEDENT> @property <NEW_LINE> def eigenvalues(self): <NEW_LINE> <INDENT> return self._v <NEW_LINE> <DEDENT> @property <NEW_LINE> def eigenvectors(self): <NEW_LINE> <INDENT> return self._feat_vect <NEW_LINE> <DEDENT> @property <NEW_LINE> def explained_variance(self): <NEW_LINE> <INDENT> return self._v <NEW_LINE> <DEDENT> @property <NEW_LINE> def explained_variance_ratio(self): <NEW_LINE> <INDENT> return [eig / sum(self.eigenvalues) for eig in self.eigenvalues] <NEW_LINE> <DEDENT> @property <NEW_LINE> def n_components(self): <NEW_LINE> <INDENT> return self._n_components | Principal component analysis | 62599085dc8b845886d5511f |
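The PCA row above relies on helper functions (mean, covariance, eigen, dot_product) that are not shown, so here is a rough NumPy illustration of the same whiten → covariance → eigendecomposition → project pipeline; the data, shapes, and component count are invented for the example.

# NumPy illustration of the pipeline implemented by the PCA row above
# (not the row's own helpers; data and dimensions are arbitrary).
import numpy as np

X = np.random.rand(50, 5)                         # 50 samples, 5 features
X_white = X - X.mean(axis=0)                      # center, as in _train
cov = np.cov(X_white, rowvar=False)               # covariance of whitened data
vals, vecs = np.linalg.eigh(cov)                  # eigendecomposition
order = np.argsort(vals)[::-1]                    # sort by explained variance
feat = vecs[:, order[:2]]                         # keep 2 components (feature vector)
X_reduced = X_white @ feat                        # equivalent of _transform
X_restored = X_reduced @ feat.T + X.mean(axis=0)  # equivalent of _inverse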
class OSError(Exception): <NEW_LINE> <INDENT> pass | OSError. | 6259908576e4537e8c3f10e5 |
class IncorrectPageExpection(Exception): <NEW_LINE> <INDENT> pass | The exception should be thrown when the page cannot be displayed. | 625990857cff6e4e811b75a8
class Person(core.XmlElement): <NEW_LINE> <INDENT> name = Name <NEW_LINE> email = Email <NEW_LINE> uri = Uri | A foundation class which atom:author and atom:contributor extend.
A person contains information like name, email address, and web page URI for
an author or contributor to an Atom feed. | 62599085fff4ab517ebcf37c |
class Bullet(Sprite): <NEW_LINE> <INDENT> def __init__(self, ai_settings, screen, ship): <NEW_LINE> <INDENT> super(Bullet,self).__init__() <NEW_LINE> self.screen = screen <NEW_LINE> self.rect = pygame.Rect(0,0,ai_settings.bullet_width, ai_settings.bullet_height) <NEW_LINE> self.rect.centerx = ship.rect.centerx <NEW_LINE> self.rect.top = ship.rect.top <NEW_LINE> self.y = float(self.rect.y) <NEW_LINE> self.color = ai_settings.bullet_color <NEW_LINE> self.speed_factor = ai_settings.bullet_speed_factor <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.y -=self.speed_factor <NEW_LINE> self.rect.y = self.y <NEW_LINE> <DEDENT> def draw_bullet(self): <NEW_LINE> <INDENT> pygame.draw.rect(self.screen, self.color, self.rect) | Class for managing bullets | 625990855fc7496912d4901e
class DescribeProxyAndStatisticsListenersRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ProjectId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ProjectId = params.get("ProjectId") | DescribeProxyAndStatisticsListeners request structure. | 625990853317a56b869bf2f7
class BertTransformer(layers.Layer): <NEW_LINE> <INDENT> def __init__(self, hidden_size, seq_length, num_hidden_layers, num_attention_heads=12, intermediate_size=3072, attention_probs_dropout_prob=0.1, use_one_hot_embeddings=False, initializer_range=0.02, hidden_dropout_prob=0.1, use_relative_positions=False, hidden_act="gelu", compute_type=ts.float32, return_all_encoders=False): <NEW_LINE> <INDENT> super(BertTransformer, self).__init__() <NEW_LINE> self.return_all_encoders = return_all_encoders <NEW_LINE> slayers = [] <NEW_LINE> for _ in range(num_hidden_layers): <NEW_LINE> <INDENT> layer = BertEncoderLayer(hidden_size=hidden_size, seq_length=seq_length, num_attention_heads=num_attention_heads, intermediate_size=intermediate_size, attention_probs_dropout_prob=attention_probs_dropout_prob, use_one_hot_embeddings=use_one_hot_embeddings, initializer_range=initializer_range, hidden_dropout_prob=hidden_dropout_prob, use_relative_positions=use_relative_positions, hidden_act=hidden_act, compute_type=compute_type) <NEW_LINE> slayers.append(layer) <NEW_LINE> <DEDENT> self.layers = layers.LayerList(slayers) <NEW_LINE> self.reshape = P.Reshape() <NEW_LINE> self.shape = (-1, hidden_size) <NEW_LINE> self.out_shape = (-1, seq_length, hidden_size) <NEW_LINE> <DEDENT> def construct(self, input_tensor, attention_mask): <NEW_LINE> <INDENT> prev_output = self.reshape(input_tensor, self.shape) <NEW_LINE> all_encoder_layers = () <NEW_LINE> for layer_module in self.layers: <NEW_LINE> <INDENT> layer_output = layer_module(prev_output, attention_mask) <NEW_LINE> prev_output = layer_output <NEW_LINE> if self.return_all_encoders: <NEW_LINE> <INDENT> layer_output = self.reshape(layer_output, self.out_shape) <NEW_LINE> all_encoder_layers = all_encoder_layers + (layer_output,) <NEW_LINE> <DEDENT> <DEDENT> if not self.return_all_encoders: <NEW_LINE> <INDENT> prev_output = self.reshape(prev_output, self.out_shape) <NEW_LINE> all_encoder_layers = all_encoder_layers + (prev_output,) <NEW_LINE> <DEDENT> return all_encoder_layers | Multi-layer bert transformer.
Args:
hidden_size (int): Size of the encoder layers.
seq_length (int): Length of input sequence.
num_hidden_layers (int): Number of hidden layers in encoder layers.
num_attention_heads (int): Number of attention heads in encoder layers. Default: 12.
intermediate_size (int): Size of intermediate layer in encoder layers. Default: 3072.
attention_probs_dropout_prob (float): The dropout probability for
BertAttention. Default: 0.1.
use_one_hot_embeddings (bool): Specifies whether to use one hot encoding form. Default: False.
initializer_range (float): Initialization value of TruncatedNormal. Default: 0.02.
hidden_dropout_prob (float): The dropout probability for BertOutput. Default: 0.1.
use_relative_positions (bool): Specifies whether to use relative positions. Default: False.
hidden_act (str): Activation function used in the encoder layers. Default: "gelu".
compute_type (:class:`tinyms.dtype`): Compute type in BertTransformer. Default: ts.float32.
return_all_encoders (bool): Specifies whether to return all encoders. Default: False. | 625990854c3428357761be21 |
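A plain NumPy sketch of the reshape bookkeeping described in the BertTransformer row: hidden states are flattened to (-1, hidden_size) between encoder layers and restored to (-1, seq_length, hidden_size) at the end. Batch size and dimensions are invented for illustration; the real row runs the flattened states through a stack of BertEncoderLayer modules.

# Shape bookkeeping only; encoder layer computation is omitted.
import numpy as np

batch, seq_length, hidden_size = 2, 128, 768
hidden = np.zeros((batch, seq_length, hidden_size))
flat = hidden.reshape(-1, hidden_size)                  # self.shape = (-1, hidden_size)
# ...each BertEncoderLayer consumes and returns the flattened states...
restored = flat.reshape(-1, seq_length, hidden_size)    # self.out_shape
assert restored.shape == (batch, seq_length, hidden_size)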
class Future(object): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> raise NotImplementedError | Generic class to defer some work.
Handled specially in RegexLexerMeta, to support regex string construction at
first use. | 62599085283ffb24f3cf5407 |
class DeleteUserFeedback(FeedbackUserMixin, DeleteView): <NEW_LINE> <INDENT> model = models.UserFeedback <NEW_LINE> success_url = reverse_lazy("feedback:user_feedback") <NEW_LINE> def get(self, *args, **kwargs): <NEW_LINE> <INDENT> return HttpResponseNotAllowed(["POST"]) | View for deleting user feedback. | 6259908560cbc95b06365b1f |
class SimpleCacheBackend(BaseCacheBackend): <NEW_LINE> <INDENT> thumbnails = {} <NEW_LINE> def _get(self, thumbnail_name): <NEW_LINE> <INDENT> if thumbnail_name in self.thumbnails: <NEW_LINE> <INDENT> return self.thumbnails[thumbnail_name] <NEW_LINE> <DEDENT> <DEDENT> def _set(self, thumbnail_name, thumbnail): <NEW_LINE> <INDENT> self.thumbnails[thumbnail_name] = thumbnail | Cache backend that stores objects in a dict on the backend instance. | 62599085dc8b845886d55121 |
class GameDetail(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100) <NEW_LINE> platform = models.CharField(max_length=100) <NEW_LINE> shorthand_name = models.CharField(max_length=10) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name + ' on ' + self.platform | This is the actual game definition. Should only have one instance per game per platform. | 6259908563b5f9789fe86cd0 |
class BatchSamplerWithNegativeSamples(torch.utils.data.Sampler): <NEW_LINE> <INDENT> def __init__(self, pos_sampler, neg_sampler, batch_size, items): <NEW_LINE> <INDENT> self._pos_sampler = pos_sampler <NEW_LINE> self._neg_sampler = neg_sampler <NEW_LINE> self._items = items <NEW_LINE> assert batch_size % 2 == 0, 'Batch size must be divisible by two for negative samples.' <NEW_LINE> self._batch_size = batch_size <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> batch, neg_batch = [], [] <NEW_LINE> neg_sampler = iter(self._neg_sampler) <NEW_LINE> for pos_idx in self._pos_sampler: <NEW_LINE> <INDENT> batch.append(pos_idx) <NEW_LINE> neg_idx = pos_idx <NEW_LINE> while self._items[neg_idx] == self._items[pos_idx]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> neg_idx = next(neg_sampler) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> neg_sampler = iter(self._neg_sampler) <NEW_LINE> neg_idx = next(neg_sampler) <NEW_LINE> <DEDENT> <DEDENT> neg_batch.append(neg_idx) <NEW_LINE> if len(batch) == self._batch_size // 2: <NEW_LINE> <INDENT> batch.extend(neg_batch) <NEW_LINE> yield batch <NEW_LINE> batch, neg_batch = [], [] <NEW_LINE> <DEDENT> <DEDENT> return <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._pos_sampler) // self._batch_size | Samples batches where first half is positive, second half are negative.
We discard the last batch and check that we never use the same positive and negative sample. | 62599085a8370b77170f1f33 |
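A hedged usage sketch for the sampler row above, assuming `items` maps dataset index to item id and using stock PyTorch samplers for the positive and negative index streams.

# Usage sketch (assumed setup); the sampler class is the one defined in the row above.
import torch

items = [0, 0, 1, 1, 2, 2]                          # index -> item id
pos_sampler = torch.utils.data.SequentialSampler(items)
neg_sampler = torch.utils.data.RandomSampler(items)
sampler = BatchSamplerWithNegativeSamples(pos_sampler, neg_sampler,
                                          batch_size=4, items=items)
for batch in sampler:
    # first half: positive indices; second half: negatives with different item ids
    print(batch)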
class ApiVersion(proto.Enum): <NEW_LINE> <INDENT> V2 = 0 <NEW_LINE> V1 = 1 | Logging API version. | 625990857c178a314d78e99e |
class Page(Article): <NEW_LINE> <INDENT> pass | Holds information about an individual page. | 625990855fdd1c0f98e5fae3 |
class Contact(BaseModel): <NEW_LINE> <INDENT> name: Optional[str] = None <NEW_LINE> url: Optional[AnyUrl] = None <NEW_LINE> email: Optional[str] = None <NEW_LINE> class Config: <NEW_LINE> <INDENT> extra = Extra.allow <NEW_LINE> schema_extra = { "examples": [ {"name": "API Support", "url": "http://www.example.com/support", "email": "[email protected]"} ] } | Contact information for the exposed API.
See Also:
- https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#contactObject | 625990867cff6e4e811b75ac |
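A small sketch exercising the Contact model above; it assumes the pydantic v1 API implied by the Extra.allow / schema_extra config in the row, and the field values are taken from the row's own schema example.

# Assumes pydantic v1, matching the Config shown above.
contact = Contact(name="API Support",
                  url="http://www.example.com/support",
                  email="support@example.com")
print(contact.dict(exclude_none=True))  # dict of the provided fields; url validated as AnyUrl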