code | docstring | _id |
---|---|---|
class BaseSurcharge: <NEW_LINE> <INDENT> def calculate(self, basket, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError | Surcharge interface class
This is the superclass to the classes in surcharges.py. This allows using all
surcharges interchangeably (aka polymorphism).
The interface is all properties. | 6259908aad47b63b2c5a9449 |
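For context, a minimal sketch of how a concrete surcharge could implement this interface; `PercentageSurcharge`, the 5% rate, and the `basket.total` attribute are illustrative assumptions, not part of the source.

```python
from decimal import Decimal


class PercentageSurcharge(BaseSurcharge):
    """Hypothetical concrete surcharge adding a flat percentage of the basket total."""

    def __init__(self, rate=Decimal("0.05")):
        self.rate = rate

    def calculate(self, basket, **kwargs):
        # Assumes basket.total is a Decimal; adapt to the real basket API.
        return basket.total * self.rate
```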
class Surface(object): <NEW_LINE> <INDENT> def __init__(self, model, lidcontrol): <NEW_LINE> <INDENT> self._model = model <NEW_LINE> self._lidcontrol = lidcontrol <NEW_LINE> self._lidcontrolid = lidcontrol._lidcontrolid <NEW_LINE> <DEDENT> @property <NEW_LINE> def thickness(self): <NEW_LINE> <INDENT> return self._model.getLidCParam(self._lidcontrolid, LidLayers.surface.value, LidLayersProperty.thickness.value) <NEW_LINE> <DEDENT> @thickness.setter <NEW_LINE> def thickness(self, param): <NEW_LINE> <INDENT> return self._model.setLidCParam(self._lidcontrolid, LidLayers.surface.value, LidLayersProperty.thickness.value, param) <NEW_LINE> <DEDENT> @property <NEW_LINE> def void_fraction(self): <NEW_LINE> <INDENT> return self._model.getLidCParam(self._lidcontrolid, LidLayers.surface.value, LidLayersProperty.voidFrac.value) <NEW_LINE> <DEDENT> @void_fraction.setter <NEW_LINE> def void_fraction(self, param): <NEW_LINE> <INDENT> return self._model.setLidCParam(self._lidcontrolid, LidLayers.surface.value, LidLayersProperty.voidFrac.value, param) <NEW_LINE> <DEDENT> @property <NEW_LINE> def roughness(self): <NEW_LINE> <INDENT> return self._model.getLidCParam(self._lidcontrolid, LidLayers.surface.value, LidLayersProperty.roughness.value) <NEW_LINE> <DEDENT> @roughness.setter <NEW_LINE> def roughness(self, param): <NEW_LINE> <INDENT> return self._model.setLidCParam(self._lidcontrolid, LidLayers.surface.value, LidLayersProperty.roughness.value, param) <NEW_LINE> <DEDENT> @property <NEW_LINE> def slope(self): <NEW_LINE> <INDENT> return self._model.getLidCParam(self._lidcontrolid, LidLayers.surface.value, LidLayersProperty.surfSlope.value) <NEW_LINE> <DEDENT> @slope.setter <NEW_LINE> def slope(self, param): <NEW_LINE> <INDENT> return self._model.setLidCParam(self._lidcontrolid, LidLayers.surface.value, LidLayersProperty.surfSlope.value, param) <NEW_LINE> <DEDENT> @property <NEW_LINE> def side_slope(self): <NEW_LINE> <INDENT> return self._model.getLidCParam(self._lidcontrolid, LidLayers.surface.value, LidLayersProperty.sideSlope.value) <NEW_LINE> <DEDENT> @side_slope.setter <NEW_LINE> def side_slope(self, param): <NEW_LINE> <INDENT> return self._model.setLidCParam(self._lidcontrolid, LidLayers.surface.value, LidLayersProperty.sideSlope.value, param) <NEW_LINE> <DEDENT> @property <NEW_LINE> def alpha(self): <NEW_LINE> <INDENT> return self._model.getLidCParam(self._lidcontrolid, LidLayers.surface.value, LidLayersProperty.alpha.value) | +--------------------+--------------------+--------------------+--------------------+
| Layer | Parameter | Setter Before Sim | Setter During Sim |
+====================+====================+====================+====================+
| Surface | thickness | enabled | disabled |
+--------------------+--------------------+--------------------+--------------------+
| Surface | void_fraction | enabled | disabled |
+--------------------+--------------------+--------------------+--------------------+
| Surface | roughness | enabled | enabled |
+--------------------+--------------------+--------------------+--------------------+
| Surface | slope | enabled | disabled |
+--------------------+--------------------+--------------------+--------------------+
| Surface | side_slope | enabled | disabled |
+--------------------+--------------------+--------------------+--------------------+
| Surface | alpha | enabled | disabled |
+--------------------+--------------------+--------------------+--------------------+ | 6259908a55399d3f0562810b |
class UuidMixin(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._uuid = None <NEW_LINE> <DEDENT> def get_uuid(self): <NEW_LINE> <INDENT> if not self._uuid: <NEW_LINE> <INDENT> self._uuid = hashlib.sha1(b("%s:%d" % (self.id, self.driver.type))).hexdigest() <NEW_LINE> <DEDENT> return self._uuid <NEW_LINE> <DEDENT> @property <NEW_LINE> def uuid(self): <NEW_LINE> <INDENT> return self.get_uuid() | Mixin class for get_uuid function. | 6259908a3346ee7daa33845e |
class MasterOfIntrigue(Feature): <NEW_LINE> <INDENT> name = "Master of Intrigue" <NEW_LINE> source = "Rogue (Mastermind)" | When you choose this archetype at 3rd level, you gain proficiency with the
disguise kit, the forgery kit, and one gaming set of your choice. You also
learn two languages of your choice. Additionally, you can unerringly mimic
the speech patterns and accent of a creature that you hear speak for at
least 1 minute, enabling you to pass yourself off as a native speaker of a
particular land, provided that you know the language. | 6259908adc8b845886d551b0 |
class TestUrlsApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = lockss_metadata.api.urls_api.UrlsApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_urls_doi(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_urls_open_url(self): <NEW_LINE> <INDENT> pass | UrlsApi unit test stubs | 6259908a50812a4eaa6219c1 |
class Group(TokenConverter): <NEW_LINE> <INDENT> def __init__( self, expr ): <NEW_LINE> <INDENT> super(Group,self).__init__( expr ) <NEW_LINE> self.saveAsList = True <NEW_LINE> <DEDENT> def postParse( self, instring, loc, tokenlist ): <NEW_LINE> <INDENT> return [ tokenlist ] | Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. | 6259908aa8370b77170f1fc5 |
class ServiceMetadata(Model): <NEW_LINE> <INDENT> def __init__(self, display_name: str=None, image_url: str=None, long_description: str=None, provider_display_name: str=None, documentation_url: str=None, support_url: str=None, extras: object=None): <NEW_LINE> <INDENT> self.swagger_types = { 'display_name': str, 'image_url': str, 'long_description': str, 'provider_display_name': str, 'documentation_url': str, 'support_url': str, 'extras': object } <NEW_LINE> self.attribute_map = { 'display_name': 'displayName', 'image_url': 'imageUrl', 'long_description': 'longDescription', 'provider_display_name': 'providerDisplayName', 'documentation_url': 'documentationUrl', 'support_url': 'supportUrl', 'extras': 'extras' } <NEW_LINE> self._display_name = display_name <NEW_LINE> self._image_url = image_url <NEW_LINE> self._long_description = long_description <NEW_LINE> self._provider_display_name = provider_display_name <NEW_LINE> self._documentation_url = documentation_url <NEW_LINE> self._support_url = support_url <NEW_LINE> self._extras = extras <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dikt) -> 'ServiceMetadata': <NEW_LINE> <INDENT> return deserialize_model(dikt, cls) <NEW_LINE> <DEDENT> @property <NEW_LINE> def display_name(self) -> str: <NEW_LINE> <INDENT> return self._display_name <NEW_LINE> <DEDENT> @display_name.setter <NEW_LINE> def display_name(self, display_name: str): <NEW_LINE> <INDENT> self._display_name = display_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def image_url(self) -> str: <NEW_LINE> <INDENT> return self._image_url <NEW_LINE> <DEDENT> @image_url.setter <NEW_LINE> def image_url(self, image_url: str): <NEW_LINE> <INDENT> self._image_url = image_url <NEW_LINE> <DEDENT> @property <NEW_LINE> def long_description(self) -> str: <NEW_LINE> <INDENT> return self._long_description <NEW_LINE> <DEDENT> @long_description.setter <NEW_LINE> def long_description(self, long_description: str): <NEW_LINE> <INDENT> self._long_description = long_description <NEW_LINE> <DEDENT> @property <NEW_LINE> def provider_display_name(self) -> str: <NEW_LINE> <INDENT> return self._provider_display_name <NEW_LINE> <DEDENT> @provider_display_name.setter <NEW_LINE> def provider_display_name(self, provider_display_name: str): <NEW_LINE> <INDENT> self._provider_display_name = provider_display_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def documentation_url(self) -> str: <NEW_LINE> <INDENT> return self._documentation_url <NEW_LINE> <DEDENT> @documentation_url.setter <NEW_LINE> def documentation_url(self, documentation_url: str): <NEW_LINE> <INDENT> self._documentation_url = documentation_url <NEW_LINE> <DEDENT> @property <NEW_LINE> def support_url(self) -> str: <NEW_LINE> <INDENT> return self._support_url <NEW_LINE> <DEDENT> @support_url.setter <NEW_LINE> def support_url(self, support_url: str): <NEW_LINE> <INDENT> self._support_url = support_url <NEW_LINE> <DEDENT> @property <NEW_LINE> def extras(self) -> object: <NEW_LINE> <INDENT> return self._extras <NEW_LINE> <DEDENT> @extras.setter <NEW_LINE> def extras(self, extras: object): <NEW_LINE> <INDENT> self._extras = extras | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259908ae1aae11d1e7cf610 |
class SyncMapFormatJSON(SyncMapFormatBase): <NEW_LINE> <INDENT> TAG = u"SyncMapFormatJSON" <NEW_LINE> DEFAULT = "json" <NEW_LINE> def parse(self, input_text, syncmap): <NEW_LINE> <INDENT> contents_dict = json.loads(input_text) <NEW_LINE> for fragment in contents_dict["fragments"]: <NEW_LINE> <INDENT> self._add_fragment( syncmap=syncmap, identifier=fragment["id"], language=fragment["language"], lines=fragment["lines"], begin=gf.time_from_ssmmm(fragment["begin"]), end=gf.time_from_ssmmm(fragment["end"]) ) <NEW_LINE> <DEDENT> <DEDENT> def format(self, syncmap): <NEW_LINE> <INDENT> return syncmap.json_string | Handler for JSON I/O format. | 6259908a97e22403b383caf0 |
class ValidateAttributesTest(TestCase): <NEW_LINE> <INDENT> def test_running_ok(self): <NEW_LINE> <INDENT> svc_systemv.SvcSystemV(MagicMock(), "foo", {'running': True}) <NEW_LINE> svc_systemv.SvcSystemV(MagicMock(), "foo", {'running': False}) <NEW_LINE> <DEDENT> def test_running_not_ok(self): <NEW_LINE> <INDENT> with self.assertRaises(BundleError): <NEW_LINE> <INDENT> svc_systemv.SvcSystemV(MagicMock(), "foo", {'running': 0}) <NEW_LINE> <DEDENT> with self.assertRaises(BundleError): <NEW_LINE> <INDENT> svc_systemv.SvcSystemV(MagicMock(), "foo", {'running': 1}) | Tests bundlewrap.items.svc_systemv.SvcSystemV.validate_attributes. | 6259908aadb09d7d5dc0c153 |
class TextDumper(RecvmsgDatagramProtocol): <NEW_LINE> <INDENT> def __init__(self, outfile, protocol=None): <NEW_LINE> <INDENT> self._outfile = outfile <NEW_LINE> self._outfile.write("# Generated by aiocoap.dump %s\n"%datetime.now()) <NEW_LINE> self._outfile.write("# Convert to pcap-ng by using:\n#\n") <NEW_LINE> self._outfile.write("""# text2pcap -n -u 5683,5683 -D -t "%Y-%m-%d %H:%M:%S."\n\n""") <NEW_LINE> self._protocol = protocol <NEW_LINE> self._transport = None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def endpointfactory(cls, outfile, actual_protocol): <NEW_LINE> <INDENT> def factory(): <NEW_LINE> <INDENT> dumper = cls(outfile, actual_protocol()) <NEW_LINE> return dumper <NEW_LINE> <DEDENT> return factory <NEW_LINE> <DEDENT> protocol = property(lambda self: self._protocol) <NEW_LINE> def datagram_msg_received(self, data, ancdata, flags, address): <NEW_LINE> <INDENT> self._outfile.write("I %s 000 %s\n"%(datetime.now(), " ".join("%02x"%c for c in data))) <NEW_LINE> if self._protocol is not None: <NEW_LINE> <INDENT> self._protocol.datagram_msg_received(data, ancdata, flags, address) <NEW_LINE> <DEDENT> <DEDENT> def sendmsg(self, data, ancdata, flags, address): <NEW_LINE> <INDENT> self._outfile.write("O %s 000 %s\n"%(datetime.now(), " ".join("%02x"%c for c in data))) <NEW_LINE> if self._protocol is not None: <NEW_LINE> <INDENT> self._transport.sendmsg(data, ancdata, flags, address) <NEW_LINE> <DEDENT> <DEDENT> def connection_made(self, transport): <NEW_LINE> <INDENT> self._transport = transport <NEW_LINE> self._protocol.connection_made(self) <NEW_LINE> <DEDENT> _sock = property(lambda self: self._transport._sock) <NEW_LINE> def close(self): <NEW_LINE> <INDENT> self._outfile.close() <NEW_LINE> self._transport.close() <NEW_LINE> <DEDENT> def connection_lost(self, exc): <NEW_LINE> <INDENT> self._protocol.connection_lost(exc) <NEW_LINE> <DEDENT> def get_extra_info(self, name, default=None): <NEW_LINE> <INDENT> return self._transport.get_extra_info(name, default) | Plain text network data dumper
A TextDumper can be used to log network traffic into a file that can be
converted to a PCAP-NG file as described in its header.
Currently, this discards information like addresses; it is unknown how that
information can be transferred into a dump reader easily while
simultaneously staying at application level and staying ignorant of
particular underlying protocols' data structures.
It could previously be used stand-alone (outside of the asyncio
transport/protocol mechanisms) when instantiated only with an output file
(the :meth:`datagram_received` and :meth:`sendto` methods were used), but with the
:meth:`datagram_msg_received` substitute method, this is probably
impractical now.
To use it between an asyncio transport and protocol, use the
:meth:`endpointfactory` method. | 6259908a656771135c48ae2d |
class XapiCredentialsListSchema(object): <NEW_LINE> <INDENT> swagger_types = { 'xapi_credentials': 'list[XapiCredentialSchema]', 'more': 'str' } <NEW_LINE> attribute_map = { 'xapi_credentials': 'xapiCredentials', 'more': 'more' } <NEW_LINE> def __init__(self, xapi_credentials=None, more=None): <NEW_LINE> <INDENT> self._xapi_credentials = None <NEW_LINE> self._more = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.xapi_credentials = xapi_credentials <NEW_LINE> if more is not None: <NEW_LINE> <INDENT> self.more = more <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def xapi_credentials(self): <NEW_LINE> <INDENT> return self._xapi_credentials <NEW_LINE> <DEDENT> @xapi_credentials.setter <NEW_LINE> def xapi_credentials(self, xapi_credentials): <NEW_LINE> <INDENT> if xapi_credentials is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `xapi_credentials`, must not be `None`") <NEW_LINE> <DEDENT> self._xapi_credentials = xapi_credentials <NEW_LINE> <DEDENT> @property <NEW_LINE> def more(self): <NEW_LINE> <INDENT> return self._more <NEW_LINE> <DEDENT> @more.setter <NEW_LINE> def more(self, more): <NEW_LINE> <INDENT> self._more = more <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(XapiCredentialsListSchema, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, XapiCredentialsListSchema): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259908aec188e330fdfa4a7 |
class UsersAvgManager(object): <NEW_LINE> <INDENT> lock = threading.Lock() <NEW_LINE> _total_num = 0 <NEW_LINE> _total_count = 0.0 <NEW_LINE> users_avg = dict() <NEW_LINE> last_updated_ts = -1 <NEW_LINE> last_reported_ts = -1 <NEW_LINE> @classmethod <NEW_LINE> def add(cls, user_id, num): <NEW_LINE> <INDENT> with cls.lock: <NEW_LINE> <INDENT> cls._total_num += num <NEW_LINE> cls._total_count += 1 <NEW_LINE> cls.users_avg.setdefault(user_id, AvgCalc()) <NEW_LINE> cls.users_avg[user_id].add(num) <NEW_LINE> cls.last_updated_ts = time.time() <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def get_total_avg(cls): <NEW_LINE> <INDENT> with cls.lock: <NEW_LINE> <INDENT> if cls.last_updated_ts > cls.last_reported_ts: <NEW_LINE> <INDENT> cls.last_reported_ts = time.time() <NEW_LINE> return cls._total_num/cls._total_count <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_user_avg(cls, user_id): <NEW_LINE> <INDENT> return cls.users_avg[user_id].get_avg() | manages the average for each user | 6259908af9cc0f698b1c60c8 |
class EnumSubset(InterfaceItemBase): <NEW_LINE> <INDENT> def __init__(self, name, enum, description=None, design_description=None, issues=None, todos=None, platform=None, allowed_elements=None, since=None, until=None, deprecated=None, removed=None, history=None): <NEW_LINE> <INDENT> super(EnumSubset, self).__init__( name, description=description, design_description=design_description, issues=issues, todos=todos, platform=platform, since=since, until=until, deprecated=deprecated, removed=removed, history=history) <NEW_LINE> self.enum = enum <NEW_LINE> self.allowed_elements = allowed_elements if allowed_elements is not None else {} | Enumeration subset.
:param name: item name
:param description: list of string description elements
:param design_description: list of string design description elements
:param issues: list of issues
:param todos: list of string todo elements
:param platform: optional platform (string or None)
:param since: string that defines the rpc spec version an element was introduced
:param until: string that defines the rpc spec version an element was removed, deprecated, or changed
:param deprecated: boolean that defines if an element is planned to be removed in a future release
:param removed: boolean that defines if an element was removed from the api
:param history: array of api element signature changes
:param enum: enumeration
:param allowed_elements: dictionary of elements of enumeration
which are allowed in this subset | 6259908a167d2b6e312b8394 |
class TestApplicationCustomer(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return ApplicationCustomer( id = 56, created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), integration_id = '0', attributes = None, account_id = 56, closed_sessions = 56, total_sales = 1.337, loyalty_memberships = [ talon_one.models.loyalty_membership.LoyaltyMembership( joined = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), loyalty_program_id = 56, ) ], audience_memberships = [ talon_one.models.audience_membership.AudienceMembership( id = 56, name = '0', ) ], last_activity = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f') ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return ApplicationCustomer( id = 56, created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), integration_id = '0', attributes = None, account_id = 56, closed_sessions = 56, total_sales = 1.337, last_activity = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), ) <NEW_LINE> <DEDENT> <DEDENT> def testApplicationCustomer(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True) | ApplicationCustomer unit test stubs | 6259908a3617ad0b5ee07d4c |
class SamplerSender(object): <NEW_LINE> <INDENT> def __init__(self, namebook, net_type='socket'): <NEW_LINE> <INDENT> assert len(namebook) > 0, 'namebook cannot be empty.' <NEW_LINE> assert net_type in ('socket', 'mpi'), 'Unknown network type.' <NEW_LINE> self._namebook = namebook <NEW_LINE> self._sender = _create_sender(net_type) <NEW_LINE> for ID, addr in self._namebook.items(): <NEW_LINE> <INDENT> ip_port = addr.split(':') <NEW_LINE> assert len(ip_port) == 2, 'Uncorrect format of IP address.' <NEW_LINE> _add_receiver_addr(self._sender, ip_port[0], int(ip_port[1]), ID) <NEW_LINE> <DEDENT> _sender_connect(self._sender) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> _finalize_sender(self._sender) <NEW_LINE> <DEDENT> def send(self, nodeflow, recv_id): <NEW_LINE> <INDENT> assert recv_id >= 0, 'recv_id cannot be a negative number.' <NEW_LINE> _send_nodeflow(self._sender, nodeflow, recv_id) <NEW_LINE> <DEDENT> def batch_send(self, nf_list, id_list): <NEW_LINE> <INDENT> assert len(nf_list) > 0, 'nf_list cannot be empty.' <NEW_LINE> assert len(nf_list) == len(id_list), 'The length of nf_list must be equal to id_list.' <NEW_LINE> for i in range(len(nf_list)): <NEW_LINE> <INDENT> assert id_list[i] >= 0, 'recv_id cannot be a negative number.' <NEW_LINE> _send_nodeflow(self._sender, nf_list[i], id_list[i]) <NEW_LINE> <DEDENT> <DEDENT> def signal(self, recv_id): <NEW_LINE> <INDENT> assert recv_id >= 0, 'recv_id cannot be a negative number.' <NEW_LINE> _send_sampler_end_signal(self._sender, recv_id) | SamplerSender for DGL distributed training.
Users use SamplerSender to send sampled subgraphs (NodeFlow)
to a remote SamplerReceiver. Note that a SamplerSender can currently connect
to multiple SamplerReceivers. The underlying implementation
sends different subgraphs to different SamplerReceivers in parallel
via multi-threading.
Parameters
----------
namebook : dict
IP address namebook of SamplerReceiver, where the
key is the receiver's ID (starting from 0) and the value is the receiver's address, e.g.,
{ 0:'168.12.23.45:50051',
1:'168.12.23.21:50051',
2:'168.12.46.12:50051' }
net_type : str
networking type, e.g., 'socket' (default) or 'mpi'. | 6259908a2c8b7c6e89bd53e1 |
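A minimal usage sketch based on the constructor and method signatures above; the addresses mirror the docstring example, and `nodeflow` is assumed to be produced elsewhere by a DGL sampler.

```python
# Usage sketch; `nodeflow` is assumed to come from a DGL neighbor sampler.
namebook = {
    0: '168.12.23.45:50051',
    1: '168.12.23.21:50051',
}
sender = SamplerSender(namebook, net_type='socket')

sender.send(nodeflow, recv_id=0)      # send one NodeFlow to receiver 0
sender.batch_send([nodeflow], [1])    # send a batch, one receiver ID per NodeFlow
sender.signal(recv_id=0)              # tell receiver 0 that sampling has finished
```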
class RecordsStatistics: <NEW_LINE> <INDENT> def __init__(self, records): <NEW_LINE> <INDENT> self._records = records <NEW_LINE> <DEDENT> @property <NEW_LINE> def count(self): <NEW_LINE> <INDENT> return len(self._records) <NEW_LINE> <DEDENT> @property <NEW_LINE> def total_defect(self): <NEW_LINE> <INDENT> return sum(record.defect_count for record in self._records) <NEW_LINE> <DEDENT> @property <NEW_LINE> def total_comment(self): <NEW_LINE> <INDENT> return sum(record.comment_count for record in self._records) <NEW_LINE> <DEDENT> @property <NEW_LINE> def total_loc(self): <NEW_LINE> <INDENT> return sum(record.loc for record in self._records) <NEW_LINE> <DEDENT> @property <NEW_LINE> def total_loc_changed(self): <NEW_LINE> <INDENT> return sum(record.loc_changed for record in self._records) <NEW_LINE> <DEDENT> @property <NEW_LINE> def comment_density_uploaded(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return (self.total_comment * 1000) / self.total_loc <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def comment_density_changed(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return (self.total_comment * 1000) / self.total_loc_changed <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def defect_density_uploaded(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return (self.total_defect * 1000) / self.total_loc <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def defect_density_changed(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return (self.total_defect * 1000) / self.total_loc_changed <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def total_person_time_in_second(self): <NEW_LINE> <INDENT> return sum( record.total_person_time_in_second for record in self._records ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def total_person_time_in_hour(self): <NEW_LINE> <INDENT> return self.total_person_time_in_second / (60 * 60) <NEW_LINE> <DEDENT> @property <NEW_LINE> def inspection_rate(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return (self.total_loc_changed / (self.total_person_time_in_hour * 1000)) <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> return 0 | Review records statistics class.
Args:
records: A list of records. | 6259908a7cff6e4e811b763d |
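A worked sketch of the densities computed above; the `SimpleNamespace` records are hypothetical stand-ins that expose the attributes the properties read.

```python
from types import SimpleNamespace

records = [
    SimpleNamespace(defect_count=2, comment_count=5, loc=400,
                    loc_changed=100, total_person_time_in_second=3600),
    SimpleNamespace(defect_count=1, comment_count=3, loc=600,
                    loc_changed=100, total_person_time_in_second=1800),
]
stats = RecordsStatistics(records)

stats.total_defect               # 3
stats.defect_density_uploaded    # 3 * 1000 / 1000 LOC = 3.0 defects per KLOC
stats.inspection_rate            # 200 LOC / (1.5 h * 1000) ≈ 0.13 KLOC per hour
```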
class DnsName(vstruct.VArray): <NEW_LINE> <INDENT> def __init__(self, name=None): <NEW_LINE> <INDENT> vstruct.VArray.__init__(self) <NEW_LINE> if name != None: <NEW_LINE> <INDENT> for part in name.split('.'): <NEW_LINE> <INDENT> self.vsAddElement( DnsNameLabel( part ) ) <NEW_LINE> <DEDENT> self.vsAddElement( DnsNameLabel('') ) <NEW_LINE> <DEDENT> <DEDENT> def getTypeVal(self): <NEW_LINE> <INDENT> nametype = None <NEW_LINE> namepointer = None <NEW_LINE> labels = [] <NEW_LINE> for fname,fobj in self.vsGetFields(): <NEW_LINE> <INDENT> nametype = fobj.getNameType() <NEW_LINE> if nametype == DNS_NAMETYPE_LABEL and fobj.length != 0: <NEW_LINE> <INDENT> labels.append(fobj.label) <NEW_LINE> <DEDENT> if nametype == DNS_NAMETYPE_POINTER: <NEW_LINE> <INDENT> namepointer = fobj.getNamePointer() <NEW_LINE> if labels: <NEW_LINE> <INDENT> nametype = DNS_NAMETYPE_LABELPOINTER <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> joinedlabels = '.'.join(labels) <NEW_LINE> if nametype == DNS_NAMETYPE_LABEL: <NEW_LINE> <INDENT> return nametype,joinedlabels <NEW_LINE> <DEDENT> elif nametype == DNS_NAMETYPE_POINTER: <NEW_LINE> <INDENT> return nametype,namepointer <NEW_LINE> <DEDENT> elif nametype == DNS_NAMETYPE_LABELPOINTER: <NEW_LINE> <INDENT> return nametype,(joinedlabels,namepointer) <NEW_LINE> <DEDENT> raise DnsParseError('Unrecognized label.') <NEW_LINE> <DEDENT> def vsParse(self, bytez, offset=0): <NEW_LINE> <INDENT> while offset < len(bytez): <NEW_LINE> <INDENT> nl = DnsNameLabel() <NEW_LINE> labelofs = offset <NEW_LINE> offset = nl.vsParse(bytez, offset=offset) <NEW_LINE> self.vsAddElement(nl) <NEW_LINE> if nl.isNamePointer() and nl.getNamePointer() >= labelofs: <NEW_LINE> <INDENT> raise DnsParseError('Label points forward (or to self).') <NEW_LINE> <DEDENT> if nl.isNameTerm(): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return offset | The contiguous labels (DnsNameLabel()) in a DNS Name field. Note that the
last label may simply be a pointer to an offset earlier in the DNS message. | 6259908a3346ee7daa338460 |
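A brief sketch of what the class encodes, assuming `DnsNameLabel` and the `DNS_NAMETYPE_*` constants defined elsewhere in the source module.

```python
# Assumes DnsNameLabel and DNS_NAMETYPE_LABEL come from the same module.
name = DnsName('www.example.com')   # labels: 'www', 'example', 'com', '' (terminator)
kind, value = name.getTypeVal()
# kind  == DNS_NAMETYPE_LABEL
# value == 'www.example.com'        # non-empty labels re-joined with '.'
```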
class RNNDecoder(nn.Module): <NEW_LINE> <INDENT> def __init__( self, num_features, embeddingsize, hiddensize, padding_idx=0, rnn_class='lstm', numlayers=2, dropout=0.1, bidir_input=False, attn_type='none', attn_time='pre', attn_length=-1, sparse=False, ): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.dropout = nn.Dropout(p=dropout) <NEW_LINE> self.layers = numlayers <NEW_LINE> self.hsz = hiddensize <NEW_LINE> self.esz = embeddingsize <NEW_LINE> self.lt = nn.Embedding( num_features, embeddingsize, padding_idx=padding_idx, sparse=sparse ) <NEW_LINE> self.rnn = rnn_class( embeddingsize, hiddensize, numlayers, dropout=dropout if numlayers > 1 else 0, batch_first=True, ) <NEW_LINE> self.attn_type = attn_type <NEW_LINE> self.attn_time = attn_time <NEW_LINE> self.attention = AttentionLayer( attn_type=attn_type, hiddensize=hiddensize, embeddingsize=embeddingsize, bidirectional=bidir_input, attn_length=attn_length, attn_time=attn_time, ) <NEW_LINE> <DEDENT> def forward(self, xs, encoder_output, incremental_state=None): <NEW_LINE> <INDENT> enc_state, enc_hidden, attn_mask = encoder_output <NEW_LINE> attn_params = (enc_state, attn_mask) <NEW_LINE> if incremental_state is not None: <NEW_LINE> <INDENT> hidden = _transpose_hidden_state(incremental_state) <NEW_LINE> xs = xs[:, -1:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hidden = _transpose_hidden_state(enc_hidden) <NEW_LINE> <DEDENT> if isinstance(hidden, tuple): <NEW_LINE> <INDENT> hidden = tuple(x.contiguous() for x in hidden) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hidden = hidden.contiguous() <NEW_LINE> <DEDENT> seqlen = xs.size(1) <NEW_LINE> xes = self.dropout(self.lt(xs)) <NEW_LINE> if self.attn_time == 'pre': <NEW_LINE> <INDENT> new_xes = [] <NEW_LINE> for i in range(seqlen): <NEW_LINE> <INDENT> nx, _ = self.attention(xes[:, i : i + 1], hidden, attn_params) <NEW_LINE> new_xes.append(nx) <NEW_LINE> <DEDENT> xes = torch.cat(new_xes, 1).to(xes.device) <NEW_LINE> <DEDENT> if self.attn_time != 'post': <NEW_LINE> <INDENT> output, new_hidden = self.rnn(xes, hidden) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_hidden = hidden <NEW_LINE> output = [] <NEW_LINE> for i in range(seqlen): <NEW_LINE> <INDENT> o, new_hidden = self.rnn(xes[:, i, :].unsqueeze(1), new_hidden) <NEW_LINE> o, _ = self.attention(o, new_hidden, attn_params) <NEW_LINE> output.append(o) <NEW_LINE> <DEDENT> output = torch.cat(output, dim=1).to(xes.device) <NEW_LINE> <DEDENT> return output, _transpose_hidden_state(new_hidden) | Recurrent decoder module.
Can be used as a standalone language model or paired with an encoder. | 6259908af9cc0f698b1c60c9 |
class SortableDict(dict): <NEW_LINE> <INDENT> def sortedkeys(self): <NEW_LINE> <INDENT> keys = sorted(self.keys()) <NEW_LINE> return keys <NEW_LINE> <DEDENT> def sortedvalues(self): <NEW_LINE> <INDENT> return [self[key] for key in self.sortedkeys()] | Dictionary with additional sorting methods
Tip: use keys starting with '_' to sort before lowercase letters
and with '~' to sort after lowercase letters. | 6259908a167d2b6e312b8395 |
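A small example of that sorting tip: in ASCII, '_' (95) sorts before lowercase letters and '~' (126) after them.

```python
d = SortableDict()
d['_first'] = 1    # '_' sorts before lowercase keys
d['apple'] = 2
d['~last'] = 3     # '~' sorts after lowercase keys

d.sortedkeys()     # ['_first', 'apple', '~last']
d.sortedvalues()   # [1, 2, 3]
```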
class Node(object): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> super(Node, self).__init__() <NEW_LINE> self.value = value <NEW_LINE> self.left = None <NEW_LINE> self.right= None <NEW_LINE> <DEDENT> def inOrder(self): <NEW_LINE> <INDENT> def findNext(node): <NEW_LINE> <INDENT> stack = [] <NEW_LINE> while node : <NEW_LINE> <INDENT> stack.append(node) <NEW_LINE> if node.left: <NEW_LINE> <INDENT> node = node.left <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> node = node.right <NEW_LINE> <DEDENT> <DEDENT> return stack <NEW_LINE> <DEDENT> stack = findNext(self) <NEW_LINE> while len(stack) > 0: <NEW_LINE> <INDENT> c = stack.pop() <NEW_LINE> print(c.value,end="") <NEW_LINE> if len(stack) > 0: <NEW_LINE> <INDENT> c = stack.pop() <NEW_LINE> print(c.value,end="") <NEW_LINE> if c.right: <NEW_LINE> <INDENT> stack += findNext(c.right) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> print("") <NEW_LINE> <DEDENT> def preOrder(self): <NEW_LINE> <INDENT> stack = [self] <NEW_LINE> while len(stack) > 0: <NEW_LINE> <INDENT> node = stack.pop() <NEW_LINE> print(node.value,end="") <NEW_LINE> if node.right: <NEW_LINE> <INDENT> stack += [node.right] <NEW_LINE> <DEDENT> if node.left: <NEW_LINE> <INDENT> stack += [node.left] <NEW_LINE> <DEDENT> <DEDENT> print("\n") <NEW_LINE> <DEDENT> def postOrder(self): <NEW_LINE> <INDENT> def findNext(node): <NEW_LINE> <INDENT> stack = [] <NEW_LINE> while node : <NEW_LINE> <INDENT> stack.append(node) <NEW_LINE> if node.left: <NEW_LINE> <INDENT> node = node.left <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> node = node.right <NEW_LINE> <DEDENT> <DEDENT> return stack <NEW_LINE> <DEDENT> stack = findNext(self) <NEW_LINE> while len(stack)>0: <NEW_LINE> <INDENT> c = stack.pop() <NEW_LINE> print(c.value,end="") <NEW_LINE> if len(stack) > 0: <NEW_LINE> <INDENT> top = stack[len(stack)-1] <NEW_LINE> if c == top.left: <NEW_LINE> <INDENT> stack += findNext(top.right) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> print("") <NEW_LINE> <DEDENT> def toString(self): <NEW_LINE> <INDENT> queue = [self] <NEW_LINE> while len(queue) > 0: <NEW_LINE> <INDENT> c,level_queue = None,[] <NEW_LINE> while True: <NEW_LINE> <INDENT> if len(queue)==0: <NEW_LINE> <INDENT> queue += level_queue <NEW_LINE> break <NEW_LINE> <DEDENT> c = queue.pop(0) <NEW_LINE> print(c.value,end="") <NEW_LINE> if c.left: <NEW_LINE> <INDENT> queue.append(c.left) <NEW_LINE> <DEDENT> if c.right: <NEW_LINE> <INDENT> queue.append(c.right) | docstring for node | 6259908a23849d37ff852cb7 |
class MockBaseTest(tests.support.DnfBaseTestCase): <NEW_LINE> <INDENT> REPOS = ["main"] <NEW_LINE> def test_add_remote_rpms(self): <NEW_LINE> <INDENT> pkgs = self.base.add_remote_rpms([tests.support.TOUR_50_PKG_PATH]) <NEW_LINE> self.assertIsInstance(pkgs[0], dnf.package.Package) <NEW_LINE> self.assertEqual(pkgs[0].name, 'tour') | Test the Base methods that need a Sack. | 6259908a7b180e01f3e49e63 |
class LARMORMultiPeriodEventModeLoading(stresstesting.MantidStressTest): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(LARMORMultiPeriodEventModeLoading, self).__init__() <NEW_LINE> self.success = True <NEW_LINE> <DEDENT> def _get_position_and_rotation(self, workspace): <NEW_LINE> <INDENT> instrument = workspace.getInstrument() <NEW_LINE> component = instrument.getComponentByName("DetectorBench") <NEW_LINE> position = component.getPos() <NEW_LINE> rotation = component.getRotation() <NEW_LINE> return position, rotation <NEW_LINE> <DEDENT> def _clean_up(self, base_name, number_of_workspaces): <NEW_LINE> <INDENT> for index in range(1, number_of_workspaces + 1): <NEW_LINE> <INDENT> workspace_name = base_name + str(index) <NEW_LINE> monitor_name = workspace_name + "_monitors" <NEW_LINE> AnalysisDataService.remove(workspace_name) <NEW_LINE> AnalysisDataService.remove(monitor_name) <NEW_LINE> <DEDENT> AnalysisDataService.remove("80tubeCalibration_18-04-2016_r9330-9335") <NEW_LINE> <DEDENT> def _check_if_all_multi_period_workspaces_have_the_same_position(self, base_name, number_of_workspaces): <NEW_LINE> <INDENT> reference_name = base_name + str(1) <NEW_LINE> reference_workspace = AnalysisDataService.retrieve(reference_name) <NEW_LINE> reference_position, reference_rotation = self._get_position_and_rotation(reference_workspace) <NEW_LINE> for index in range(2, number_of_workspaces + 1): <NEW_LINE> <INDENT> ws_name = base_name + str(index) <NEW_LINE> workspace = AnalysisDataService.retrieve(ws_name) <NEW_LINE> position, rotation = self._get_position_and_rotation(workspace) <NEW_LINE> if position != reference_position or rotation != reference_rotation: <NEW_LINE> <INDENT> self.success = False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def runTest(self): <NEW_LINE> <INDENT> LARMOR() <NEW_LINE> Set1D() <NEW_LINE> Detector("DetectorBench") <NEW_LINE> MaskFile('USER_Larmor_163F_HePATest_r13038.txt') <NEW_LINE> AssignSample('13038') <NEW_LINE> base_name = "13038_sans_nxs_" <NEW_LINE> number_of_workspaces = 4 <NEW_LINE> self._check_if_all_multi_period_workspaces_have_the_same_position(base_name, number_of_workspaces) <NEW_LINE> self._clean_up(base_name, number_of_workspaces) <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return self.success | This test checks if the positioning of all workspaces of a
multi-period event-type file is the same. | 6259908a099cdd3c636761f8 |
class TestController(unittest.TestCase): <NEW_LINE> <INDENT> def test_index(self): <NEW_LINE> <INDENT> c = Client() <NEW_LINE> response = c.get('/') <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> <DEDENT> def test_API_stoplist(self): <NEW_LINE> <INDENT> c = Client() <NEW_LINE> response = c.get('/api/stoplist/?query=hbf&latitude=48.35882&longitude=10.90529') <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertRaises(Exception, json.loads(response.content)) <NEW_LINE> data = json.loads(response.content) <NEW_LINE> self.assertTrue("suggestions" in data) <NEW_LINE> self.assertTrue(len(data["suggestions"]), 3) <NEW_LINE> self.assertEqual(data["suggestions"][0]["value"], u'Augsburg, Augsburg Hauptbahnhof') <NEW_LINE> self.assertEqual(data["suggestions"][0]["data"], u'2000100') <NEW_LINE> <DEDENT> def test_API_getWalkingPath(self): <NEW_LINE> <INDENT> c = Client() <NEW_LINE> response = c.get('/api/walkingpath/?originLatitude=48.1234&originLongitude=11.2034&destinationLatitude=48.4532&destinationLongitude=11.4563') <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertRaises(Exception, json.loads(response.content)) <NEW_LINE> data = json.loads(response.content) <NEW_LINE> self.assertTrue("path" in data) <NEW_LINE> self.assertTrue(len(data), 5) <NEW_LINE> self.assertTrue(len(data["path"]), 888) <NEW_LINE> <DEDENT> def test_API_getRoute(self): <NEW_LINE> <INDENT> c = Client() <NEW_LINE> response = c.get('/api/route/?latitude=48.35882&longitude=10.90529&stopid=2000100') <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertRaises(Exception, json.loads(response.content)) <NEW_LINE> data = json.loads(response.content) <NEW_LINE> self.assertTrue("data" in data) <NEW_LINE> self.assertTrue(len(data), 4) | Testclass for Controllerfunctions | 6259908a60cbc95b06365b6a |
class Assignatura: <NEW_LINE> <INDENT> def __init__(self, nom, codi,f): <NEW_LINE> <INDENT> self.nom = nom <NEW_LINE> self.codi = codi <NEW_LINE> self.grups = [] <NEW_LINE> self.facu = f <NEW_LINE> <DEDENT> def afegeixGrup(self,grup): <NEW_LINE> <INDENT> for x in range(0,len(self.grups)): <NEW_LINE> <INDENT> if self.grups[x].esIgual(grup): <NEW_LINE> <INDENT> self.grups[x].num = str(self.grups[x].num) + '/' +str(grup.num) <NEW_LINE> return 0 <NEW_LINE> <DEDENT> <DEDENT> grup.assig = self <NEW_LINE> grup.facu = self.facu <NEW_LINE> self.grups.append(grup) | docstring for assignatura. | 6259908a4527f215b58eb79e |
class TaskHandler(logging.Handler): <NEW_LINE> <INDENT> def emit(self, record): <NEW_LINE> <INDENT> task = _tasks.get(task_key()) <NEW_LINE> if not task: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> task[1].write("%s\n" % self.format(record)) | Per-task logger.
Used to log all task-specific events to a per-task cuckoo.log file. | 6259908a283ffb24f3cf549e |
class UnknownResponse(SuggestedResponse): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def from_json(cls, json): <NEW_LINE> <INDENT> response = super(UnknownResponse, cls).from_json(json) <NEW_LINE> response.raw_response = json <NEW_LINE> return response <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def property_mapping(cls): <NEW_LINE> <INDENT> mapping = super(UnknownResponse, cls).property_mapping() <NEW_LINE> mapping.update({ 'type': 'type' }) <NEW_LINE> return mapping | This response type is returned by the response factory when it encounters an unknown response type.
Its `type` attribute is set to the type of the response, and its `raw_response` attribute contains the raw JSON
response received | 6259908a23849d37ff852cb9 |
class MultiPointField(ShapelyField): <NEW_LINE> <INDENT> @property <NEW_LINE> def shape(self) -> MultiPointType: <NEW_LINE> <INDENT> return MultiPoint <NEW_LINE> <DEDENT> def validate(self, value: MultiPoint): <NEW_LINE> <INDENT> super(MultiPointField, self).validate(value) <NEW_LINE> <DEDENT> def to_mongo(self, value: MultiPoint) -> MultiPointDict: <NEW_LINE> <INDENT> return super(MultiPointField, self).to_mongo(value) <NEW_LINE> <DEDENT> def to_python(self, value: MultiPointDict) -> MultiPoint: <NEW_LINE> <INDENT> return super(MultiPointField, self).to_python(value) | Substitution for :class:`mongoengine.MultiPointField`
utilizing :class:`MultiPoint` geometry type.
Instead of storing GeoJSON-like mapping as a value, a
`shapely`__ geometry instance is used instead via
the `__geo_interface__`__ protocol.
__ https://shapely.readthedocs.io/en/stable/
__ https://gist.github.com/zzpwelkin/2279867 | 6259908a3346ee7daa338462 |
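A hedged usage sketch; the `Site` document is hypothetical and assumes mongoengine and shapely are installed alongside the module defining this field.

```python
from mongoengine import Document
from shapely.geometry import MultiPoint


class Site(Document):                 # hypothetical document model
    sample_points = MultiPointField()


site = Site(sample_points=MultiPoint([(0.0, 0.0), (1.5, 2.5)]))
# On save, to_mongo() serializes the shapely geometry into a GeoJSON-like
# {'type': 'MultiPoint', 'coordinates': [...]} mapping via __geo_interface__.
```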
class VariableNameGenerator(set): <NEW_LINE> <INDENT> KEYFORM = "$var%s" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(VariableNameGenerator, self).__init__() <NEW_LINE> self.__iNum = 0 <NEW_LINE> <DEDENT> def generate_name(self): <NEW_LINE> <INDENT> sName = self.KEYFORM % self.__iNum <NEW_LINE> while sName in self: <NEW_LINE> <INDENT> self.__iNum += 1 <NEW_LINE> sName = self.KEYFORM % self.__iNum <NEW_LINE> <DEDENT> self.add(sName) <NEW_LINE> return sName | Generate a unique name for a variable in a filter. | 6259908adc8b845886d551b8 |
class EmbeddingLayer(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_dim, embed_dim): <NEW_LINE> <INDENT> super(EmbeddingLayer, self).__init__() <NEW_LINE> self.embedding = torch.nn.Embedding(input_dim, embed_dim) <NEW_LINE> torch.nn.init.xavier_uniform_(self.embedding.weight.data) <NEW_LINE> <DEDENT> def forward(self, feature_id, feature_val=None): <NEW_LINE> <INDENT> if feature_val is None: <NEW_LINE> <INDENT> return self.embedding(feature_id) <NEW_LINE> <DEDENT> return self.embedding(feature_id) * feature_val.unsqueeze(-1) | Embedding module.
It is a sparse-to-dense operation that looks up embeddings for the given features. | 6259908a167d2b6e312b8397 |
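A minimal usage sketch of the lookup, with and without the optional per-feature values; the sizes are illustrative.

```python
import torch

layer = EmbeddingLayer(input_dim=10, embed_dim=4)

feature_id = torch.tensor([[1, 3, 7]])         # (batch=1, num_fields=3)
dense = layer(feature_id)                      # shape (1, 3, 4)

feature_val = torch.tensor([[0.5, 1.0, 2.0]])  # optional per-feature weights
weighted = layer(feature_id, feature_val)      # each embedding scaled by its value
```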
class TestSearchTMResponseDtoV3(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testSearchTMResponseDtoV3(self): <NEW_LINE> <INDENT> pass | SearchTMResponseDtoV3 unit test stubs | 6259908a283ffb24f3cf54a0 |
class StationOperationGroup(models.Model): <NEW_LINE> <INDENT> station = models.ForeignKey('seisnet.Station', on_delete=models.CASCADE) <NEW_LINE> operation_time = models.DateTimeField() | A single operation performed at the same station. | 6259908a99fddb7c1ca63bdc |
class Globals: <NEW_LINE> <INDENT> def building_requirements(self, unit_type, requirement=True, one_at_time=False): <NEW_LINE> <INDENT> if one_at_time and self.already_pending(unit_type): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return requirement and self.can_afford(unit_type) <NEW_LINE> <DEDENT> def can_build_unique(self, unit_type, building, requirement=True): <NEW_LINE> <INDENT> return ( self.can_afford(unit_type) and not building and self.building_requirements(unit_type, requirement, one_at_time=True) ) <NEW_LINE> <DEDENT> def can_train(self, unit_type, requirement=True, larva=True): <NEW_LINE> <INDENT> if self.hives and not self.caverns: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self.pits.ready and not self.hives and not self.already_pending(UnitTypeId.HIVE): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return (not larva or self.larvae) and self.can_afford(unit_type) and requirement <NEW_LINE> <DEDENT> def can_upgrade(self, upgrade, research, host_building): <NEW_LINE> <INDENT> return not self.already_pending_upgrade(upgrade) and self.can_afford(research) and host_building <NEW_LINE> <DEDENT> async def place_building(self, building): <NEW_LINE> <INDENT> position = await self.get_production_position() <NEW_LINE> if not position: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if any(enemy.distance_to(position) < 10 for enemy in self.enemies) and not self.close_enemy_production: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> selected_drone = self.select_build_worker(position) <NEW_LINE> if selected_drone: <NEW_LINE> <INDENT> self.add_action(selected_drone.build(building, position)) | Global wrappers | 6259908aa05bb46b3848bf26 |
class TestAlertResourceAttributes(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testAlertResourceAttributes(self): <NEW_LINE> <INDENT> pass | AlertResourceAttributes unit test stubs | 6259908a3617ad0b5ee07d52 |
class MapMemHook(Hook): <NEW_LINE> <INDENT> def __init__(self, se_obj, emu_eng, cb, begin=1, end=0): <NEW_LINE> <INDENT> super(MapMemHook, self).__init__(se_obj, emu_eng, cb) <NEW_LINE> self.begin = begin <NEW_LINE> self.end = end <NEW_LINE> <DEDENT> def add(self): <NEW_LINE> <INDENT> self.added = True <NEW_LINE> self.enabled = True | This hook will fire each time a chunk of memory is mapped | 6259908a26068e7796d4e543 |
class MuninPHPfpmPlugin(MuninPlugin): <NEW_LINE> <INDENT> plugin_name = 'phpfpmstats' <NEW_LINE> isMultigraph = True <NEW_LINE> def __init__(self, argv=(), env={}, debug=False): <NEW_LINE> <INDENT> MuninPlugin.__init__(self, argv, env, debug) <NEW_LINE> self._host = self.envGet('host') <NEW_LINE> self._port = self.envGet('port') <NEW_LINE> self._user = self.envGet('user') <NEW_LINE> self._monpath = self.envGet('monpath') <NEW_LINE> self._password = self.envGet('password') <NEW_LINE> self._ssl = self.envCheckFlag('ssl', False) <NEW_LINE> if self.graphEnabled('php_fpm_connections'): <NEW_LINE> <INDENT> graph = MuninGraph('PHP FPM - Connections per second', 'PHP', info='PHP Fast Process Manager (FPM) - Connections per second.', args='--base 1000 --lower-limit 0') <NEW_LINE> graph.addField('conn', 'conn', draw='LINE2', type='DERIVE', min=0) <NEW_LINE> self.appendGraph('php_fpm_connections', graph) <NEW_LINE> <DEDENT> if self.graphEnabled('php_fpm_processes'): <NEW_LINE> <INDENT> graph = MuninGraph('PHP FPM - Processes', 'PHP', info='PHP Fast Process Manager (FPM) - Active / Idle Processes.', args='--base 1000 --lower-limit 0') <NEW_LINE> graph.addField('active', 'active', draw='AREASTACK', type='GAUGE') <NEW_LINE> graph.addField('idle', 'idle', draw='AREASTACK', type='GAUGE') <NEW_LINE> graph.addField('total', 'total', draw='LINE2', type='GAUGE', colour='000000') <NEW_LINE> self.appendGraph('php_fpm_processes', graph) <NEW_LINE> <DEDENT> <DEDENT> def retrieveVals(self): <NEW_LINE> <INDENT> fpminfo = PHPfpmInfo(self._host, self._port, self._user, self._password, self._monpath, self._ssl) <NEW_LINE> stats = fpminfo.getStats() <NEW_LINE> if self.hasGraph('php_fpm_connections') and stats: <NEW_LINE> <INDENT> self.setGraphVal('php_fpm_connections', 'conn', stats['accepted conn']) <NEW_LINE> <DEDENT> if self.hasGraph('php_fpm_processes') and stats: <NEW_LINE> <INDENT> self.setGraphVal('php_fpm_processes', 'active', stats['active processes']) <NEW_LINE> self.setGraphVal('php_fpm_processes', 'idle', stats['idle processes']) <NEW_LINE> self.setGraphVal('php_fpm_processes', 'total', stats['total processes']) | Multigraph Munin Plugin for monitoring PHP Fast Process Manager (FPM).
| 6259908ad486a94d0ba2dbb6 |
class Token(AuthenticatedMessage): <NEW_LINE> <INDENT> __magic__ = ord("t") <NEW_LINE> __fields__ = ( ("valid_from", TypeInfo(int)), ("valid_to", TypeInfo(int)), ("username", TypeInfo(str)) ) | Represents a token used to authenticate the user | 6259908aaad79263cf4303bb |
class LC_temperature_control(Instrument): <NEW_LINE> <INDENT> def __init__(self, name, reset=False): <NEW_LINE> <INDENT> Instrument.__init__(self, name) <NEW_LINE> self.FP = LVApp("DRTempControl.Application", "DR TempControl.exe\TC.vi") <NEW_LINE> self._channels = range(10) <NEW_LINE> self._currents = range(3) <NEW_LINE> self.add_parameter('avs_name', flags=Instrument.FLAG_GET, type=types.StringType, channels=self._channels) <NEW_LINE> self.add_parameter('current', flags=Instrument.FLAG_GET, type=types.FloatType, channels=self._currents, units='A') <NEW_LINE> self.add_parameter('resistance', flags=Instrument.FLAG_GET, type=types.FloatType, channels=self._channels, units='Ohm') <NEW_LINE> self.add_parameter('temperature', flags=Instrument.FLAG_GET, type=types.FloatType, channels=self._channels, units='K') <NEW_LINE> self.get_all() <NEW_LINE> <DEDENT> def do_get_avs_name(self, channel): <NEW_LINE> <INDENT> return self.FP.GetData('AVS names')[channel] <NEW_LINE> <DEDENT> def do_get_current(self, channel): <NEW_LINE> <INDENT> return self.FP.GetData('I')[channel] <NEW_LINE> <DEDENT> def do_get_resistance(self, channel): <NEW_LINE> <INDENT> return self.FP.GetData('R')[channel] <NEW_LINE> <DEDENT> def do_get_temperature(self, channel): <NEW_LINE> <INDENT> return self.FP.GetData('T')[channel] <NEW_LINE> <DEDENT> def get_all(self): <NEW_LINE> <INDENT> for i in self._channels: <NEW_LINE> <INDENT> self.get('avs_name%d' % i) <NEW_LINE> self.get('resistance%d' % i) <NEW_LINE> self.get('temperature%d' % i) <NEW_LINE> <DEDENT> for i in self._currents: <NEW_LINE> <INDENT> self.get('current%d' % i) | Driver for the Leiden Cryogenics TemperatureControl application
Install pywin32 using the downloadable 2.7 32-bit installer.
If using an environment, put the path to it in the following
registry key:
HKEY_CURRENT_USER/Software/Python/PythonCore/2.7/InstallPath | 6259908ad8ef3951e32c8c5e |
class SharedCount(object): <NEW_LINE> <INDENT> def __init__(self, initial_count=0): <NEW_LINE> <INDENT> self._count = initial_count <NEW_LINE> self._count_lock = Lock() <NEW_LINE> <DEDENT> def incre(self, delta=1): <NEW_LINE> <INDENT> with self._count_lock: <NEW_LINE> <INDENT> self._count += delta <NEW_LINE> <DEDENT> <DEDENT> def decre(self, delta): <NEW_LINE> <INDENT> with self._count_lock: <NEW_LINE> <INDENT> self._count -= delta | Thread scheduling is inherently non-deterministic, so incorrect use of locking in a multithreaded
program can lead to random data corruption or other anomalous behavior, known as a race condition.
To avoid race conditions, it is best to hold locks only within the critical section (the part of the code that operates on the shared resource). | 6259908a63b5f9789fe86d6b |
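A small sketch of the point made in that docstring: the lock is held only inside the critical section, so concurrent increments stay consistent. The thread and iteration counts are arbitrary.

```python
import threading

counter = SharedCount(initial_count=0)

def worker():
    for _ in range(10000):
        counter.incre()      # lock held only inside incre(), the critical section

threads = [threading.Thread(target=worker) for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()
# counter._count == 40000; without the lock the total could silently be lower.
```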
class ControlFlowContext(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._outer_context = ops.get_default_graph()._get_control_flow_context() <NEW_LINE> self._context_stack = [] <NEW_LINE> self._values = set() <NEW_LINE> self._external_values = {} <NEW_LINE> <DEDENT> @property <NEW_LINE> def outer_context(self): <NEW_LINE> <INDENT> return self._outer_context <NEW_LINE> <DEDENT> @property <NEW_LINE> def grad_state(self): <NEW_LINE> <INDENT> raise NotImplementedError("Abstract method") <NEW_LINE> <DEDENT> @property <NEW_LINE> def back_prop(self): <NEW_LINE> <INDENT> raise NotImplementedError("Abstract method") <NEW_LINE> <DEDENT> def AddName(self, name): <NEW_LINE> <INDENT> self._values.add(name) <NEW_LINE> <DEDENT> def Enter(self): <NEW_LINE> <INDENT> graph = ops.get_default_graph() <NEW_LINE> self._context_stack.append(graph._get_control_flow_context()) <NEW_LINE> graph._set_control_flow_context(self) <NEW_LINE> <DEDENT> def Exit(self): <NEW_LINE> <INDENT> graph = ops.get_default_graph() <NEW_LINE> last_context = self._context_stack.pop() <NEW_LINE> graph._set_control_flow_context(last_context) <NEW_LINE> <DEDENT> def ExitResult(self, result): <NEW_LINE> <INDENT> if self._outer_context: <NEW_LINE> <INDENT> for x in result: <NEW_LINE> <INDENT> self._outer_context.AddName(x.name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def GetWhileContext(self): <NEW_LINE> <INDENT> if self._outer_context: <NEW_LINE> <INDENT> return self._outer_context.GetWhileContext() <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def _MaybeAddToWhileContext(self, op): <NEW_LINE> <INDENT> while_ctxt = self.GetWhileContext() <NEW_LINE> if while_ctxt is not None: <NEW_LINE> <INDENT> op._add_control_input(while_ctxt.GetControlPivot().op) <NEW_LINE> <DEDENT> <DEDENT> def _MaybeRemoveExternalControlEdges(self, op): <NEW_LINE> <INDENT> while_ctxt = self.GetWhileContext() <NEW_LINE> if while_ctxt is None: <NEW_LINE> <INDENT> internal_control_inputs = op.control_inputs <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> internal_control_inputs = [] <NEW_LINE> for x in op.control_inputs: <NEW_LINE> <INDENT> ctxt = _GetOutputContext(x) <NEW_LINE> if ctxt is not None and ctxt.GetWhileContext() == while_ctxt: <NEW_LINE> <INDENT> internal_control_inputs.append(x) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if len(internal_control_inputs) != len(op.control_inputs): <NEW_LINE> <INDENT> del op.control_inputs[:] <NEW_LINE> op._add_control_inputs(internal_control_inputs) <NEW_LINE> <DEDENT> return internal_control_inputs | The base class for control flow context.
The usage pattern is a sequence of (Enter, Exit) followed by a final
ExitResult.
We maintain the following state for control flow contexts during graph
construction:
1. graph has _control_flow_context: the current context used to
construct new nodes. Changed by ctxt.Enter() and ctxt.Exit()
2. op has _control_flow_context: the context to which the op belongs.
Set at the time the op is created. Immutable.
3. A ControlFlowContext has _outer_context: the context in which this
context is created. Set at the time a context is created. Immutable.
4. A ControlFlowContext has _context_stack.
Pushed and popped by ctxt.Enter() and ctxt.Exit() | 6259908a99fddb7c1ca63bdd |
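A sketch of the (Enter, Exit, ExitResult) pattern the docstring describes; `MyContext` and `build_body` are hypothetical placeholders, since this is an internal TensorFlow base class.

```python
# Hypothetical subclass and body builder, shown only to illustrate the pattern.
ctxt = MyContext()
ctxt.Enter()               # new ops are now created inside this context
result = build_body()      # construct the ops that belong to the context
ctxt.Exit()                # restore the previously active context
ctxt.ExitResult(result)    # make the outputs visible to the outer context
```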
class LockAssetTestCase(AssetsTestCase): <NEW_LINE> <INDENT> def test_locking(self): <NEW_LINE> <INDENT> def verify_asset_locked_state(locked): <NEW_LINE> <INDENT> asset_location = StaticContent.get_location_from_path('/c4x/edX/toy/asset/sample_static.html') <NEW_LINE> content = contentstore().find(asset_location) <NEW_LINE> self.assertEqual(content.locked, locked) <NEW_LINE> <DEDENT> def post_asset_update(lock, course): <NEW_LINE> <INDENT> content_type = 'application/txt' <NEW_LINE> upload_date = datetime(2013, 6, 1, 10, 30, tzinfo=UTC) <NEW_LINE> asset_location = course.id.make_asset_key('asset', 'sample_static.html') <NEW_LINE> url = reverse_course_url( 'assets_handler', course.id, kwargs={'asset_key_string': str(asset_location)} ) <NEW_LINE> resp = self.client.post( url, json.dumps(assets._get_asset_json( "sample_static.html", content_type, upload_date, asset_location, None, lock)), "application/json" ) <NEW_LINE> self.assertEqual(resp.status_code, 201) <NEW_LINE> return json.loads(resp.content.decode('utf-8')) <NEW_LINE> <DEDENT> module_store = modulestore() <NEW_LINE> course_items = import_course_from_xml( module_store, self.user.id, TEST_DATA_DIR, ['toy'], static_content_store=contentstore(), verbose=True ) <NEW_LINE> course = course_items[0] <NEW_LINE> verify_asset_locked_state(False) <NEW_LINE> resp_asset = post_asset_update(True, course) <NEW_LINE> self.assertTrue(resp_asset['locked']) <NEW_LINE> verify_asset_locked_state(True) <NEW_LINE> resp_asset = post_asset_update(False, course) <NEW_LINE> self.assertFalse(resp_asset['locked']) <NEW_LINE> verify_asset_locked_state(False) | Unit test for locking and unlocking an asset. | 6259908aa05bb46b3848bf27 |
class Goolf(Gcc, OpenMPI, OpenBLAS, ScaLAPACK, Fftw): <NEW_LINE> <INDENT> NAME = 'goolf' <NEW_LINE> BLACS_MODULE_NAME = [] <NEW_LINE> BLACS_LIB = [] <NEW_LINE> BLACS_LIB_MT = [] | Compiler toolchain with GCC, OpenMPI, OpenBLAS, ScaLAPACK and FFTW. | 6259908a091ae35668706846 |
class BaseTestMockedCFABManager(base.BaseTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(BaseTestMockedCFABManager, self).setUp() <NEW_LINE> self.manager = cfabdriver._CFABManager() <NEW_LINE> self.manager.close_session = mock.MagicMock() <NEW_LINE> <DEDENT> def assert_wrote(self, lines): <NEW_LINE> <INDENT> telnet = self.manager._telnet <NEW_LINE> self.assertEqual( lines, [x[0][0] for x in telnet.write.call_args_list]) | Base class to test Fujitsu C-Fabric manager. | 6259908a23849d37ff852cbd |
class UpdateModelMixin(object): <NEW_LINE> <INDENT> def update(self, request, *args, **kwargs): <NEW_LINE> <INDENT> partial = kwargs.pop('partial', False) <NEW_LINE> self.object = self.get_object_or_none() <NEW_LINE> if self.object is None: <NEW_LINE> <INDENT> created = True <NEW_LINE> save_kwargs = {'force_insert': True} <NEW_LINE> success_status_code = status.HTTP_201_CREATED <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> created = False <NEW_LINE> save_kwargs = {'force_update': True} <NEW_LINE> success_status_code = status.HTTP_200_OK <NEW_LINE> <DEDENT> serializer = self.get_serializer(self.object, data=request.DATA, files=request.FILES, partial=partial) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.pre_save(serializer.object) <NEW_LINE> <DEDENT> except ValidationError as err: <NEW_LINE> <INDENT> return Response(err.message_dict, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> self.object = serializer.save(**save_kwargs) <NEW_LINE> self.post_save(self.object, created=created) <NEW_LINE> return Response(serializer.data, status=success_status_code) <NEW_LINE> <DEDENT> return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def partial_update(self, request, *args, **kwargs): <NEW_LINE> <INDENT> kwargs['partial'] = True <NEW_LINE> return self.update(request, *args, **kwargs) <NEW_LINE> <DEDENT> def get_object_or_none(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.get_object() <NEW_LINE> <DEDENT> except Http404: <NEW_LINE> <INDENT> if self.request.method == 'PUT': <NEW_LINE> <INDENT> self.check_permissions(clone_request(self.request, 'POST')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def pre_save(self, obj): <NEW_LINE> <INDENT> lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field <NEW_LINE> lookup = self.kwargs.get(lookup_url_kwarg, None) <NEW_LINE> pk = self.kwargs.get(self.pk_url_kwarg, None) <NEW_LINE> slug = self.kwargs.get(self.slug_url_kwarg, None) <NEW_LINE> slug_field = slug and self.slug_field or None <NEW_LINE> if lookup: <NEW_LINE> <INDENT> setattr(obj, self.lookup_field, lookup) <NEW_LINE> <DEDENT> if pk: <NEW_LINE> <INDENT> setattr(obj, 'pk', pk) <NEW_LINE> <DEDENT> if slug: <NEW_LINE> <INDENT> setattr(obj, slug_field, slug) <NEW_LINE> <DEDENT> if hasattr(obj, 'full_clean'): <NEW_LINE> <INDENT> exclude = _get_validation_exclusions(obj, pk, slug_field, self.lookup_field) <NEW_LINE> obj.full_clean(exclude) | Update a model instance. | 6259908aaad79263cf4303bd |
class Game: <NEW_LINE> <INDENT> def __init__(self, player, opponent_trainers, cities, potential_legendary_creatures): <NEW_LINE> <INDENT> self.player: Player = player <NEW_LINE> self.__opponent_trainers: list = opponent_trainers <NEW_LINE> self.__cities: list = cities <NEW_LINE> self.__potential_legendary_creatures: list = potential_legendary_creatures <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> res: str = "Player in the game:\n" + str(self.player) + "\n" <NEW_LINE> res += "Below is a list of opponent trainers in the game:\n" <NEW_LINE> for opponent_trainer in self.__opponent_trainers: <NEW_LINE> <INDENT> res += str(opponent_trainer) + "\n" <NEW_LINE> <DEDENT> res += "Maps of cities in the game:\n" <NEW_LINE> for city in self.__cities: <NEW_LINE> <INDENT> res += str(city) + "\n" <NEW_LINE> <DEDENT> res += "Below is a list of potential legendary creatures in this game:\n" <NEW_LINE> for legendary_creature in self.__potential_legendary_creatures: <NEW_LINE> <INDENT> res += str(legendary_creature) + "\n" <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> def get_opponent_trainers(self): <NEW_LINE> <INDENT> return self.__opponent_trainers <NEW_LINE> <DEDENT> def get_cities(self): <NEW_LINE> <INDENT> return self.__cities <NEW_LINE> <DEDENT> def get_potential_legendary_creatures(self): <NEW_LINE> <INDENT> return self.__potential_legendary_creatures <NEW_LINE> <DEDENT> def clone(self): <NEW_LINE> <INDENT> return copy.deepcopy(self) | This class contains attributes of the saved game data. | 6259908a4c3428357761bebe |
class RevisionsFeed(DocumentsFeed): <NEW_LINE> <INDENT> title = _("MDN recent revisions") <NEW_LINE> subtitle = _("Recent revisions to MDN documents") <NEW_LINE> def items(self): <NEW_LINE> <INDENT> return Revision.objects.order_by('-created')[:50] <NEW_LINE> <DEDENT> def item_title(self, item): <NEW_LINE> <INDENT> return "%s/%s" % (item.document.locale, item.document.full_path) <NEW_LINE> <DEDENT> def item_description(self, item): <NEW_LINE> <INDENT> previous = item.get_previous() <NEW_LINE> if previous is None: <NEW_LINE> <INDENT> return '<p>Created by: %s</p>' % item.creator.username <NEW_LINE> <DEDENT> by = '<p>Edited by: %s</p>' % item.creator.username <NEW_LINE> comment = '<p>Comment: %s</p>' % item.comment <NEW_LINE> diff = ("Diff:<blockquote>%s</blockquote>" % ( diff_inline(previous.content, item.content))) <NEW_LINE> diff = (diff.replace('<ins', '<ins style="background-color: #AAFFAA;text-decoration:none;"') .replace('<del', '<del style="background-color: #FFAAAA;text-decoration:none;"')) <NEW_LINE> link_cell = '<td><a href="%s">%s</a></td>' <NEW_LINE> view_cell = link_cell % (reverse('wiki.document', args=[item.document.full_path]), _('View Page')) <NEW_LINE> edit_cell = link_cell % (reverse('wiki.edit_document', args=[item.document.full_path]), _('Edit Page')) <NEW_LINE> compare_cell = link_cell % (reverse('wiki.compare_revisions', args=[item.document.full_path]) + '?' + urllib.urlencode({'from': previous.id, 'to': item.id}), _('Show comparison')) <NEW_LINE> history_cell = link_cell % (reverse('wiki.document_revisions', args=[item.document.full_path]), _('History')) <NEW_LINE> links_table = '<table border="0" width="80%">' <NEW_LINE> links_table = links_table + '<tr>%s%s%s%s</tr>' % (view_cell, edit_cell, compare_cell, history_cell) <NEW_LINE> links_table = links_table + '</table>' <NEW_LINE> description = "%s%s%s%s" % (by, comment, diff, links_table) <NEW_LINE> return description <NEW_LINE> <DEDENT> def item_link(self, item): <NEW_LINE> <INDENT> return reverse('wiki.document', args=[item.document.full_path]) <NEW_LINE> <DEDENT> def item_pubdate(self, item): <NEW_LINE> <INDENT> return item.created <NEW_LINE> <DEDENT> def item_author_name(self, item): <NEW_LINE> <INDENT> return '%s' % item.creator <NEW_LINE> <DEDENT> def item_author_link(self, item): <NEW_LINE> <INDENT> return self.request.build_absolute_uri( reverse('devmo.views.profile_view', args=(item.creator.username,))) <NEW_LINE> <DEDENT> def item_categories(self, item): <NEW_LINE> <INDENT> return [] | Feed of recent revisions | 6259908a7c178a314d78e9eb |
class IsOwner(permissions.BasePermission): <NEW_LINE> <INDENT> def has_permission(self, request, view): <NEW_LINE> <INDENT> if isinstance(view,views.UserProfileViewSet) and request.method == "POST": <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif request.auth is not None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | Base permission class for all requests, ensuring that they are authenticated | 6259908a5fc7496912d4906d
class ProductionConfig(Config): <NEW_LINE> <INDENT> DEBUG = False <NEW_LINE> TESTING = False <NEW_LINE> DATABASE_URL = os.getenv("DATABASE_URL") | Production environment configurations | 6259908ad486a94d0ba2dbba |
class Meta: <NEW_LINE> <INDENT> model = Shelfbook <NEW_LINE> fields = ["id", "shelf", "book", "status"] | Meta of shelfbook serializer. | 6259908a7c178a314d78e9ec |
class FilterSection(BaseSection): <NEW_LINE> <INDENT> _fields = ['ДатаНачала', 'ДатаКонца', 'РасчСчет', 'Документ'] <NEW_LINE> <INDENT> _mandatory_fields = ['ДатаНачала', 'ДатаКонца', 'РасчСчет'] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> BaseSection.__init__(self) <NEW_LINE> for name in self._fields: <NEW_LINE> <INDENT> self.__dict__[name] = None <NEW_LINE> <DEDENT> self.__dict__['ДатаНачала'] = datetime.date.today() <NEW_LINE> self.__dict__['ДатаКонца'] = datetime.date.today() | The `Filters` section | 6259908a63b5f9789fe86d6e
class TriggerEfficiencyContainer(ComparisonData): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> ComparisonData.__init__(self) <NEW_LINE> <DEDENT> def AddEfficiency(self, trclasstype, key, efficiencyCurve, style): <NEW_LINE> <INDENT> triggerdata = None <NEW_LINE> if trclasstype == "pthat": <NEW_LINE> <INDENT> triggerdata = TriggerEfficiencyClassPtHat(key, efficiencyCurve, style) <NEW_LINE> <DEDENT> elif trclasstype == "triggertype": <NEW_LINE> <INDENT> triggerdata = TriggerEfficiencyClassTriggerType(key, efficiencyCurve, style) <NEW_LINE> <DEDENT> self.AddEntry(triggerdata) | Underlying data structure for the comparison plot | 6259908abf627c535bcb30d9 |
class OptimizerCrab(Optimizer): <NEW_LINE> <INDENT> def reset(self): <NEW_LINE> <INDENT> Optimizer.reset(self) <NEW_LINE> self.id_text = 'CRAB' <NEW_LINE> self.num_optim_vars = 0 <NEW_LINE> <DEDENT> def init_optim(self, term_conds): <NEW_LINE> <INDENT> Optimizer.init_optim(self, term_conds) <NEW_LINE> dyn = self.dynamics <NEW_LINE> self.num_optim_vars = 0 <NEW_LINE> pulse_gen_valid = True <NEW_LINE> if self.pulse_generator is None: <NEW_LINE> <INDENT> pulse_gen_valid = False <NEW_LINE> err_msg = "pulse_generator attribute is None" <NEW_LINE> <DEDENT> elif not isinstance(self.pulse_generator, collections.Iterable): <NEW_LINE> <INDENT> pulse_gen_valid = False <NEW_LINE> err_msg = "pulse_generator is not iterable" <NEW_LINE> <DEDENT> elif len(self.pulse_generator) != dyn.get_num_ctrls(): <NEW_LINE> <INDENT> pulse_gen_valid = False <NEW_LINE> err_msg = ("the number of pulse generators {} does not equal " "the number of controls {}".format( len(self.pulse_generator), dyn.num_ctrls)) <NEW_LINE> <DEDENT> if pulse_gen_valid: <NEW_LINE> <INDENT> for p_gen in self.pulse_generator: <NEW_LINE> <INDENT> if not isinstance(p_gen, pulsegen.PulseGenCrab): <NEW_LINE> <INDENT> pulse_gen_valid = False <NEW_LINE> err_msg = ( "pulse_generator contained object of type '{}'".format( p_gen.__class__.__name__)) <NEW_LINE> break <NEW_LINE> <DEDENT> self.num_optim_vars += p_gen.num_optim_vars <NEW_LINE> <DEDENT> <DEDENT> if not pulse_gen_valid: <NEW_LINE> <INDENT> raise errors.UsageError( "The pulse_generator attribute must be set to a list of " "PulseGenCrab - one for each control. Here " + err_msg) <NEW_LINE> <DEDENT> <DEDENT> def _build_bounds_list(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def _get_optim_var_vals(self): <NEW_LINE> <INDENT> pvals = [] <NEW_LINE> for pgen in self.pulse_generator: <NEW_LINE> <INDENT> pvals.extend(pgen.get_optim_var_vals()) <NEW_LINE> <DEDENT> return np.array(pvals) <NEW_LINE> <DEDENT> def _get_ctrl_amps(self, optim_var_vals): <NEW_LINE> <INDENT> dyn = self.dynamics <NEW_LINE> if self.log_level <= logging.DEBUG: <NEW_LINE> <INDENT> changed_params = self.optim_var_vals != optim_var_vals <NEW_LINE> logger.debug( "{} out of {} optimisation parameters changed".format( changed_params.sum(), len(optim_var_vals))) <NEW_LINE> <DEDENT> amps = np.empty([dyn.num_tslots, dyn.num_ctrls]) <NEW_LINE> j = 0 <NEW_LINE> param_idx_st = 0 <NEW_LINE> for p_gen in self.pulse_generator: <NEW_LINE> <INDENT> param_idx_end = param_idx_st + p_gen.num_optim_vars <NEW_LINE> pg_pvals = optim_var_vals[param_idx_st:param_idx_end] <NEW_LINE> p_gen.set_optim_var_vals(pg_pvals) <NEW_LINE> amps[:, j] = p_gen.gen_pulse() <NEW_LINE> param_idx_st = param_idx_end <NEW_LINE> j += 1 <NEW_LINE> <DEDENT> self.optim_var_vals = optim_var_vals <NEW_LINE> return amps | Optimises the pulse using the CRAB algorithm [1].
It uses the scipy.optimize.minimize function with the method specified
by the optim_method attribute. See Optimizer.run_optimization for details.
It minimises the fidelity error function with respect to the CRAB
basis function coefficients.
AJGP ToDo: Add citation here | 6259908aaad79263cf4303c1 |
class EngineKnowledgeComponent(EngineApiModelMixin, ApiModel): <NEW_LINE> <INDENT> model_name = 'knowledge_component' <NEW_LINE> lookup_field = 'kc_id' <NEW_LINE> def add_prerequisite_knowledge_component(self, prerequisite, connection_strength): <NEW_LINE> <INDENT> self.model.prerequisite_knowledge_components[prerequisite] = connection_strength <NEW_LINE> <DEDENT> def to_api_params(self): <NEW_LINE> <INDENT> return dict( name=self.model.name, kc_id=self.model.slug, mastery_prior=self.model.mastery_prior, ) <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> qset = self.client.engine_api.request('GET', self.model_name, params={'kc_id':self.model.slug} ).json()['results'] <NEW_LINE> return qset[0] if len(qset)>0 else None <NEW_LINE> <DEDENT> def create(self): <NEW_LINE> <INDENT> for kc in self.model.prerequisite_knowledge_components: <NEW_LINE> <INDENT> if not kc.engine.id: <NEW_LINE> <INDENT> kc.engine.update() <NEW_LINE> <DEDENT> <DEDENT> return super().create() <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> for kc in self.model.prerequisite_knowledge_components: <NEW_LINE> <INDENT> if not kc.engine.id: <NEW_LINE> <INDENT> kc.engine.update() <NEW_LINE> <DEDENT> <DEDENT> return super().update() <NEW_LINE> <DEDENT> def push(self): <NEW_LINE> <INDENT> return self.update() | Knowledge component | 6259908a4527f215b58eb7a3 |
class NoMoreStepsError(Exception): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) | No more undos/redos. | 6259908a7c178a314d78e9ed |
class PodmanError(DockerException): <NEW_LINE> <INDENT> pass | Base class for PodmanPy exceptions. | 6259908a99fddb7c1ca63be0 |
class Group(): <NEW_LINE> <INDENT> def __init__(self, id, name, description, capabilities, required, editable): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.name = name <NEW_LINE> self.description = description <NEW_LINE> self.capabilities = capabilities <NEW_LINE> self.required = required <NEW_LINE> self.editable = editable <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{cls}(id={x.id!r}, name={x.name!r}, description={x.description!r}, capabilities={x.capabilities!r}, required={x.required!r}, editable={x.editable!r})'.format(cls=self.__class__.__name__, x=self) | Class that helps interact with an existing group on the Log Insight server. | 6259908afff4ab517ebcf41e
class ProjectSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> projectfiles = ProjectFileSerializer(many=True, read_only=True) <NEW_LINE> model = Project <NEW_LINE> fields = ('id', 'project_name', 'project_description', 'created_date','commandline','public','user', 'projectfiles') | ProjectSerializer allows easy validation of Project Submission Data | 6259908ad486a94d0ba2dbbe |
class LazyflowVectorwiseClassifierABC(with_metaclass(abc.ABCMeta, object)): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def predict_probabilities(self, X): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abc.abstractproperty <NEW_LINE> def known_classes(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abc.abstractproperty <NEW_LINE> def feature_count(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abc.abstractproperty <NEW_LINE> def feature_names(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def __subclasshook__(cls, C): <NEW_LINE> <INDENT> if cls is LazyflowVectorwiseClassifierABC: <NEW_LINE> <INDENT> return _has_attributes(C, ["predict_probabilities", "known_classes", "serialize_hdf5", "deserialize_hdf5"]) <NEW_LINE> <DEDENT> return NotImplemented <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def serialize_hdf5(self, h5py_group): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def deserialize_hdf5(cls, h5py_group): <NEW_LINE> <INDENT> raise NotImplementedError | Defines an interface for "vector-wise" classifier objects that can be used by the lazyflow classifier operators.
A "vector-wise" classifier is trained with a 2D feature matrix and a 1D label vector.
All scikit-learn classifiers already satisfy this interface. | 6259908a97e22403b383cb00 |
class TEGettextExtractInterface(object): <NEW_LINE> <INDENT> name = None <NEW_LINE> exts = None <NEW_LINE> def __init__(self, **options): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def setup_parser(self, parser): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def extract_keys(self): <NEW_LINE> <INDENT> raise NotImplementedError | Templating Engines Gettext Extract interface that needs to be inherited
by a templating engine to provide the extraction code. | 6259908a656771135c48ae35 |
@functools.total_ordering <NEW_LINE> class LineSet: <NEW_LINE> <INDENT> def __init__( self, name, lines, ignore_comments=False, ignore_docstrings=False, ignore_imports=False, ignore_signatures=False, ): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self._real_lines = lines <NEW_LINE> self._stripped_lines = stripped_lines( lines, ignore_comments, ignore_docstrings, ignore_imports, ignore_signatures ) <NEW_LINE> self._index = self._mk_index() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "<Lineset for %s>" % self.name <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._real_lines) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> return self._stripped_lines[index] <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.name < other.name <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return id(self) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, LineSet): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def enumerate_stripped(self, start_at=0): <NEW_LINE> <INDENT> idx = start_at <NEW_LINE> if start_at: <NEW_LINE> <INDENT> lines = self._stripped_lines[start_at:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lines = self._stripped_lines <NEW_LINE> <DEDENT> for line in lines: <NEW_LINE> <INDENT> yield idx, line <NEW_LINE> idx += 1 <NEW_LINE> <DEDENT> <DEDENT> def find(self, stripped_line): <NEW_LINE> <INDENT> return self._index.get(stripped_line, ()) <NEW_LINE> <DEDENT> def _mk_index(self): <NEW_LINE> <INDENT> index = defaultdict(list) <NEW_LINE> for line_no, line in enumerate(self._stripped_lines): <NEW_LINE> <INDENT> if line: <NEW_LINE> <INDENT> index[line].append(line_no) <NEW_LINE> <DEDENT> <DEDENT> return index | Holds and indexes all the lines of a single source file | 6259908a167d2b6e312b839c |
class ProcessStep(ModelBase): <NEW_LINE> <INDENT> __dump_attributes__ = ["type", "options"] <NEW_LINE> type = None <NEW_LINE> options = None <NEW_LINE> def __str__(self, *args, **kwargs): <NEW_LINE> <INDENT> return "ProcessStep Type: %s" % (self.type) | Object representing a process step | 6259908aa05bb46b3848bf2b |
class RenewalRequirementMissing(Exception): <NEW_LINE> <INDENT> pass | Gets raised when an OCSP renewal is run while not all requirements are met. | 6259908ae1aae11d1e7cf619
class UndefinedAttrDependencyError(Exception): <NEW_LINE> <INDENT> pass | Raised when no dependency could be found for a given attribute. | 6259908ad8ef3951e32c8c63 |
class RequiredIfNot(Required): <NEW_LINE> <INDENT> def __init__(self, other_field_name, *args, **kwargs): <NEW_LINE> <INDENT> self.other_field_name = other_field_name <NEW_LINE> super(RequiredIfNot, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def __call__(self, form, field): <NEW_LINE> <INDENT> other_field = form._fields.get(self.other_field_name) <NEW_LINE> other_field_set = other_field and bool(other_field.data) <NEW_LINE> field_set = field and bool(field.data) <NEW_LINE> if other_field_set == field_set: <NEW_LINE> <INDENT> raise ValidationError('Please use either {0} or {1}'.format( other_field.label.text, field.label.text )) | A validator which makes a field mutually exclusive with another | 6259908a71ff763f4b5e93b9 |
class ExpressBus(IVehicle): <NEW_LINE> <INDENT> def running(self): <NEW_LINE> <INDENT> print("坐快速公交(经济绿色)", end='') | Express bus (bus rapid transit) | 6259908a4527f215b58eb7a5
class Stock(models.Model): <NEW_LINE> <INDENT> name = models.CharField( max_length=20, blank=True, null=True, verbose_name='Name') <NEW_LINE> avatar = models.ImageField( default='img/stocks/stock.png', verbose_name='Avatar', upload_to='img/stocks', null=True, blank=True) <NEW_LINE> cost = models.FloatField( default=0, blank=True, null=True, verbose_name='Cost') <NEW_LINE> dividend_income = models.FloatField( default=0, blank=True, null=True, verbose_name='Dividend income') <NEW_LINE> list_costs = models.TextField( default='[]', blank=False, null=True, verbose_name='List costs') <NEW_LINE> def json_to_string(self, json_value): <NEW_LINE> <INDENT> self.list_costs = json.dumps(json_value) <NEW_LINE> <DEDENT> def string_to_json(self): <NEW_LINE> <INDENT> return json.loads(self.list_costs) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> db_table = 'Stock' <NEW_LINE> verbose_name = 'Stock' <NEW_LINE> verbose_name_plural = 'Stocks' <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return self.name | Stock model. | 6259908aec188e330fdfa4b9 |
class Andand(object): <NEW_LINE> <INDENT> def __init__(self, item=None): <NEW_LINE> <INDENT> self.item = item <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> item = getattr(self.item, name) <NEW_LINE> return item if name is 'item' else Andand(item) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return Andand() <NEW_LINE> <DEDENT> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> return self.item | A Ruby inspired null soaking object
Examples:
>>> kwargs = {'key': 'value'}
>>> kw = Objectify(kwargs)
>>> kw.key == 'value'
True
>>> Andand(kw).key.missing.undefined.item
>>> Andand(kw).key.missing.undefined() | 6259908a5fdd1c0f98e5fb82 |
class PseGru(nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_dim=10, mlp1=[10, 32, 64], pooling='mean_std', mlp2=[132, 128], with_extra=True, extra_size=4, hidden_dim=128, mlp4=[128, 64, 32, 20], positions=None): <NEW_LINE> <INDENT> super(PseGru, self).__init__() <NEW_LINE> self.spatial_encoder = PixelSetEncoder(input_dim, mlp1=mlp1, pooling=pooling, mlp2=mlp2, with_extra=with_extra, extra_size=extra_size) <NEW_LINE> self.temporal_encoder = GRU(in_channels=mlp2[-1], hidden_dim=hidden_dim, positions=positions) <NEW_LINE> self.decoder = get_decoder(mlp4) <NEW_LINE> self.name = '_'.join([self.spatial_encoder.name, self.temporal_encoder.name]) <NEW_LINE> <DEDENT> def forward(self, input): <NEW_LINE> <INDENT> out = self.spatial_encoder(input) <NEW_LINE> out = self.temporal_encoder(out) <NEW_LINE> out = self.decoder(out) <NEW_LINE> return out <NEW_LINE> <DEDENT> def param_ratio(self): <NEW_LINE> <INDENT> total = get_ntrainparams(self) <NEW_LINE> s = get_ntrainparams(self.spatial_encoder) <NEW_LINE> t = get_ntrainparams(self.temporal_encoder) <NEW_LINE> c = get_ntrainparams(self.decoder) <NEW_LINE> print('TOTAL TRAINABLE PARAMETERS : {}'.format(total)) <NEW_LINE> print('RATIOS: Spatial {:5.1f}% , Temporal {:5.1f}% , Classifier {:5.1f}%'.format(s / total * 100, t / total * 100, c / total * 100)) <NEW_LINE> return total | Pixel-Set encoder + GRU | 6259908a7b180e01f3e49e6a |
class DescribeSubnetRequest(JDCloudRequest): <NEW_LINE> <INDENT> def __init__(self, parameters, header=None, version="v1"): <NEW_LINE> <INDENT> super(DescribeSubnetRequest, self).__init__( '/regions/{regionId}/subnets/{subnetId}', 'GET', header, version) <NEW_LINE> self.parameters = parameters | Query subnet details | 6259908a656771135c48ae36
class WxMenu(Jsonable): <NEW_LINE> <INDENT> def __init__(self, dic): <NEW_LINE> <INDENT> self.items = [] <NEW_LINE> if dic is None or len(dic) == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if "menu" in dic.keys(): <NEW_LINE> <INDENT> buttons = dic["menu"]["button"] <NEW_LINE> <DEDENT> elif "button" in dic.keys(): <NEW_LINE> <INDENT> buttons = dic["button"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.warn("菜单初始化数据格式错误:找不到'button'键值!") <NEW_LINE> return <NEW_LINE> <DEDENT> for button in buttons: <NEW_LINE> <INDENT> if "type" in button.keys(): <NEW_LINE> <INDENT> menuclass = MENUITEM_TYPES[button["type"]] <NEW_LINE> menuitme = menuclass(button) <NEW_LINE> self.items.append(menuitme) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> submenu = MenuItem(button) <NEW_LINE> for subbutton in button["sub_button"]: <NEW_LINE> <INDENT> menuclass = MENUITEM_TYPES[subbutton["type"]] <NEW_LINE> menuitme = menuclass(subbutton) <NEW_LINE> submenu.add_submenuitem(menuitme) <NEW_LINE> <DEDENT> self.items.append(submenu) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def to_json(self, contains_menu=False): <NEW_LINE> <INDENT> if contains_menu: <NEW_LINE> <INDENT> template = '{"menu":{"button":%s}}' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> template = '{"button":%s}' <NEW_LINE> <DEDENT> return template % json.dumps(self.items, ensure_ascii=False, default=parse_menuitem) | Custom menu object,
which helps an official account enrich its interface so that users can understand its features better and faster | 6259908af9cc0f698b1c60d1
class MPILog(object): <NEW_LINE> <INDENT> def __init__(self, rank=0, log_dir='./logs'): <NEW_LINE> <INDENT> self.disabled = False <NEW_LINE> self.root_logger = logging.getLogger() <NEW_LINE> self.root_logger.setLevel(logging.WARNING) <NEW_LINE> if not os.path.exists(log_dir): <NEW_LINE> <INDENT> os.makedirs(log_dir, exist_ok=True) <NEW_LINE> <DEDENT> log_name = log_dir+'/worker-%.3d.log' % rank <NEW_LINE> log_format = logging.Formatter( "%(asctime)s %(name)-25s %(levelname)-9s %(message)s") <NEW_LINE> self.file_handler = logging.FileHandler(log_name, mode='w') <NEW_LINE> self.file_handler.setLevel(logging.WARNING) <NEW_LINE> self.file_handler.setFormatter(log_format) <NEW_LINE> self.root_logger.addHandler(self.file_handler) <NEW_LINE> logging.info("Initialized") <NEW_LINE> <DEDENT> def disable(self): <NEW_LINE> <INDENT> logging.info("Disable logging") <NEW_LINE> self.root_logger.disabled = True <NEW_LINE> self.disabled = True <NEW_LINE> <DEDENT> def debug(self, string): <NEW_LINE> <INDENT> if self.disabled == False: <NEW_LINE> <INDENT> logging.debug(string) <NEW_LINE> <DEDENT> <DEDENT> def info(self, string): <NEW_LINE> <INDENT> if self.disabled == False: <NEW_LINE> <INDENT> logging.info(string) <NEW_LINE> <DEDENT> <DEDENT> def critical(self, string): <NEW_LINE> <INDENT> if self.disabled == False: <NEW_LINE> <INDENT> logging.critical(string) | Class to log messages coming from other classes. Messages contain
{Time stamp} {Class name} {Log level} {Message}. Errors, warnings and info
are logged into the console. To disable logging, call Logger().disable()
Parameters
----------
debug : bool
Log DEBUG messages in 'debug.log'; default is False | 6259908a3346ee7daa338468 |
class UnknownProperties(results.VersionResult, results.Warning): <NEW_LINE> <INDENT> def __init__(self, properties, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.properties = tuple(properties) <NEW_LINE> <DEDENT> @property <NEW_LINE> def desc(self): <NEW_LINE> <INDENT> properties = ' '.join(self.properties) <NEW_LINE> return f'unknown PROPERTIES="{properties}"' | Package's PROPERTIES metadata has unknown entries. | 6259908a4a966d76dd5f0af2 |
class LED(Phidget): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Phidget.__init__(self) <NEW_LINE> try: <NEW_LINE> <INDENT> PhidgetLibrary.getDll().CPhidgetLED_create(byref(self.handle)) <NEW_LINE> <DEDENT> except RuntimeError: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> def getDiscreteLED(self, index): <NEW_LINE> <INDENT> ledVal = c_int() <NEW_LINE> try: <NEW_LINE> <INDENT> result = PhidgetLibrary.getDll().CPhidgetLED_getDiscreteLED(self.handle, c_int(index), byref(ledVal)) <NEW_LINE> <DEDENT> except RuntimeError: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> if result > 0: <NEW_LINE> <INDENT> raise PhidgetException(result) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ledVal.value <NEW_LINE> <DEDENT> <DEDENT> def setDiscreteLED(self, index, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = PhidgetLibrary.getDll().CPhidgetLED_setDiscreteLED(self.handle, c_int(index), c_int(value)) <NEW_LINE> <DEDENT> except RuntimeError: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> if result > 0: <NEW_LINE> <INDENT> raise PhidgetException(result) <NEW_LINE> <DEDENT> <DEDENT> def getLEDCount(self): <NEW_LINE> <INDENT> LEDCount = c_int() <NEW_LINE> try: <NEW_LINE> <INDENT> result = PhidgetLibrary.getDll().CPhidgetLED_getLEDCount(self.handle, byref(LEDCount)) <NEW_LINE> <DEDENT> except RuntimeError: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> if result > 0: <NEW_LINE> <INDENT> raise PhidgetException(result) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return LEDCount.value | This class represents a Phidget LED. All methods to control a Phidget LED are implemented in this class.
The Phidget LED is a board that is meant for driving LEDs. Currently, the only available version drives 64 LEDs, but other versions may become available so this number is not absolute.
LEDs can be controlled individually, at brightness levels from 0-100.
Extends:
Phidget | 6259908abf627c535bcb30df |
class Kubectl: <NEW_LINE> <INDENT> def create_namespace(self, namespace, timeout=30): <NEW_LINE> <INDENT> cmd = ["kubectl", "create", "namespace", namespace] <NEW_LINE> return subprocess.run(cmd, timeout=timeout, check=True, shell=False, text=True) <NEW_LINE> <DEDENT> def get_pods(self, namespace=None, timeout=30): <NEW_LINE> <INDENT> cmd = [ "kubectl", "get", "po", ] <NEW_LINE> if namespace: <NEW_LINE> <INDENT> cmd.append(f"-n {namespace}") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cmd.append("-A") <NEW_LINE> <DEDENT> return subprocess.run(cmd, timeout=timeout, check=True, shell=False, text=True) <NEW_LINE> <DEDENT> def apply(self, file, sync=False, timeout=10): <NEW_LINE> <INDENT> with tempfile.NamedTemporaryFile(mode="w") as tmp_app: <NEW_LINE> <INDENT> with open(file, "r") as app: <NEW_LINE> <INDENT> contents = yaml.safe_load(app) <NEW_LINE> <DEDENT> if not sync: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> del contents["spec"]["syncPolicy"]["automated"] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> yaml.dump(contents, tmp_app) <NEW_LINE> cmd = [ "kubectl", "apply", "-f", f"{tmp_app.name}", ] <NEW_LINE> return subprocess.run( cmd, timeout=timeout, check=True, shell=False, text=True, ) | Kubectl cli wrapper | 6259908a091ae35668706850 |
class _ApiOfficial(ApiBase): <NEW_LINE> <INDENT> channel = Api.OFFICIAL <NEW_LINE> def __init__(self, entry_point: Any) -> None: <NEW_LINE> <INDENT> logging.warning('Using official version!') <NEW_LINE> self.streaming_api = entry_point.GetStreamFilter <NEW_LINE> <DEDENT> def get_tweets(self, where: List, lang: List=['en'], pages: int=25) -> Iterator[str]: <NEW_LINE> <INDENT> for tweet in self.streaming_api(locations=where, languages=lang): <NEW_LINE> <INDENT> yield tweet['text'] <NEW_LINE> <DEDENT> <DEDENT> def get_tweets_map(self, where: List, lang: List=['en'], pages: int=25) -> Iterator[dict]: <NEW_LINE> <INDENT> for tweet in self.streaming_api(locations=where, languages=lang): <NEW_LINE> <INDENT> MAP_RESPONSE['tweet'] = tweet['text'] <NEW_LINE> polygon = Polygon(tweet['place']['bounding_box']['coordinates'][0]) <NEW_LINE> centroid = polygon.representative_point().coords[0] <NEW_LINE> MAP_RESPONSE['geometry']['coordinates'] = list(centroid)[::-1] <NEW_LINE> yield MAP_RESPONSE | Official Twitter API | 6259908aaad79263cf4303c7 |
class Agent(NeutronAPIDictWrapper): <NEW_LINE> <INDENT> def __init__(self, apiresource): <NEW_LINE> <INDENT> _init_apiresource(apiresource) <NEW_LINE> super(Agent, self).__init__(apiresource) | Wrapper for neutron agents. | 6259908badb09d7d5dc0c169 |
class Slate(System): <NEW_LINE> <INDENT> def __init__(self, source_dir=None, database=None, pattern=r'sla_([\d]{6})\.csv'): <NEW_LINE> <INDENT> super().__init__(source_dir, database, pattern) <NEW_LINE> self.create_table() <NEW_LINE> <DEDENT> def create_table(self): <NEW_LINE> <INDENT> connection = sqlite3.connect(self.database) <NEW_LINE> connection.execute(Query.create_slate) <NEW_LINE> connection.commit() <NEW_LINE> connection.close() <NEW_LINE> <DEDENT> def insert_one_file(self, file_path=None, db_connect=None, date=None): <NEW_LINE> <INDENT> if not db_connect: <NEW_LINE> <INDENT> raise ConnectionError('No connection of database provided!') <NEW_LINE> <DEDENT> cursor = db_connect.cursor() <NEW_LINE> for cwid, first, middle, last, username, email, dcisionfn, prnd, defer, dcisionln, pchange, special in file_reading_gen(file_path, 12, header=True): <NEW_LINE> <INDENT> data = (cwid, to_upper(first), to_upper(middle), to_upper(last), first, middle, last, email, username, date if date else 'undated') <NEW_LINE> cursor.execute(Query.insert_slate, data) <NEW_LINE> db_connect.commit() <NEW_LINE> <DEDENT> <DEDENT> def print_count(self): <NEW_LINE> <INDENT> connection = sqlite3.connect(self.database) <NEW_LINE> cursor = connection.cursor() <NEW_LINE> pp_lst = list() <NEW_LINE> for row in cursor.execute(Query.summary_slate): <NEW_LINE> <INDENT> pp_lst.append(row) <NEW_LINE> <DEDENT> print('\nSlate information:') <NEW_LINE> print(tabulate(pp_lst, headers=['received_date', 'count'], showindex='always', tablefmt='fancy_grid')) <NEW_LINE> connection.close() | Slate is responsible for generating new Stevens identifications for newly admitted students
of Stevens. There are two general types of students: brand-new students and continuing students.
However, some continuing students forget to indicate that they are former Stevens students, and
Slate generates a new set of Stevens IDs for them. | 6259908b97e22403b383cb06
class ResultsSet(): <NEW_LINE> <INDENT> def __init__( self, client, handle, input_type=None, output_type=None, query=None): <NEW_LINE> <INDENT> self.client = client <NEW_LINE> self.handle = handle <NEW_LINE> self.input_type = input_type <NEW_LINE> self.output_type = output_type <NEW_LINE> self.query = query <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ( f'<{_class_name(self.output_type)} ' f'base_url={self.client.base_url} handle={self.handle}>') <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.client.set_count(self.handle, self.output_type) <NEW_LINE> <DEDENT> def __or__(self, other_set): <NEW_LINE> <INDENT> return self._operation(other_set, self.client.set_union) <NEW_LINE> <DEDENT> def __and__(self, other_set): <NEW_LINE> <INDENT> return self._operation(other_set, self.client.set_intersection) <NEW_LINE> <DEDENT> def __sub__(self, other_set): <NEW_LINE> <INDENT> return self._operation(other_set, self.client.set_difference) <NEW_LINE> <DEDENT> def _operation(self, other_set, method): <NEW_LINE> <INDENT> new_handle = method(self.handle, other_set.handle, self.output_type) <NEW_LINE> return ResultsSet( self.client, new_handle, input_type=self.input_type, output_type=self.output_type, query=self.query) <NEW_LINE> <DEDENT> def get_list(self, values_included=[], sort_by=None): <NEW_LINE> <INDENT> return ResultsList( results_set=self, values_included=values_included, sort_by=sort_by) | Instances of ResultsSet subclasses can be combined with set operators,
and then prepared for evaluation by calling get_list(),
which returns a ResultsList. | 6259908bd8ef3951e32c8c65 |
class GameLogicError(Error): <NEW_LINE> <INDENT> rc = 11 <NEW_LINE> def __init__(self, msg_key, msg_value=""): <NEW_LINE> <INDENT> self.msg = utils.get_msg(msg_key, msg_value) or msg_key | Game logic error exception
Raised when a logical inconsistency is found while processing game logic
Attributes:
msg: the specific error message to display on the front end
Args:
msg_key: the category of calculation logic where the error occurred
msg_value: the specific cause of the error within that category | 6259908b167d2b6e312b839f
class EDTestCasePluginExecuteExecGnomv0_2_list(EDTestCasePluginExecute): <NEW_LINE> <INDENT> def __init__(self, _strTestName=None): <NEW_LINE> <INDENT> EDTestCasePluginExecute.__init__(self, "EDPluginExecGnomv0_2") <NEW_LINE> self.setConfigurationFile(self.getRefConfigFile()) <NEW_LINE> self.setDataInputFile(os.path.join(self.getPluginTestsDataHome(), "XSDataInputGnom_reference_list.xml")) <NEW_LINE> self.setReferenceDataOutputFile(os.path.join(self.getPluginTestsDataHome(), "XSDataResultGnom_reference.xml")) <NEW_LINE> <DEDENT> @timeit <NEW_LINE> def testExecute(self): <NEW_LINE> <INDENT> self.run() <NEW_LINE> <DEDENT> def process(self): <NEW_LINE> <INDENT> self.addTestMethod(self.testExecute) <NEW_LINE> <DEDENT> def postProcess(self): <NEW_LINE> <INDENT> self.plugin.plotFittingResults() | Those are all execution tests for the EDNA Exec plugin Gnomv0_2 | 6259908b99fddb7c1ca63be4 |
class AutoGraderException(Exception): <NEW_LINE> <INDENT> pass | Base class for exceptions in this module | 6259908bfff4ab517ebcf426 |
class ShowChassisFirmwareNoForwarding(ShowChassisFirmware): <NEW_LINE> <INDENT> cli_command = [ 'show chassis firmware no-forwarding' ] <NEW_LINE> def cli(self, output=None): <NEW_LINE> <INDENT> if not output: <NEW_LINE> <INDENT> out = self.device.execute(self.cli_command[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out = output <NEW_LINE> <DEDENT> return super().cli(output=out) | Parser for:
- show chassis firmware no-forwarding | 6259908bbf627c535bcb30e3 |
class Identifier(CIStr): <NEW_LINE> <INDENT> def __new__(cls, value): <NEW_LINE> <INDENT> if not value: <NEW_LINE> <INDENT> raise ValueError("PostgreSQL identifiers cannot be blank") <NEW_LINE> <DEDENT> if not Identifier._re_chk.match(value): <NEW_LINE> <INDENT> value = '"%s"' % value.replace('"', '""') <NEW_LINE> <DEDENT> return CIStr.__new__(cls, value) <NEW_LINE> <DEDENT> _re_chk = re.compile(r'^[a-z_][a-z0-9_\$]*$', re.IGNORECASE) <NEW_LINE> @classmethod <NEW_LINE> def parse_arg(self, s): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return Identifier(s) <NEW_LINE> <DEDENT> except ValueError as e: <NEW_LINE> <INDENT> raise ArgumentTypeError(e) | A string modeling a PostgreSQL identifier. | 6259908b5fcc89381b266f66 |
class PvmSEAMechanismDriver(mech_pvm_base.PvmMechanismDriverBase): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(PvmSEAMechanismDriver, self).__init__(pconst.AGENT_TYPE_PVM_SEA, pconst.VIF_TYPE_PVM_SEA) <NEW_LINE> <DEDENT> def try_to_bind_segment_for_agent(self, context, segment, agent): <NEW_LINE> <INDENT> bindable = (super(PvmSEAMechanismDriver, self). try_to_bind_segment_for_agent(context, segment, agent)) <NEW_LINE> if bindable: <NEW_LINE> <INDENT> self.rpc_publisher.port_update(context._plugin_context, context._port, segment[api.NETWORK_TYPE], segment[api.SEGMENTATION_ID], segment[api.PHYSICAL_NETWORK]) <NEW_LINE> <DEDENT> return bindable <NEW_LINE> <DEDENT> def get_allowed_network_types(self, agent=None): <NEW_LINE> <INDENT> return [p_constants.TYPE_VLAN] | Attach to networks using PowerVM Shared Ethernet agent.
The PvmSEAMechanismDriver integrates the ml2 plugin with the
PowerVM Shared Ethernet Agent. | 6259908bd486a94d0ba2dbc5 |
class TextMessage: <NEW_LINE> <INDENT> notification_message_max_len = 12 <NEW_LINE> def __init__(self, sent_by, time, content): <NEW_LINE> <INDENT> self.sent_by = sent_by <NEW_LINE> self.time = time <NEW_LINE> self.content = content <NEW_LINE> <DEDENT> def notification(self): <NEW_LINE> <INDENT> noti_string = "{}, {}\n".format(self.sent_by, self.time) <NEW_LINE> noti_string += self.content if len(self.content) <= TextMessage.notification_message_max_len else self.content[:TextMessage.notification_message_max_len] + "..." <NEW_LINE> return noti_string | Text message class | 6259908b4527f215b58eb7a8
class Deck(CardCollection): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Deck, self).__init__() <NEW_LINE> for rank in range(1, 105): <NEW_LINE> <INDENT> self.add(Card(rank)) | Initialize Deck and fill with Cards. | 6259908b5fdd1c0f98e5fb88 |
class FlowControl(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, mks_instance, mfcs, devices, name): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.mfcs = mfcs <NEW_LINE> self.mks = mks_instance <NEW_LINE> self.pullsocket = DateDataPullSocket(name, devices, timeouts=3.0, port=9000) <NEW_LINE> self.pullsocket.start() <NEW_LINE> self.pushsocket = DataPushSocket(name, action='enqueue') <NEW_LINE> self.pushsocket.start() <NEW_LINE> self.livesocket = LiveSocket(name, devices) <NEW_LINE> self.livesocket.start() <NEW_LINE> self.running = True <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while self.running: <NEW_LINE> <INDENT> time.sleep(0.1) <NEW_LINE> qsize = self.pushsocket.queue.qsize() <NEW_LINE> while qsize > 0: <NEW_LINE> <INDENT> element = self.pushsocket.queue.get() <NEW_LINE> mfc = list(element.keys())[0] <NEW_LINE> print(element[mfc]) <NEW_LINE> print('Queue: ' + str(qsize)) <NEW_LINE> self.mks.set_flow(element[mfc], self.mfcs[mfc]) <NEW_LINE> qsize = self.pushsocket.queue.qsize() <NEW_LINE> <DEDENT> for mfc in self.mfcs: <NEW_LINE> <INDENT> print('!!!') <NEW_LINE> flow = self.mks.read_flow(self.mfcs[mfc]) <NEW_LINE> print(mfc + ': ' + str(flow)) <NEW_LINE> self.pullsocket.set_point_now(mfc, flow) <NEW_LINE> self.livesocket.set_point_now(mfc, flow) | Keep updated values of the current flow | 6259908b283ffb24f3cf54b2 |
class ShippingAddressUpdateView(UpdateView): <NEW_LINE> <INDENT> model = ShippingAddress <NEW_LINE> context_object_name = 'address' <NEW_LINE> template_name = 'dashboard/orders/shippingaddress_form.html' <NEW_LINE> form_class = forms.ShippingAddressForm <NEW_LINE> def get_object(self, queryset=None): <NEW_LINE> <INDENT> order = get_order_for_user_or_404(self.request.user, self.kwargs['number']) <NEW_LINE> return get_object_or_404(self.model, order=order) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> ctx = super(ShippingAddressUpdateView, self).get_context_data(**kwargs) <NEW_LINE> ctx['order'] = self.object.order <NEW_LINE> return ctx <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> old_address = ShippingAddress.objects.get(id=self.object.id) <NEW_LINE> response = super(ShippingAddressUpdateView, self).form_valid(form) <NEW_LINE> changes = get_change_summary(old_address, self.object) <NEW_LINE> if changes: <NEW_LINE> <INDENT> msg = _("Delivery address updated:\n%s") % changes <NEW_LINE> self.object.order.notes.create(user=self.request.user, message=msg, note_type=OrderNote.SYSTEM) <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> def get_success_url(self): <NEW_LINE> <INDENT> messages.info(self.request, _("Delivery address updated")) <NEW_LINE> return reverse('dashboard:order-detail', kwargs={'number': self.object.order.number, }) | Dashboard view to update an order's shipping address.
Supports the permission-based dashboard. | 6259908b4a966d76dd5f0af8 |
class ReportMagneticField(ReportVariableTask): <NEW_LINE> <INDENT> title = "Magnetic Field" <NEW_LINE> _minimum_time_between_samples = 0.3 <NEW_LINE> def __init__(self, gauge: Lakeshore475, store: Store) -> None: <NEW_LINE> <INDENT> super(ReportMagneticField, self).__init__(store) <NEW_LINE> self.gauge = gauge <NEW_LINE> <DEDENT> @property <NEW_LINE> def variable_type(self): <NEW_LINE> <INDENT> return MagneticField <NEW_LINE> <DEDENT> @property <NEW_LINE> def variable(self) -> Quantity: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._wait_for_minimum_time() <NEW_LINE> return self.gauge.field <NEW_LINE> <DEDENT> except NoEchoedCommandFoundError: <NEW_LINE> <INDENT> return nan * gauss <NEW_LINE> <DEDENT> <DEDENT> def _wait_for_minimum_time(self): <NEW_LINE> <INDENT> sleep(self._minimum_time_between_samples) <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return "{0}(gauge={1}, store={2})".format( self.__class__.__name__, self.gauge, self.store ) | Implements a task to return the magnetic field | 6259908b63b5f9789fe86d7c |
class DossierSource(_DossierBase): <NEW_LINE> <INDENT> def reader(self, chemin_relatif): <NEW_LINE> <INDENT> return csv.reader(self._open(chemin_relatif, "r"), delimiter=self.delimiteur, quotechar=self.quotechar) <NEW_LINE> <DEDENT> def DictReader(self, chemin_relatif): <NEW_LINE> <INDENT> return csv.DictReader(self._open(chemin_relatif, "r"), delimiter=self.delimiteur, quotechar=self.quotechar) <NEW_LINE> <DEDENT> def lire(self, chemin_relatif): <NEW_LINE> <INDENT> return self._open(chemin_relatif, "rb").read() | Data source.
An instance represents a directory of source data with all the appropriate
parser settings (delimiter, character format). The reader() method
opens a CSV by relative name. | 6259908b167d2b6e312b83a1
class Generator: <NEW_LINE> <INDENT> def __init__(self, template_dir, template_name, context=None): <NEW_LINE> <INDENT> self.template_dir = template_dir <NEW_LINE> self.template_path = os.path.join(template_dir, template_name + '.docx') <NEW_LINE> self.document_path_doc = os.path.join(settings.MEDIA_ROOT, template_name + '.docx') <NEW_LINE> self.document_path_pdf = os.path.join(settings.MEDIA_ROOT, template_name + '.pdf') <NEW_LINE> self.context = context <NEW_LINE> self.jinja_env = jinja2.Environment() <NEW_LINE> self.set_jinja_filters() <NEW_LINE> self.create() <NEW_LINE> <DEDENT> def set_jinja_filters(self): <NEW_LINE> <INDENT> self.jinja_env.filters['f'] = lambda a: a[:len(a) // 2 if not len(a) % 2 else len(a) // 2 + 1 ] <NEW_LINE> self.jinja_env.filters['l'] = lambda a: a[ len(a) // 2 if not len(a) % 2 else len(a) // 2 + 1:] <NEW_LINE> self.jinja_env.filters['s'] = lambda a: ', '.join(a) <NEW_LINE> <DEDENT> def generate_word(self): <NEW_LINE> <INDENT> doc = DocxTemplate(self.template_path) <NEW_LINE> doc.render(self.context, jinja_env=self.jinja_env) <NEW_LINE> doc.save(self.document_path_doc) <NEW_LINE> <DEDENT> def generate_pdf(self): <NEW_LINE> <INDENT> Popen([ settings.LIBREOFFICE_CALL, '--headless', '--convert-to', 'pdf', self.document_path_doc, '--outdir', settings.MEDIA_ROOT ]).communicate() <NEW_LINE> <DEDENT> def create(self): <NEW_LINE> <INDENT> self.generate_word() <NEW_LINE> self.generate_pdf() <NEW_LINE> return self | Class to create Word-documents from templates and incoming data | 6259908ba8370b77170f1fe0 |
class HueBridge(object): <NEW_LINE> <INDENT> def __init__(self, host, hass, filename, allow_unreachable=False, allow_in_emulated_hue=True, allow_hue_groups=True): <NEW_LINE> <INDENT> self.host = host <NEW_LINE> self.hass = hass <NEW_LINE> self.filename = filename <NEW_LINE> self.allow_unreachable = allow_unreachable <NEW_LINE> self.allow_in_emulated_hue = allow_in_emulated_hue <NEW_LINE> self.allow_hue_groups = allow_hue_groups <NEW_LINE> self.bridge = None <NEW_LINE> self.configured = False <NEW_LINE> self.config_request_id = None <NEW_LINE> hass.data[DOMAIN][socket.gethostbyname(host)] = self <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> import phue <NEW_LINE> try: <NEW_LINE> <INDENT> self.bridge = phue.Bridge( self.host, config_file_path=self.hass.config.path(self.filename)) <NEW_LINE> <DEDENT> except ConnectionRefusedError: <NEW_LINE> <INDENT> _LOGGER.error("Error connecting to the Hue bridge at %s", self.host) <NEW_LINE> return <NEW_LINE> <DEDENT> except phue.PhueRegistrationException: <NEW_LINE> <INDENT> _LOGGER.warning("Connected to Hue at %s but not registered.", self.host) <NEW_LINE> self.request_configuration() <NEW_LINE> return <NEW_LINE> <DEDENT> if self.config_request_id: <NEW_LINE> <INDENT> request_id = self.config_request_id <NEW_LINE> self.config_request_id = None <NEW_LINE> configurator = self.hass.components.configurator <NEW_LINE> configurator.request_done(request_id) <NEW_LINE> <DEDENT> self.configured = True <NEW_LINE> discovery.load_platform( self.hass, 'light', DOMAIN, {'bridge_id': socket.gethostbyname(self.host)}) <NEW_LINE> def hue_activate_scene(call): <NEW_LINE> <INDENT> group_name = call.data[ATTR_GROUP_NAME] <NEW_LINE> scene_name = call.data[ATTR_SCENE_NAME] <NEW_LINE> self.bridge.run_scene(group_name, scene_name) <NEW_LINE> <DEDENT> descriptions = load_yaml_config_file( os.path.join(os.path.dirname(__file__), 'services.yaml')) <NEW_LINE> self.hass.services.register( DOMAIN, SERVICE_HUE_SCENE, hue_activate_scene, descriptions.get(SERVICE_HUE_SCENE), schema=SCENE_SCHEMA) <NEW_LINE> <DEDENT> def request_configuration(self): <NEW_LINE> <INDENT> configurator = self.hass.components.configurator <NEW_LINE> if self.config_request_id: <NEW_LINE> <INDENT> configurator.notify_errors( self.config_request_id, "Failed to register, please try again.") <NEW_LINE> return <NEW_LINE> <DEDENT> self.config_request_id = configurator.request_config( "Philips Hue", lambda data: self.setup(), description=CONFIG_INSTRUCTIONS, entity_picture="/static/images/logo_philips_hue.png", submit_caption="I have pressed the button" ) <NEW_LINE> <DEDENT> def get_api(self): <NEW_LINE> <INDENT> return self.bridge.get_api() <NEW_LINE> <DEDENT> def set_light(self, light_id, command): <NEW_LINE> <INDENT> return self.bridge.set_light(light_id, command) <NEW_LINE> <DEDENT> def set_group(self, light_id, command): <NEW_LINE> <INDENT> return self.bridge.set_group(light_id, command) | Manages a single Hue bridge. | 6259908b23849d37ff852ccf |
class UserRegisterForm(UserCreationForm): <NEW_LINE> <INDENT> email = forms.EmailField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ['username', 'email', 'password1', 'password2'] | Form which adds the Email field to the sign up process | 6259908b63b5f9789fe86d7e |
class Story(models.Model): <NEW_LINE> <INDENT> title = models.CharField( max_length=90, help_text='The title of the survey.' ) <NEW_LINE> is_public = models.BooleanField( default=True, help_text='Determines if the survey is public for users to take.' ) <NEW_LINE> start = models.TextField( blank=True, help_text='The beginning of a story that will be provided to the user.' ) <NEW_LINE> end = models.TextField( blank=True, help_text='The end of a story that will be provided to the user.' ) <NEW_LINE> public = PublicStorySurveyQuerySet.as_manager() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.title | A database model representing a survey. | 6259908bd8ef3951e32c8c68 |
class BciCompetitionDataset(BaseDataset): <NEW_LINE> <INDENT> def __init__(self, dataset_md=None, **kwargs): <NEW_LINE> <INDENT> super(BciCompetitionDataset, self).__init__(dataset_md=dataset_md) <NEW_LINE> self.data_directory = dataset_md["dataset_directory"] <NEW_LINE> self.file_path = dataset_md["mat_file"] <NEW_LINE> self.dataset_md = dataset_md <NEW_LINE> <DEDENT> def store(self, result_dir): <NEW_LINE> <INDENT> raise NotImplementedError("Storing of BciCompetitionDataset is currently not supported!") <NEW_LINE> <DEDENT> def get_data(self, run_nr, split_nr, train_test): <NEW_LINE> <INDENT> filepath = self.data_directory + os.path.sep + self.file_path <NEW_LINE> data = scipy.io.loadmat(filepath) <NEW_LINE> signal = data['Signal'] <NEW_LINE> flashing = data['Flashing'] <NEW_LINE> stimulus_code = data['StimulusCode'] <NEW_LINE> stimulus_type = data['StimulusType'] <NEW_LINE> target_char = data['TargetChar'] <NEW_LINE> window = 240 <NEW_LINE> channels = 64 <NEW_LINE> epochs = signal.shape[0] <NEW_LINE> data_collection = [] <NEW_LINE> responses = numpy.zeros((12, 15, window, channels)) <NEW_LINE> for epoch in range(epochs): <NEW_LINE> <INDENT> counter = 0 <NEW_LINE> rowcolcnt=numpy.ones(12) <NEW_LINE> for n in range(1, signal.shape[1]): <NEW_LINE> <INDENT> if (flashing[epoch,n]==0 and flashing[epoch,n-1]==1): <NEW_LINE> <INDENT> rowcol=stimulus_code[epoch,n-1] <NEW_LINE> responses[rowcol-1,rowcolcnt[rowcol-1]-1,:,:]=signal[epoch,n-24:n+window-24,:] <NEW_LINE> rowcolcnt[rowcol-1]=rowcolcnt[rowcol-1]+1 <NEW_LINE> <DEDENT> <DEDENT> avgresp=numpy.mean(responses,1) <NEW_LINE> targets = stimulus_code[epoch,:]*stimulus_type[epoch,:] <NEW_LINE> target_rowcol = [] <NEW_LINE> for value in targets: <NEW_LINE> <INDENT> if value not in target_rowcol: <NEW_LINE> <INDENT> target_rowcol.append(value) <NEW_LINE> <DEDENT> <DEDENT> target_rowcol.sort() <NEW_LINE> for i in range(avgresp.shape[0]): <NEW_LINE> <INDENT> temp = avgresp[i,:,:] <NEW_LINE> data = TimeSeries(input_array = temp, channel_names = range(64), sampling_frequency = window) <NEW_LINE> if i == target_rowcol[1]-1 or i == target_rowcol[2]-1: <NEW_LINE> <INDENT> data_collection.append((data,"Target")) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data_collection.append((data,"Standard")) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return data_collection | Class for reading the Berlin BrainComputerInterface-competition data
This module contains a class (*BciCompetitionDataset*) that encapsulates
most relevant code to use the data from the BCI competition.
Currently, only reading of the data is supported. | 6259908b656771135c48ae3b |
class RestAPIContext(object): <NEW_LINE> <INDENT> def __init__(self, _domain, _model_path, **kwargs): <NEW_LINE> <INDENT> self.kwargs = kwargs <NEW_LINE> self.kwargs['http_domain'] = _domain <NEW_LINE> self.domain = _domain <NEW_LINE> self.model_path = _model_path <NEW_LINE> <DEDENT> @property <NEW_LINE> def context(self): <NEW_LINE> <INDENT> _ = { "base": {}, "context": {} } <NEW_LINE> _['base'] = self.kwargs <NEW_LINE> import codecs <NEW_LINE> with codecs.open(self.model_path, 'rb', 'utf-8') as fr: <NEW_LINE> <INDENT> _["context"] = json.loads(fr.read()) <NEW_LINE> <DEDENT> methods = [] <NEW_LINE> permissions = _["context"].get("permissions", []) <NEW_LINE> if type(_['context']['method']) in [dict, ]: <NEW_LINE> <INDENT> for http_method, http_desc in _['context']['method'].items(): <NEW_LINE> <INDENT> http_desc["method"] = http_method <NEW_LINE> methods.append(http_desc) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> methods = _['context']['method'] or _['context']['endpoints'] <NEW_LINE> <DEDENT> for http_desc in methods: <NEW_LINE> <INDENT> http_method = http_desc['method'] <NEW_LINE> if http_desc.get("list"): <NEW_LINE> <INDENT> http_desc['response'] = json.dumps([_["context"]['model'], ], indent=4) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> http_desc['response'] = json.dumps(_["context"]['model'], indent=4) <NEW_LINE> <DEDENT> if http_method.upper() in ['POST', 'PUT']: <NEW_LINE> <INDENT> http_desc['request'] = json.dumps(_["context"]['model'], indent=4) <NEW_LINE> <DEDENT> if not http_desc.get("codes"): <NEW_LINE> <INDENT> http_desc['codes'] = {} <NEW_LINE> http_desc['codes'].update(_['base']['global_codes']) <NEW_LINE> <DEDENT> if not http_desc.get("headers"): <NEW_LINE> <INDENT> http_desc['headers'] = {} <NEW_LINE> http_desc['headers'].update(_['base']['global_headers']) <NEW_LINE> <DEDENT> if not http_desc.get("params"): <NEW_LINE> <INDENT> http_desc['params'] = {} <NEW_LINE> <DEDENT> http_desc['permissions'] = http_desc.get("permissions", []) + permissions <NEW_LINE> <DEDENT> _['context']['method'] = methods <NEW_LINE> _['context']['endpoints'] = methods <NEW_LINE> return _ <NEW_LINE> <DEDENT> def get_rst_content(self): <NEW_LINE> <INDENT> templateLoader = FileSystemLoader(searchpath=DIRPATH) <NEW_LINE> method_map = {"detail": "get"} <NEW_LINE> templateEnv = Environment(loader=templateLoader, lstrip_blocks=True, trim_blocks=True) <NEW_LINE> templateEnv.filters['method_wrapper'] = lambda x: method_map.get(x, x) <NEW_LINE> TEMPLATE_FILE = "api.jinja2.1.txt" <NEW_LINE> template = templateEnv.get_template(TEMPLATE_FILE) <NEW_LINE> return template.render(**self.context) | 6259908baad79263cf4303d0 |
|
class ListOfQueues(FixedTypeList): <NEW_LINE> <INDENT> def __init__(self, items=None): <NEW_LINE> <INDENT> super().__init__(pyof_class=PacketQueue, items=items) | List of queues.
Represented by instances of :class:`PacketQueue` and used on
:class:`QueueGetConfigReply` objects. | 6259908ba05bb46b3848bf31 |
class test_user(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.APP = APP.test_client() <NEW_LINE> <DEDENT> def test_user_registration(self): <NEW_LINE> <INDENT> reg_response = self.APP.post('/api/v2/signup', data=json.dumps( dict(username="Dag", email="[email protected]", password="Dag1234")), content_type='application/json') <NEW_LINE> result = json.loads(reg_response.data) <NEW_LINE> self.assertEqual(result, {"success": "User added"}) <NEW_LINE> <DEDENT> def test_user_registration_no_user_name(self): <NEW_LINE> <INDENT> reg_response = self.APP.post('/api/v2/signup', data=json.dumps( dict(username="", email="[email protected]", password="Dag1234")), content_type='application/json') <NEW_LINE> result = json.loads(reg_response.data) <NEW_LINE> self.assertEqual(result, {"Error": "Fields have not been filled"}) <NEW_LINE> <DEDENT> def test_user_registration_no_password(self): <NEW_LINE> <INDENT> reg_response = self.APP.post('/api/v2/signup', data=json.dumps( dict(username="Dag", email="[email protected]", password="")), content_type='application/json') <NEW_LINE> result = json.loads(reg_response.data) <NEW_LINE> self.assertEqual(result, {"Error": "Fields have not been filled"}) <NEW_LINE> <DEDENT> def test_user_registration_no_email(self): <NEW_LINE> <INDENT> reg_response = self.APP.post('/api/v2/signup', data=json.dumps( dict(username="Dag", email="[email protected]", password="")), content_type='application/json') <NEW_LINE> result = json.loads(reg_response.data) <NEW_LINE> self.assertEqual(result, {"Error": "Fields have not been filled"}) | This Tests user registration | 6259908bd486a94d0ba2dbcb |
class yqout1(models.Model): <NEW_LINE> <INDENT> publish_time = models.CharField(max_length=32,verbose_name='发布时间') <NEW_LINE> title = models.CharField(max_length=300,verbose_name='标题') <NEW_LINE> hf = models.CharField(max_length=30,verbose_name='回复数量') <NEW_LINE> ck = models.CharField(max_length=32,verbose_name='查看量') <NEW_LINE> url = models.CharField(max_length=500,verbose_name='连接') <NEW_LINE> ly = models.CharField(max_length=64,verbose_name='来源网站') <NEW_LINE> gjc = models.CharField(max_length=32,verbose_name='关键词') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = '实时舆情信息' <NEW_LINE> verbose_name_plural = '实时舆情信息' | Consultation and complaint list | 6259908b099cdd3c63676205
class GenerativeModel(object): <NEW_LINE> <INDENT> def __init__(self, brain_name): <NEW_LINE> <INDENT> self._model_name = brain_name <NEW_LINE> self.brain_name = brain_name + ".brain" <NEW_LINE> self.brain_questions_name = brain_name + "_questions.brain" <NEW_LINE> self.brain = None <NEW_LINE> self.brain_questions = None <NEW_LINE> self.question_prob = 0.3 <NEW_LINE> self.similarity_min = 0.1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._model_name <NEW_LINE> <DEDENT> def train(self, corpus): <NEW_LINE> <INDENT> self.brain_questions = self._learn_corpus(corpus, self.brain_questions_name, questions=True) <NEW_LINE> self.brain = self._learn_corpus(corpus, self.brain_name, questions=False) <NEW_LINE> return self <NEW_LINE> <DEDENT> def generate_start(self): <NEW_LINE> <INDENT> start_seed = random.choice(['Hello', 'Hi']) <NEW_LINE> line = self.generate(start_seed) <NEW_LINE> return line <NEW_LINE> <DEDENT> def generate(self, context): <NEW_LINE> <INDENT> u = random.random() <NEW_LINE> while True: <NEW_LINE> <INDENT> if self.brain_questions and u < self.question_prob: <NEW_LINE> <INDENT> new_line = self.brain_questions.reply(context) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_line = self.brain.reply(context) <NEW_LINE> <DEDENT> return new_line <NEW_LINE> <DEDENT> <DEDENT> def _learn_corpus(self, corpus_file, brain_name, questions=False): <NEW_LINE> <INDENT> if not os.path.isfile(brain_name): <NEW_LINE> <INDENT> brain = Brain(brain_name) <NEW_LINE> print("- Training...") <NEW_LINE> corpus = read_file(corpus_file) <NEW_LINE> corpus = clean_text(corpus, get_questions=questions) <NEW_LINE> for sent in corpus: <NEW_LINE> <INDENT> brain.learn(sent) <NEW_LINE> <DEDENT> <DEDENT> return Brain(brain_name) | Abstract class for a generative model for text | 6259908bad47b63b2c5a9468 |
class GetData: <NEW_LINE> <INDENT> cookies = None <NEW_LINE> re_call="18258148330" <NEW_LINE> re_pwd="wx123456" <NEW_LINE> re_memberId="1123267" | Reflection class that uses reflection to pass cookies | 6259908b99fddb7c1ca63be8