code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
---|---|---|
class UserAuth(models.Model):
    """A way of authenticating a user.

    Generally either a third-party verifiable identity using OAuth or
    similar, or an identity token the user can provide themselves (like an
    email address) combined with the password field on User. Each backend
    has a single indexed "identifier" field plus a "data" JSON blob.
    """

    TYPE_CHOICES = [
        ("email", "Email"),
        ("twitter", "Twitter"),
        ("facebook", "Facebook"),
        ("google", "Google"),
    ]

    user = models.ForeignKey(User, related_name="auths")
    type = models.CharField(max_length=30, choices=TYPE_CHOICES)
    identifier = models.TextField(db_index=True)
    data = JsonBField(blank=True, null=True)

    @classmethod
    def by_identifier(cls, id_type, identifier):
        """Return the UserAuth for a normalised identifier, or None."""
        valid_types = [choice for choice, _label in cls.TYPE_CHOICES]
        if id_type not in valid_types:
            raise ValueError("Invalid identifier type %s" % id_type)
        identifier = cls.normalise_identifier(id_type, identifier)
        try:
            return UserAuth.objects.get(type=id_type, identifier=identifier)
        except UserAuth.DoesNotExist:
            return None

    @classmethod
    def normalise_identifier(cls, id_type, identifier):
        """Canonicalise an identifier for its backend type."""
        # Email addresses and Twitter handles compare case-insensitively.
        if id_type in ("email", "twitter"):
            return identifier.lower()
        return identifier

    def __unicode__(self):
        return "%s: %s" % (self.type, self.human_name)

    @property
    def human_name(self):
        """Human-readable form of the identifier."""
        return self.identifier
class PepConstructingUserClass(PepValue, PepTypeMatcher):
    """A wrapper around a class that marks it as still being constructed.

    At the moment this wrapper is applied automatically (in cpprenderer.py)
    but it may eventually require an explicit declaration such as:
    def_init( constructing(MyClass) self ).
    """

    def __init__(self, userclass):
        PepValue.__init__(self)
        self.userclass = userclass

    def construction_args(self):
        return (self.userclass,)

    def matches(self, other, env):
        # Matching is delegated entirely to the wrapped class.
        return self.userclass.matches(other, env)

    def get_name(self):
        return self.userclass.get_name()

    def underlying_class(self):
        return self.userclass.underlying_class()

    def get_namespace(self):
        return self.userclass.get_namespace()

    def runtime_namespace(self, instance, insert_placeholders):
        """Build an instance namespace with uninitialised member variables."""
        type_is(bool, insert_placeholders)
        namespace = PepInstanceNamespace(instance, self.get_namespace())
        for member_type, member_name in self.userclass.member_variables:
            namespace[member_name] = PepUninitedMemberVariable(member_type, "")
        return namespace
class MainApp(App):
    """Application class for the simple painter."""

    title = 'Simple painter'

    def build(self):
        # Root widget of the application.
        return MainWindow()
class DirShouldNotHaveFiles(PhotolibDirLinter):
    """Checks whether a non-leaf directory contains files.

    Returns a list of Error records; empty when the directory is a leaf
    directory (which may legitimately hold files) or contains no files.
    """

    def __call__(self, abspath: str, workspace_relpath: str,
                 dirnames: typing.List[str], filenames: typing.List[str],
                 files_ignored: bool):
        del files_ignored  # unused; part of the linter call interface
        # Leaf directories are allowed to contain files.
        if common.PHOTOLIB_LEAF_DIR_RE.match(workspace_relpath):
            return []
        if not filenames:
            return []
        # NOTE(review): assumes `abspath` ends with a path separator —
        # confirm against callers, otherwise a separator must be inserted.
        filelist = " ".join(f"'{abspath}{f}'" for f in filenames)
        return [
            Error(workspace_relpath, "dir/not_empty",
                  "directory should be empty but contains files",
                  # Bug fix: each filename is already individually quoted, so
                  # wrapping the joined list in another pair of quotes turned
                  # the whole list into one malformed shell argument.
                  fix_it=f"rm -rv {filelist}")
        ]
class ListEntityTypesRequest(proto.Message):
    """The request message for EntityTypes.ListEntityTypes.

    Attributes:
        parent (str): Required. The agent to list all entity types from.
            Supported formats: ``projects/<Project ID>/agent`` and
            ``projects/<Project ID>/locations/<Location ID>/agent``.
        language_code (str): Optional. Language used to access
            language-specific data; defaults to the agent's language.
        page_size (int): Optional. Maximum items per page (default 100,
            at most 1000).
        page_token (str): Optional. next_page_token from a previous list
            request.
    """

    parent = proto.Field(proto.STRING, number=1,)
    language_code = proto.Field(proto.STRING, number=2,)
    page_size = proto.Field(proto.INT32, number=3,)
    page_token = proto.Field(proto.STRING, number=4,)
class ProfileCommand(Command):
    """Run profile tests from setup (``python setup.py profile``)."""

    description = 'Run profile from setup'
    user_options = [('abc', None, 'abc')]

    def initialize_options(self):
        # No options to initialise.
        pass

    def finalize_options(self):
        # No options to finalise.
        pass

    def run(self):
        # Discover and run every profile*.py module via unittest.
        cmd = 'python -m unittest discover -p "profile*.py" -v'
        subprocess.run(cmd, shell=True, stderr=subprocess.STDOUT)
class ServiceState(Enum):
    """Service interaction states."""

    REQUIRED = 0
    OPTIONAL = 1
    CONFLICTED = 2
@expand_message_class
class PresGetMatchingCredentials(AdminHolderMessage):
    """Retrieve matching credentials for a presentation request."""

    message_type = "presentation-get-matching-credentials"

    class Fields:
        presentation_exchange_id = fields.Str(
            required=True,
            description="Presentation to match credentials to.",
            example=UUIDFour.EXAMPLE,
        )
        paginate = fields.Nested(
            Paginate.Schema,
            required=False,
            data_key="~paginate",
            missing=Paginate(limit=10, offset=0),
            description="Pagination decorator.",
        )

    def __init__(
        self, presentation_exchange_id: str, paginate: Paginate = None, **kwargs
    ):
        super().__init__(**kwargs)
        self.presentation_exchange_id = presentation_exchange_id
        self.paginate = paginate

    @log_handling
    @admin_only
    async def handle(self, context: RequestContext, responder: BaseResponder):
        """Look up the exchange record and reply with matching credentials."""
        holder = cast(IndySdkHolder, context.inject(IndyHolder))
        async with context.session() as session:
            # Report a missing/invalid exchange back to the admin connection.
            async with ExceptionReporter(
                responder, InvalidPresentationExchange, context.message
            ):
                pres_ex_record = await PresRequestApprove.get_pres_ex_record(
                    session, self.presentation_exchange_id
                )
        matches = PresMatchingCredentials(
            presentation_exchange_id=self.presentation_exchange_id,
            matching_credentials=await holder.get_credentials_for_presentation_request_by_referent(
                pres_ex_record.presentation_request,
                (),
                self.paginate.offset,
                self.paginate.limit,
                extra_query={},
            ),
            presentation_request=pres_ex_record.presentation_request,
            page=Page(count_=self.paginate.limit, offset=self.paginate.offset),
        )
        matches.assign_thread_from(self)
        await responder.send_reply(matches)
class ExposedAttachHandler(Vulnerability, Event):
    """Opens a websocket that could enable an attacker to attach to a
    running container."""

    def __init__(self):
        Vulnerability.__init__(
            self, Kubelet, "Exposed Attaching To Container",
            category=RemoteCodeExec)
class _DataStats(NamedTuple): <NEW_LINE> <INDENT> x_min: float <NEW_LINE> y_min: float <NEW_LINE> x_max: float <NEW_LINE> y_max: float <NEW_LINE> y_median: float <NEW_LINE> x_bins: float <NEW_LINE> x_bin_size: float <NEW_LINE> y_bins: float <NEW_LINE> y_bin_size: float <NEW_LINE> x_delta: float <NEW_LINE> y_delta: float | Internal class for storing values computed from the data.
.. attribute:: x_min, x_max:
Minimum and maximum x-axis datapoints.
.. attribute:: y_min, y_max:
Minimum and maximum y-axis datapoints.
.. attribute:: y_median:
Median y-axis datapoint, for calculating scaling of y-axis.
.. attribute:: x_bins, x_bin_size:
The number of bins on the x-axis, and their size.
.. attribute:: y_bins, y_bin_size:
The number of bins on the y-axis, and their size.
.. attribute:: x_delta, y_delta:
Delta values for x-axis and y-axis respectively.
These are useful values for calculating
how much of the graph should be visible.
In general, 2 * delta will be visible on each axis. | 625990918a349b6b43687f4d |
class EndActionStatement(Statement):
    """Represents the action end FDL statement."""

    def convertToFDL(self):
        # Render through the user-customisable template.
        return str.format(customize.endActionTemplate, **self.attributes)

    def entityList(self):
        return [('object', 'any')]

    def bookmarkAttribute(self):
        return 'action'
@provider.configure(name="VirshProvider")
class VirshProvider(provider.ProviderFactory):
    """Create VMs from prebuilt templates.

    Sample configuration::

        {
            "type": "VirshProvider",
            "connection": "alex@performance-01",  # ssh connection to vms host
            "template_name": "stack-01-devstack-template",  # vm image template
            "template_user": "ubuntu",        # vm user to launch devstack
            "template_password": "password"   # vm password to launch devstack
        }
    """

    CONFIG_SCHEMA = {
        "type": "object",
        "properties": {
            "type": {"type": "string"},
            "connection": {"type": "string", "pattern": "^.+@.+$"},
            "template_name": {"type": "string"},
            "template_user": {"type": "string"},
            "template_password": {"type": "string"},
        },
        "required": ["connection", "template_name", "template_user"],
    }

    def create_servers(self, image_uuid=None, type_id=None, amount=1):
        """Clone `amount` VMs from the template and return them."""
        return [self.create_vm(str(uuid.uuid4())) for _ in range(amount)]

    def create_vm(self, vm_name):
        """Clone the template VM, start it and register the resource."""
        virt_url = self._get_virt_connection_url(self.config["connection"])
        subprocess.check_call(
            ["virt-clone", "--connect=%s" % virt_url,
             "-o", self.config["template_name"],
             "-n", vm_name, "--auto-clone"])
        subprocess.check_call(
            ["virsh", "--connect=%s" % virt_url, "start", vm_name])
        self.resources.create({"name": vm_name})
        return provider.Server(
            self._determine_vm_ip(vm_name),
            self.config["template_user"],
            password=self.config.get("template_password"),
        )

    def destroy_servers(self):
        """Destroy every VM recorded for this provider."""
        for resource in self.resources.get_all():
            self.destroy_vm(resource["info"]["name"])
            self.resources.delete(resource)

    def destroy_vm(self, vm_name):
        """Force-stop and undefine a VM, removing all of its storage."""
        print("Destroy VM %s" % vm_name)
        vconnection = self._get_virt_connection_url(self.config["connection"])
        subprocess.check_call(
            ["virsh", "--connect=%s" % vconnection, "destroy", vm_name])
        subprocess.check_call(
            ["virsh", "--connect=%s" % vconnection, "undefine", vm_name,
             "--remove-all-storage"])
        return True

    @staticmethod
    def _get_virt_connection_url(connection):
        # qemu over ssh: "user@host" -> "qemu+ssh://user@host/system"
        return "qemu+ssh://%s/system" % connection

    def _determine_vm_ip(self, vm_name):
        """Poll the host for the VM's IP address (up to 3 attempts)."""
        ssh_opt = "-o StrictHostKeyChecking=no"
        script_path = os.path.dirname(__file__) + "/virsh/get_domain_ip.sh"
        subprocess.check_call(
            ["scp", ssh_opt, script_path,
             "%s:~/get_domain_ip.sh" % self.config["connection"]])
        tries = 0
        ip = None
        while tries < 3 and not ip:
            out = subprocess.check_output(
                ["ssh", ssh_opt, self.config["connection"],
                 "./get_domain_ip.sh", vm_name])
            try:
                ip = netaddr.IPAddress(out)
            except netaddr.core.AddrFormatError:
                ip = None
            tries += 1
            time.sleep(10)
        return str(ip)
class File(TelegramObject):
    """This object represents a Telegram File.

    Args:
        file_id (str): unique file identifier.
        **kwargs: arbitrary keyword arguments.

    Keyword Args:
        file_size (Optional[int]): size of the file, defaults to 0.
        file_path (Optional[str]): download path/URL, defaults to ''.
    """

    def __init__(self, file_id, **kwargs):
        self.file_id = str(file_id)
        self.file_size = int(kwargs.get('file_size', 0))
        self.file_path = str(kwargs.get('file_path', ''))

    @staticmethod
    def de_json(data):
        """Build a File from a decoded JSON dict, or None for empty input."""
        return File(**data) if data else None

    def download(self, custom_path=None):
        """Download the file to custom_path, or to the URL's basename."""
        url = self.file_path
        target = custom_path if custom_path else basename(url)
        _download(url, target)
class Erdos_Renyi(AbstractGenerator):
    """Erdos-Renyi random graph generator using the G(n, p) model.

    Parameters:
        p: probability of a given edge connection (default .1).
        self_loop: whether to allow self loops (default False).
    """

    @classmethod
    def _get_default_params(cls) -> Dict:
        return {'p': .1, 'self_loop': False}

    def _generate_structure(self) -> np.ndarray:
        p = self.params['p']
        # Each directed edge is present independently with probability p.
        adjacency: np.ndarray = np.random.choice(
            [1, 0], size=(self.n, self.n), p=[p, 1 - p])
        if not self.params['self_loop']:
            np.fill_diagonal(adjacency, 0)
        return adjacency
class DellBooleanFalseOk(DellBoolean):
    """DellBoolean variant whose only OK state is "False".

    Inherits from DellBoolean; the only difference is the added
    okStates attribute.
    """

    # Bug fix: ('False') is just the string "False" — parentheses alone do
    # not make a tuple — so any substring check (e.g. 'alse' in okStates)
    # matched. The trailing comma makes it a proper one-element tuple.
    okStates = ('False',)
class UserNotFoundException(APIException):
    """Raised when the user is not part of the system."""

    status_code = 404
    detail = "You are not apparently part of the system."
class SheetsSupport(SheetsABC):
    """Generic functions for interaction with the Google Sheets API."""

    def __init__(self, sheet_id, spreadsheet_id):
        self.sheet_id = sheet_id
        self.spreadsheet_id = spreadsheet_id
        self.sheet_service = get_sheet_service().spreadsheets()

    def get_sheet_data(self):
        """Return the sheet's cell values, or None on an empty response."""
        response = self.sheet_service.values().get(
            spreadsheetId=self.spreadsheet_id, range=self.sheet_id
        ).execute()
        if not response:
            logger.error("[ERROR] Empty response from Google Sheets API.")
            return None
        return response['values']
class SimpleDigestStorage(object):
    """Annotation-backed digest storage for a mailing list.

    Messages are appended to a persistent list stored in the context's
    annotations; consume_digest() returns the accumulated messages and
    clears the store.
    """

    implements(IDigestStorage)

    def __init__(self, context):
        self.context = context
        annotations = IAnnotations(context)
        listen_annot = annotations.setdefault(PROJECTNAME, OOBTree())
        self.digest = listen_annot.setdefault('digest', PersistentList())

    def add_message_to_digest(self, msg):
        self.digest.append(msg)

    def get_digest(self):
        return self.digest

    def consume_digest(self):
        # Snapshot the messages, then clear the persistent list in place.
        digest = list(self.digest)
        del self.digest[:]
        return digest
class AugustusExonLoss(AbstractAugustusClassifier):
    """Does the augustus version of this transcript lose an exon?

    Calculated by comparing the exon boundary intervals between the
    genePreds of the augustus and reference transcripts.
    """

    @property
    def rgb(self):
        return self.colors["alignment"]

    def run(self, shortIntronSize=30):
        self.getAugustusTranscriptDict()
        self.getTranscriptDict()
        classify_dict = {}
        details_dict = defaultdict(list)
        for aug_aId, aug_t in self.augustusTranscriptDict.iteritems():
            base_id = psl_lib.remove_augustus_alignment_number(aug_aId)
            if base_id not in self.transcriptDict:
                continue
            t = self.transcriptDict[base_id]
            # Only compare transcripts on the same strand and chromosome.
            if aug_t.strand != t.strand or aug_t.chromosome != t.chromosome:
                continue
            aug_t_intervals = aug_t.exonIntervals
            merged_t_intervals = seq_lib.gap_merge_intervals(
                t.exonIntervals, gap=shortIntronSize)
            for interval in merged_t_intervals:
                # A reference exon with no augustus counterpart is a loss.
                if seq_lib.interval_not_intersect_intervals(
                        aug_t_intervals, interval):
                    classify_dict[aug_aId] = 1
                    details_dict[aug_aId].append(
                        interval.get_bed(self.rgb,
                                         "/".join([self.column, aug_aId])))
            if aug_aId not in classify_dict:
                classify_dict[aug_aId] = 0
        self.dumpValueDicts(classify_dict, details_dict)
class CTD_ANON_27(pyxb.binding.basis.complexTypeDefinition):
    """Complex type [anonymous] with content type ELEMENT_ONLY.

    pyxb-generated binding; reformatted only, bindings unchanged.
    """

    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location(
        '/mnt/work/BlenderRobotDesigner/robot_designer_plugin/resources/xsd_sdf/gripper.xsd',
        8, 10)
    _ElementMap = {}
    _AttributeMap = {}

    __detach_steps = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'detach_steps'), 'detach_steps',
        '__AbsentNamespace0_CTD_ANON_27_detach_steps', True,
        pyxb.utils.utility.Location(
            '/mnt/work/BlenderRobotDesigner/robot_designer_plugin/resources/xsd_sdf/gripper.xsd',
            11, 14), )
    detach_steps = property(__detach_steps.value, __detach_steps.set, None, None)

    __attach_steps = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'attach_steps'), 'attach_steps',
        '__AbsentNamespace0_CTD_ANON_27_attach_steps', True,
        pyxb.utils.utility.Location(
            '/mnt/work/BlenderRobotDesigner/robot_designer_plugin/resources/xsd_sdf/gripper.xsd',
            15, 14), )
    attach_steps = property(__attach_steps.value, __attach_steps.set, None, None)

    __min_contact_count = pyxb.binding.content.ElementDeclaration(
        pyxb.namespace.ExpandedName(None, 'min_contact_count'),
        'min_contact_count',
        '__AbsentNamespace0_CTD_ANON_27_min_contact_count', True,
        pyxb.utils.utility.Location(
            '/mnt/work/BlenderRobotDesigner/robot_designer_plugin/resources/xsd_sdf/gripper.xsd',
            19, 14), )
    min_contact_count = property(
        __min_contact_count.value, __min_contact_count.set, None, None)

    _ElementMap.update({
        __detach_steps.name(): __detach_steps,
        __attach_steps.name(): __attach_steps,
        __min_contact_count.name(): __min_contact_count,
    })
    _AttributeMap.update({})
class ListVirtualHubRouteTableV2SResult(msrest.serialization.Model):
    """List of VirtualHubRouteTableV2s and a URL nextLink to get the next
    set of results.

    :param value: List of VirtualHubRouteTableV2s.
    :type value: list[~azure.mgmt.network.v2021_02_01.models.VirtualHubRouteTableV2]
    :param next_link: URL to get the next set of operation list results,
        if there are any.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[VirtualHubRouteTableV2]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ListVirtualHubRouteTableV2SResult, self).__init__(**kwargs)
        self.value = kwargs.get('value', None)
        self.next_link = kwargs.get('next_link', None)
class CustomerProfile(models.Model):
    """Generated model: per-user customer profile."""

    user = models.OneToOneField(
        "users.User",
        on_delete=models.CASCADE,
        related_name="customerprofile_user",
    )
    mobile_number = models.CharField(max_length=20,)
    photo = models.URLField()
    timestamp_created = models.DateTimeField(auto_now_add=True,)
    last_updated = models.DateTimeField(auto_now=True,)
    last_login = models.DateTimeField(null=True, blank=True,)
class DockerConfig(object):
    """The main config part; values come from the parsed `args` object."""

    ENV = args.ENV
    MODE = args.MODE
    DEBUG = args.DEBUG
    POSTGRES_MASQUERADER_READ_WRITE = args.POSTGRES_MASQUERADER_READ_WRITE
    MASQUERADER_LOCAL = args.MASQUERADER_LOCAL
    POSTGRES_TEST_DSN = args.POSTGRES_TEST_DSN
    POSTGRES_PASS_DSN = args.POSTGRES_PASS_DSN
    POSTGRES_NOPASS_DSN = args.POSTGRES_NOPASS_DSN
    POSTGRES_TEST_GITLAB = args.POSTGRES_TEST_GITLAB
class DefaultMethodController(object):
    """Default controller for handling requests.

    Handles the OPTIONS method and any HTTP methods not explicitly
    implemented by the application.
    """

    def options(self, req, allowed_methods, *args, **kwargs):
        # 204 No Content with an Allow header listing supported methods.
        raise webob.exc.HTTPNoContent(headers=[('Allow', allowed_methods)])

    def reject(self, req, allowed_methods, *args, **kwargs):
        # 405 Method Not Allowed, again advertising the supported methods.
        raise webob.exc.HTTPMethodNotAllowed(
            headers=[('Allow', allowed_methods)])
class Derivative:
    """Symbolic derivative of a polynomial expression string."""

    def __init__(self, polynom):
        self.polynom = polynom
        # Maps an operator symbol to the corresponding binary function.
        # Bug fix: operator.div does not exist on Python 3; truediv matches
        # the '/' semantics of Python 3 division.
        self.operator = {"+": operator.add, "-": operator.sub,
                         "*": operator.mul, "/": operator.truediv}
        self.result = []

    def __make_monom(self):
        monoms = re.split(r'\+|\-|\*|\/', self.polynom)
        ops = re.findall(r'\+|\-|\*|\/', self.polynom)
        # NOTE(review): each iteration overwrites self.result instead of
        # accumulating, and `ops` is never used — this looks unfinished.
        # Preserved as-is pending clarification of the intended algorithm.
        for monom in monoms:
            self.result = Monom(list(monom)).derivative()

    def derivative(self, monom):
        # NOTE(review): `monom == monom` is always true, and assigning
        # self.derivative shadows this method for the instance — preserved
        # as-is; confirm intent before fixing.
        for monom in self.polynom:
            if monom == monom:
                if monom.is_trivial():
                    self.derivative = monom.param
                self.derivative = monom + monom
class LBFGSParameter(ct.Structure):
    """LBFGS parameters.

    See
    http://www.chokkan.org/software/liblbfgs/structlbfgs__parameter__t.html
    for documentation of the individual fields.
    """

    _fields_ = [
        ('m', ct.c_int),
        ('epsilon', ct.c_double),
        ('past', ct.c_int),
        ('delta', ct.c_double),
        ('max_iterations', ct.c_int),
        ('linesearch', ct.c_int),
        ('max_linesearch', ct.c_int),
        ('min_step', ct.c_double),
        ('max_step', ct.c_double),
        ('ftol', ct.c_double),
        ('wolfe', ct.c_double),
        ('gtol', ct.c_double),
        ('xtol', ct.c_double),
        ('orthantwise_c', ct.c_double),
        ('orthantwise_start', ct.c_int),
        ('orthantwise_end', ct.c_int),
    ]
class FiniteSetsOrderedByInclusion(UniqueRepresentation, Parent):
    """An example of a poset: finite sets ordered by inclusion.

    This class provides a minimal implementation of a poset.

    EXAMPLES::

        sage: P = Posets().example(); P
        An example of a poset: sets ordered by inclusion
    """

    def __init__(self):
        Parent.__init__(self, category=Posets())

    def _repr_(self):
        return "An example of a poset: sets ordered by inclusion"

    def le(self, x, y):
        # x <= y iff the set wrapped by x is contained in the one wrapped by y.
        return x.value.issubset(y.value)

    def an_element(self):
        return self(Set([1, 4, 6]))

    class Element(ElementWrapper):
        wrapped_class = Set_object_enumerated
class RelationDataContent(LazyMapping, MutableMapping):
    """Data content of a unit or application in a relation."""

    def __init__(self, relation, entity, backend):
        self.relation = relation
        self._entity = entity
        self._backend = backend
        self._is_app = isinstance(entity, Application)

    def _load(self):
        # A relation that has gone away reads as empty data.
        try:
            return self._backend.relation_get(
                self.relation.id, self._entity.name, self._is_app)
        except RelationNotFoundError:
            return {}

    def _is_mutable(self):
        # Application data is writable only by the leader of our own app;
        # unit data is writable only by our own unit.
        if self._is_app:
            if self._backend.app_name != self._entity.name:
                return False
            return self._backend.is_leader()
        return self._backend.unit_name == self._entity.name

    def __setitem__(self, key, value):
        if not self._is_mutable():
            raise RelationDataError(
                'cannot set relation data for {}'.format(self._entity.name))
        if not isinstance(value, str):
            raise RelationDataError('relation data values must be strings')
        self._backend.relation_set(self.relation.id, key, value, self._is_app)
        # Keep the lazily-loaded cache in sync; an empty string deletes.
        if self._lazy_data is not None:
            if value == '':
                self._data.pop(key, None)
            else:
                self._data[key] = value

    def __delitem__(self, key):
        # Deletion is modelled as writing the empty string.
        self.__setitem__(key, '')
@six.add_metaclass(DomainObjectMetaclass)
class DomainObject(AbstractDomainObject):
    """Base class for hierarchies with the default metaclass."""

    pass
class Hset(RedisProtocol):
    """Emit a Redis HMSET command in the bulk-insert protocol.

    Example::

        # HSET myhash field1 "Hello"
        with hset("/tmp/test.txt") as redis_insert:
            redis_insert(myhash, field1, "Hello")
    """

    def __call__(self, hash_name, field, value, *args, **kwargs):
        # Four protocol tokens: command, key, field, value.
        self.setup_output(4)
        self.write("HMSET")
        self.write(hash_name)
        self.write(field)
        self.write(value)
class Screen(Plane):
    """Class for representing a 3D LCD screen."""

    def __init__(self, width=0.5184, height=0.324, diagonal_size=0.61,
                 pixel_pitch=0.270, resolution=(1920, 1200),
                 aspect_ratio=(16, 10), curvature_radius=4.0):
        self.width = width
        self.height = height
        self.diagonal_size = diagonal_size
        self.pixel_pitch = pixel_pitch
        self.resolution = resolution
        self.aspect_ratio = aspect_ratio
        self.curvature_radius = curvature_radius

    def set_dimensions(self, width, height):
        self.width = width
        self.height = height
        self.grid_size = (self.width, self.height)

    def set_resolution_pixels(self, x, y):
        self.resolution = (x, y)

    def set_pixel_pitch(self, pixel_pitch):
        self.pixel_pitch = pixel_pitch
        self.grid_step = pixel_pitch

    def update(self):
        # Zero curvature radius means a flat screen; defer to Plane.
        if self.curvature_radius == 0:
            super(Screen, self).update()
        else:
            self.update_curved()
            print("curved screen")

    def update_curved(self):
        """Recompute the homogeneous 3D point grid for a curved surface."""
        x_range = range(int(round(self.grid_size[0] / self.grid_step)))
        y_range = range(int(round(self.grid_size[1] / self.grid_step)))
        xx, yy = np.meshgrid(x_range, y_range)
        # Centre the pixel grid around the origin.
        xx = (xx.astype(np.float32)) * self.grid_step - (x_range[-1] * self.grid_step / 2.)
        yy = (yy.astype(np.float32)) * self.grid_step - (y_range[-1] * self.grid_step / 2.)
        teta = np.arccos((xx) / (self.curvature_radius / 2.0))
        zz = self.curvature_radius - self.curvature_radius * np.sin(teta)
        hh = np.ones_like(xx, dtype=np.float32)
        self.plane_points = np.array(
            [xx.ravel(), yy.ravel(), zz.ravel(), hh.ravel()],
            dtype=np.float32)
        self.plane_points_basis = self.plane_points
        # Translate the grid to the screen origin.
        self.plane_points[0] += self.origin[0]
        self.plane_points[1] += self.origin[1]
        self.plane_points[2] += self.origin[2]
        self.xx = xx
        self.yy = yy
        self.zz = zz
class QuestionModel:
    """Namespace and input model for question routes."""

    v2 = Namespace('Questions', description='Questions Routes')
    questions = v2.model('Question', {
        'title': fields.String(
            required=True,
            description='This is the title of the question'),
        'body': fields.String(
            required=True,
            description='This is the body of the question'),
    })
class OptionsFile(SplittedFile): <NEW_LINE> <INDENT> @property <NEW_LINE> def options(self) -> List[str]: <NEW_LINE> <INDENT> return [self._strip_selected(value) for value in self.parse()] <NEW_LINE> <DEDENT> def _strip_selected(self, value: str) -> str: <NEW_LINE> <INDENT> return value[1:-1] if value.startswith("[") else value | File listing a set of options.
It returns available options for a file, including the selected one, if
present.
For example, for a file containing::
foo [bar] baz
:meth:`options` returns::
['foo', 'bar', 'baz'] | 625990928a349b6b43687f61 |
class TestDataSetAPI: <NEW_LINE> <INDENT> def test_dset_illegal_dim(self, backend, iterset): <NEW_LINE> <INDENT> with pytest.raises(TypeError): <NEW_LINE> <INDENT> op2.DataSet(iterset, 'illegaldim') <NEW_LINE> <DEDENT> <DEDENT> def test_dset_illegal_dim_tuple(self, backend, iterset): <NEW_LINE> <INDENT> with pytest.raises(TypeError): <NEW_LINE> <INDENT> op2.DataSet(iterset, (1, 'illegaldim')) <NEW_LINE> <DEDENT> <DEDENT> def test_dset_illegal_name(self, backend, iterset): <NEW_LINE> <INDENT> with pytest.raises(exceptions.NameTypeError): <NEW_LINE> <INDENT> op2.DataSet(iterset, 1, 2) <NEW_LINE> <DEDENT> <DEDENT> def test_dset_default_dim(self, backend, iterset): <NEW_LINE> <INDENT> assert op2.DataSet(iterset).dim == (1,) <NEW_LINE> <DEDENT> def test_dset_dim(self, backend, iterset): <NEW_LINE> <INDENT> s = op2.DataSet(iterset, 1) <NEW_LINE> assert s.dim == (1,) <NEW_LINE> <DEDENT> def test_dset_dim_list(self, backend, iterset): <NEW_LINE> <INDENT> s = op2.DataSet(iterset, [2, 3]) <NEW_LINE> assert s.dim == (2, 3) <NEW_LINE> <DEDENT> def test_dset_iter(self, backend, dset): <NEW_LINE> <INDENT> for s in dset: <NEW_LINE> <INDENT> assert s is dset <NEW_LINE> <DEDENT> <DEDENT> def test_dset_len(self, backend, dset): <NEW_LINE> <INDENT> assert len(dset) == 1 <NEW_LINE> <DEDENT> def test_dset_repr(self, backend, dset): <NEW_LINE> <INDENT> from pyop2.op2 import Set, DataSet <NEW_LINE> assert isinstance(eval(repr(dset)), op2.DataSet) <NEW_LINE> <DEDENT> def test_dset_str(self, backend, dset): <NEW_LINE> <INDENT> assert str(dset) == "OP2 DataSet: %s on set %s, with dim %s" % (dset.name, dset.set, dset.dim) <NEW_LINE> <DEDENT> def test_dset_eq(self, backend, dset): <NEW_LINE> <INDENT> dsetcopy = op2.DataSet(dset.set, dset.dim) <NEW_LINE> assert dsetcopy == dset <NEW_LINE> assert not dsetcopy != dset <NEW_LINE> <DEDENT> def test_dset_ne_set(self, backend, dset): <NEW_LINE> <INDENT> dsetcopy = op2.DataSet(op2.Set(dset.set.size), dset.dim) <NEW_LINE> assert dsetcopy != dset 
<NEW_LINE> assert not dsetcopy == dset <NEW_LINE> <DEDENT> def test_dset_ne_dim(self, backend, dset): <NEW_LINE> <INDENT> dsetcopy = op2.DataSet(dset.set, tuple(d + 1 for d in dset.dim)) <NEW_LINE> assert dsetcopy != dset <NEW_LINE> assert not dsetcopy == dset <NEW_LINE> <DEDENT> def test_dat_in_dset(self, backend, dset): <NEW_LINE> <INDENT> assert op2.Dat(dset) in dset <NEW_LINE> <DEDENT> def test_dat_not_in_dset(self, backend, dset): <NEW_LINE> <INDENT> assert op2.Dat(dset) not in op2.DataSet(op2.Set(5, 'bar')) | DataSet API unit tests | 6259909297e22403b383cbf9 |
class Factory: <NEW_LINE> <INDENT> def __init__(self, entity: t.Type[Entity], fields: t.Collection[str] = None): <NEW_LINE> <INDENT> self.entity = entity <NEW_LINE> self.mapped_fields = fields <NEW_LINE> <DEDENT> def construct(self, dto: Dto) -> Entity: <NEW_LINE> <INDENT> entity = self.entity(**dto) <NEW_LINE> entity.__get_id_field__().__set_id__(entity, dto.__id__) <NEW_LINE> return entity <NEW_LINE> <DEDENT> def deconstruct(self, entity: Entity) -> Dto: <NEW_LINE> <INDENT> data = asdict(entity) <NEW_LINE> if self.mapped_fields: <NEW_LINE> <INDENT> data = {field: value for field, value in data.items() if field in self.mapped_fields} <NEW_LINE> <DEDENT> dto = Dto(data) <NEW_LINE> dto.__id__ = entity.__get_id__() <NEW_LINE> return dto | A prototype serialization/validation class, designed to:
* support dataclasses as entities
* deconstruct specified fields (all dataclass fields by default) | 6259909255399d3f05628216 |
class corosync(Plugin, RedHatPlugin): <NEW_LINE> <INDENT> files = ('corosync',) <NEW_LINE> packages = ('corosync',) <NEW_LINE> def setup(self): <NEW_LINE> <INDENT> self.add_copy_specs([ "/etc/corosync", "/var/lib/corosync/fdata", "/var/log/cluster/corosync.log"]) <NEW_LINE> self.add_cmd_output("corosync-quorumtool -l") <NEW_LINE> self.add_cmd_output("corosync-quorumtool -s") <NEW_LINE> self.add_cmd_output("corosync-cpgtool") <NEW_LINE> self.add_cmd_output("corosync-objctl -a") <NEW_LINE> self.add_cmd_output("corosync-fplay") <NEW_LINE> self.add_cmd_output("corosync-objctl -w runtime.blackbox.dump_state=$(date +\%s)") <NEW_LINE> self.add_cmd_output("corosync-objctl -w runtime.blackbox.dump_flight_data=$(date +\%s)") <NEW_LINE> self.call_ext_prog("killall -USR2 corosync") | corosync information
| 62599092d8ef3951e32c8cde |
class ColumnSchema (object): <NEW_LINE> <INDENT> def __init__ (self, name, datatype, isNullable, extra): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.datatype = datatype <NEW_LINE> if isNullable == 'YES': <NEW_LINE> <INDENT> self.isNullableVal = True <NEW_LINE> <DEDENT> elif isNullable == 'NO': <NEW_LINE> <INDENT> self.isNullableVal = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ColumnSchemaException ('An unexpected value was encountered for field: isNullable = "%s"' % isNullable) <NEW_LINE> <DEDENT> self.extra = extra <NEW_LINE> <DEDENT> def getName (self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def getDataType (self): <NEW_LINE> <INDENT> return self.datatype <NEW_LINE> <DEDENT> def isNullable (self): <NEW_LINE> <INDENT> return self.isNullableVal <NEW_LINE> <DEDENT> def getExtra (self): <NEW_LINE> <INDENT> return self.extra <NEW_LINE> <DEDENT> def __repr__ (self): <NEW_LINE> <INDENT> s = '<column name="%s" type="%s" nullable="%s"/>\n' % ( self.getName (), self.getDataType (), self.isNullable ()) <NEW_LINE> return s | An abstract representation of a column in a database table. | 62599092f9cc0f698b1c614d |
class ConceptMap_DependsOnSchema: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_schema( max_nesting_depth: Optional[int] = 6, nesting_depth: int = 0, nesting_list: List[str] = [], max_recursion_limit: Optional[int] = 2, include_extension: Optional[bool] = False, extension_fields: Optional[List[str]] = [ "valueBoolean", "valueCode", "valueDate", "valueDateTime", "valueDecimal", "valueId", "valueInteger", "valuePositiveInt", "valueString", "valueTime", "valueUnsignedInt", "valueUri", "valueQuantity", ], extension_depth: int = 0, max_extension_depth: Optional[int] = 2, ) -> Union[StructType, DataType]: <NEW_LINE> <INDENT> from spark_fhir_schemas.stu3.complex_types.extension import ExtensionSchema <NEW_LINE> if ( max_recursion_limit and nesting_list.count("ConceptMap_DependsOn") >= max_recursion_limit ) or (max_nesting_depth and nesting_depth >= max_nesting_depth): <NEW_LINE> <INDENT> return StructType([StructField("id", StringType(), True)]) <NEW_LINE> <DEDENT> my_nesting_list: List[str] = nesting_list + ["ConceptMap_DependsOn"] <NEW_LINE> schema = StructType( [ StructField("id", StringType(), True), StructField( "extension", ArrayType( ExtensionSchema.get_schema( max_nesting_depth=max_nesting_depth, nesting_depth=nesting_depth + 1, nesting_list=my_nesting_list, max_recursion_limit=max_recursion_limit, include_extension=include_extension, extension_fields=extension_fields, extension_depth=extension_depth, max_extension_depth=max_extension_depth, ) ), True, ), StructField("property", StringType(), True), StructField("system", StringType(), True), StructField("code", StringType(), True), StructField("display", StringType(), True), ] ) <NEW_LINE> if not include_extension: <NEW_LINE> <INDENT> schema.fields = [ c if c.name != "extension" else StructField("extension", StringType(), True) for c in schema.fields ] <NEW_LINE> <DEDENT> return schema | A statement of relationships from one set of concepts to one or more other
concepts - either code systems or data elements, or classes in class models. | 62599092283ffb24f3cf55a4 |
class blender_render(blender.blender): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> blender.blender.__init__(self) <NEW_LINE> <DEDENT> def do(self, i_args): <NEW_LINE> <INDENT> data = i_args['data'] <NEW_LINE> lines = data.split('\n') <NEW_LINE> need_calc = False <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> ptpos = line.find(keypart) <NEW_LINE> if ptpos > 0: <NEW_LINE> <INDENT> parts = line[ptpos + 5:].split('-') <NEW_LINE> if len(parts) == 2: <NEW_LINE> <INDENT> ok = True <NEW_LINE> try: <NEW_LINE> <INDENT> part0 = int(parts[0]) <NEW_LINE> part1 = int(parts[1]) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> ok = False <NEW_LINE> <DEDENT> if ok: <NEW_LINE> <INDENT> if part1 > 0: <NEW_LINE> <INDENT> self.percentframe = int(100 * part0 / part1) <NEW_LINE> need_calc = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> if need_calc: <NEW_LINE> <INDENT> self.calculate() <NEW_LINE> <DEDENT> blender.blender.do(self, i_args) | Blender Render
| 6259909255399d3f05628218 |
class AmenityViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = models.Amenity.objects.all() <NEW_LINE> serializer_class = serializers.AmenitySerializer <NEW_LINE> filter_backends = (filters.OrderingFilter,) <NEW_LINE> ordering_fields = '__all__' | API endpoint for Amenity object | 625990925fdd1c0f98e5fc7b |
class CRC_14_GSM(CRC_POLY): <NEW_LINE> <INDENT> POLY = 0x202D <NEW_LINE> WIDTH = 14 | Used in mobile networks | 62599092dc8b845886d552bd |
class MovingStandardDevWindow(EventWindow): <NEW_LINE> <INDENT> def __init__(self, market_aware=True, window_length=None, delta=None): <NEW_LINE> <INDENT> EventWindow.__init__(self, market_aware, window_length, delta) <NEW_LINE> self.sum = 0.0 <NEW_LINE> self.sum_sqr = 0.0 <NEW_LINE> <DEDENT> def handle_add(self, event): <NEW_LINE> <INDENT> assert isinstance(event.price, Number) <NEW_LINE> self.sum += event.price <NEW_LINE> self.sum_sqr += event.price ** 2 <NEW_LINE> <DEDENT> def handle_remove(self, event): <NEW_LINE> <INDENT> assert isinstance(event.price, Number) <NEW_LINE> self.sum -= event.price <NEW_LINE> self.sum_sqr -= event.price ** 2 <NEW_LINE> <DEDENT> def get_stddev(self): <NEW_LINE> <INDENT> if len(self) <= 1: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> average = self.sum / len(self) <NEW_LINE> s_squared = (self.sum_sqr - self.sum * average) / (len(self) - 1) <NEW_LINE> stddev = sqrt(s_squared) <NEW_LINE> <DEDENT> return stddev | Iteratively calculates standard deviation for a particular sid
over a given time window. The expected functionality of this
class is to be instantiated inside a MovingStandardDev. | 62599092099cdd3c6367627c |
class BillingAgreementDetails(object): <NEW_LINE> <INDENT> deserialized_types = { 'billing_agreement_id': 'str', 'creation_timestamp': 'datetime', 'destination': 'ask_sdk_model.interfaces.amazonpay.model.v1.destination.Destination', 'checkout_language': 'str', 'release_environment': 'ask_sdk_model.interfaces.amazonpay.model.v1.release_environment.ReleaseEnvironment', 'billing_agreement_status': 'ask_sdk_model.interfaces.amazonpay.model.v1.billing_agreement_status.BillingAgreementStatus' } <NEW_LINE> attribute_map = { 'billing_agreement_id': 'billingAgreementId', 'creation_timestamp': 'creationTimestamp', 'destination': 'destination', 'checkout_language': 'checkoutLanguage', 'release_environment': 'releaseEnvironment', 'billing_agreement_status': 'billingAgreementStatus' } <NEW_LINE> def __init__(self, billing_agreement_id=None, creation_timestamp=None, destination=None, checkout_language=None, release_environment=None, billing_agreement_status=None): <NEW_LINE> <INDENT> self.__discriminator_value = None <NEW_LINE> self.billing_agreement_id = billing_agreement_id <NEW_LINE> self.creation_timestamp = creation_timestamp <NEW_LINE> self.destination = destination <NEW_LINE> self.checkout_language = checkout_language <NEW_LINE> self.release_environment = release_environment <NEW_LINE> self.billing_agreement_status = billing_agreement_status <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.deserialized_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x.value if isinstance(x, Enum) else x, value )) <NEW_LINE> <DEDENT> elif isinstance(value, Enum): <NEW_LINE> <INDENT> result[attr] = value.value <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> 
result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else (item[0], item[1].value) if isinstance(item[1], Enum) else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, BillingAgreementDetails): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | The result attributes from successful SetupAmazonPay call. # noqa: E501
NOTE: This class is auto generated.
Do not edit the class manually.
:param billing_agreement_id: Billing agreement id which can be used for one time and recurring purchases # noqa: E501
:type billing_agreement_id: (optional) str
:param creation_timestamp: Time at which billing agreement details created. # noqa: E501
:type creation_timestamp: (optional) datetime
:type destination: (optional) ask_sdk_model.interfaces.amazonpay.model.v1.destination.Destination
:param checkout_language: Merchant's preferred language of checkout. # noqa: E501
:type checkout_language: (optional) str
:type release_environment: (optional) ask_sdk_model.interfaces.amazonpay.model.v1.release_environment.ReleaseEnvironment
:type billing_agreement_status: (optional) ask_sdk_model.interfaces.amazonpay.model.v1.billing_agreement_status.BillingAgreementStatus | 62599092091ae35668706938 |
class BoolAttribute(BaseAttribute): <NEW_LINE> <INDENT> _config_items = {"default": [str, None], "mutate_rate": [float, None], "rate_to_true_add": [float, 0.0], "rate_to_false_add": [float, 0.0]} <NEW_LINE> def init_value(self, config): <NEW_LINE> <INDENT> default = str(getattr(config, self.default_name)).lower() <NEW_LINE> if default in ('1', 'on', 'yes', 'true'): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif default in ('0', 'off', 'no', 'false'): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif default in ('random', 'none'): <NEW_LINE> <INDENT> return bool(random() < 0.5) <NEW_LINE> <DEDENT> raise RuntimeError("Unknown default value {!r} for {!s}".format(default, self.name)) <NEW_LINE> <DEDENT> def mutate_value(self, value, config): <NEW_LINE> <INDENT> mutate_rate = getattr(config, self.mutate_rate_name) <NEW_LINE> if value: <NEW_LINE> <INDENT> mutate_rate += getattr(config, self.rate_to_false_add_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mutate_rate += getattr(config, self.rate_to_true_add_name) <NEW_LINE> <DEDENT> if mutate_rate > 0: <NEW_LINE> <INDENT> r = random() <NEW_LINE> if r < mutate_rate: <NEW_LINE> <INDENT> return random() < 0.5 <NEW_LINE> <DEDENT> <DEDENT> return value <NEW_LINE> <DEDENT> def validate(self, config): <NEW_LINE> <INDENT> pass | Class for boolean attributes such as whether a connection is enabled or not. | 625990928a349b6b43687f65 |
@tvm._ffi.register_object("transform.ModulePass") <NEW_LINE> class ModulePass(Pass): <NEW_LINE> <INDENT> pass | A pass that works on tvm.IRModule. Users don't need to interact with
this class directly. Instead, a module pass should be created through
`module_pass`, because the design of the `module_pass` API is flexible
enough to handle the creation of a module pass in different manners. In
addition, all members of a module pass can be accessed from the base class.
The same rule applies to FunctionPass as well. | 62599092be7bc26dc9252cd8 |
class AWSVPNGatewayDefinition(nixops.resources.ResourceDefinition): <NEW_LINE> <INDENT> config: AwsVpnGatewayOptions <NEW_LINE> @classmethod <NEW_LINE> def get_type(cls): <NEW_LINE> <INDENT> return "aws-vpn-gateway" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_resource_type(cls): <NEW_LINE> <INDENT> return "awsVPNGateways" <NEW_LINE> <DEDENT> def show_type(self): <NEW_LINE> <INDENT> return "{0}".format(self.get_type()) | Definition of an AWS VPN gateway. | 62599092d8ef3951e32c8ce0 |
class FusionLoadSequence(api.Loader): <NEW_LINE> <INDENT> families = ["colorbleed.imagesequence"] <NEW_LINE> representations = ["*"] <NEW_LINE> label = "Load sequence" <NEW_LINE> order = -10 <NEW_LINE> icon = "code-fork" <NEW_LINE> color = "orange" <NEW_LINE> def load(self, context, name, namespace, data): <NEW_LINE> <INDENT> from avalon.fusion import ( imprint_container, get_current_comp, comp_lock_and_undo_chunk ) <NEW_LINE> if namespace is None: <NEW_LINE> <INDENT> namespace = context['asset']['name'] <NEW_LINE> <DEDENT> path = self._get_first_image(self.fname) <NEW_LINE> comp = get_current_comp() <NEW_LINE> with comp_lock_and_undo_chunk(comp, "Create Loader"): <NEW_LINE> <INDENT> args = (-32768, -32768) <NEW_LINE> tool = comp.AddTool("Loader", *args) <NEW_LINE> tool["Clip"] = path <NEW_LINE> start = context["version"]["data"].get("startFrame", None) <NEW_LINE> if start is not None: <NEW_LINE> <INDENT> loader_shift(tool, start, relative=False) <NEW_LINE> <DEDENT> imprint_container(tool, name=name, namespace=namespace, context=context, loader=self.__class__.__name__) <NEW_LINE> <DEDENT> <DEDENT> def switch(self, container, representation): <NEW_LINE> <INDENT> self.update(container, representation) <NEW_LINE> <DEDENT> def update(self, container, representation): <NEW_LINE> <INDENT> from avalon.fusion import comp_lock_and_undo_chunk <NEW_LINE> tool = container["_tool"] <NEW_LINE> assert tool.ID == "Loader", "Must be Loader" <NEW_LINE> comp = tool.Comp() <NEW_LINE> root = api.get_representation_path(representation) <NEW_LINE> path = self._get_first_image(root) <NEW_LINE> version = io.find_one({"type": "version", "_id": representation["parent"]}) <NEW_LINE> start = version["data"].get("startFrame") <NEW_LINE> if start is None: <NEW_LINE> <INDENT> self.log.warning("Missing start frame for updated version" "assuming starts at frame 0 for: " "{} ({})".format(tool.Name, representation)) <NEW_LINE> start = 0 <NEW_LINE> <DEDENT> with comp_lock_and_undo_chunk(comp, "Update 
Loader"): <NEW_LINE> <INDENT> with preserve_trim(tool, log=self.log): <NEW_LINE> <INDENT> with preserve_inputs(tool, inputs=("HoldFirstFrame", "HoldLastFrame", "Reverse", "Depth", "KeyCode", "TimeCodeOffset")): <NEW_LINE> <INDENT> tool["Clip"] = path <NEW_LINE> <DEDENT> <DEDENT> global_in_changed = loader_shift(tool, start, relative=False) <NEW_LINE> if global_in_changed: <NEW_LINE> <INDENT> self.log.debug("Changed '%s' global in: %d" % (tool.Name, start)) <NEW_LINE> <DEDENT> tool.SetData("avalon.representation", str(representation["_id"])) <NEW_LINE> <DEDENT> <DEDENT> def remove(self, container): <NEW_LINE> <INDENT> from avalon.fusion import comp_lock_and_undo_chunk <NEW_LINE> tool = container["_tool"] <NEW_LINE> assert tool.ID == "Loader", "Must be Loader" <NEW_LINE> comp = tool.Comp() <NEW_LINE> with comp_lock_and_undo_chunk(comp, "Remove Loader"): <NEW_LINE> <INDENT> tool.Delete() <NEW_LINE> <DEDENT> <DEDENT> def _get_first_image(self, root): <NEW_LINE> <INDENT> files = sorted(os.listdir(root)) <NEW_LINE> return os.path.join(root, files[0]) | Load image sequence into Fusion | 62599092adb09d7d5dc0c261 |
class AddPhase(tk.Toplevel): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._callback = kwargs.pop("callback", None) <NEW_LINE> tk.Toplevel.__init__(self, *args, **kwargs) <NEW_LINE> self.title("GSF Strategy Planner: Add new phase") <NEW_LINE> self._entry = ttk.Entry(self, width=30) <NEW_LINE> self._entry.bind("<Return>", self.add_phase) <NEW_LINE> self._cancel_button = ttk.Button(self, text="Cancel", command=self.destroy) <NEW_LINE> self._add_button = ttk.Button(self, text="Add", command=self.add_phase) <NEW_LINE> self.grid_widgets() <NEW_LINE> <DEDENT> def grid_widgets(self): <NEW_LINE> <INDENT> self._entry.grid(row=0, column=0, columnspan=2, sticky="nswe", padx=5, pady=5) <NEW_LINE> self._cancel_button.grid(row=1, column=0, sticky="nswe", padx=5, pady=5) <NEW_LINE> self._add_button.grid(row=1, column=1, sticky="nswe", padx=5, pady=5) <NEW_LINE> <DEDENT> def add_phase(self, *args): <NEW_LINE> <INDENT> if not self.check_widgets(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if callable(self._callback): <NEW_LINE> <INDENT> self._callback(self._entry.get()) <NEW_LINE> <DEDENT> self.destroy() <NEW_LINE> <DEDENT> def check_widgets(self): <NEW_LINE> <INDENT> name = self._entry.get() <NEW_LINE> if "¤" in name or "³" in name or "_" in name or "`" in name or "~" in name or "€" in name: <NEW_LINE> <INDENT> messagebox.showinfo("Info", "The name you have chosen for your Phase contains invalid characters. A " "Phase name may not contain the characters _, `, ~, ³, ¤ or €.") <NEW_LINE> return False <NEW_LINE> <DEDENT> return True | Toplevel to show widgets for entering the data required to create
a new Phase. | 625990923617ad0b5ee07e59 |
class DeleteImageProcessingTemplateRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Definition = None <NEW_LINE> self.SubAppId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Definition = params.get("Definition") <NEW_LINE> self.SubAppId = params.get("SubAppId") | DeleteImageProcessingTemplate请求参数结构体
| 62599092be7bc26dc9252cd9 |
class Output: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.dim = 0 <NEW_LINE> self.array = [] <NEW_LINE> <DEDENT> def __eq__(self,other): <NEW_LINE> <INDENT> return self.dim == other.dim and sorted(self.array) == sorted(other.array) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.dim == 0: <NEW_LINE> <INDENT> return "No black subsquares." <NEW_LINE> <DEDENT> builder = [f"Max dimension is {self.dim} with UL corners at:"] <NEW_LINE> for point in self.array: <NEW_LINE> <INDENT> builder.append(str(point)) <NEW_LINE> <DEDENT> return "\n".join(builder) | Represents an output of all max black subsquares.
Attributes:
dim: An int. The dimension of the subsquares.
array: A list of tuples representing the upper-left corners. | 62599092f9cc0f698b1c614f |
class SchemaValidationError(SchemaValidatorError): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> message = message.split('\n')[0] <NEW_LINE> super().__init__(message) | Raised during validate if the given object is not valid for the given
schema. | 6259909255399d3f0562821c |
@admin.register(ListParam) <NEW_LINE> class ListParamAdmin(AParamAdmin): <NEW_LINE> <INDENT> base_model = ListParam <NEW_LINE> show_in_index = False <NEW_LINE> extra_fieldset_title = 'List params' <NEW_LINE> formfield_overrides = { models.TextField: {'widget': forms.Textarea(attrs={'rows': 2, 'cols': 50, 'class': 'span8'})} } <NEW_LINE> fieldsets = [ ('General', { 'fields': required_base_fields + ['list_mode', 'list_elements'], 'classes': ['collapse', 'open'] }), ('More', { 'fields': extra_base_fields, 'classes': ['collapse'] }), ('Dependencies', { 'fields': dependencies_fields, 'classes': ['collapse'] }) ] | ListParam subclass Admin | 625990927cff6e4e811b774c |
@python_2_unicode_compatible <NEW_LINE> class FilerStatusTypesCd(CalAccessBaseModel): <NEW_LINE> <INDENT> status_type = fields.CharField( max_length=11, db_column='STATUS_TYPE', help_text='This field is undocumented' ) <NEW_LINE> status_desc = fields.CharField( max_length=11, db_column='STATUS_DESC', help_text='This field is undocumented' ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = 'calaccess_raw' <NEW_LINE> db_table = 'FILER_STATUS_TYPES_CD' <NEW_LINE> verbose_name = 'FILER_STATUS_TYPES_CD' <NEW_LINE> verbose_name_plural = 'FILER_STATUS_TYPES_CD' <NEW_LINE> ordering = ("status_type",) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.status_type | This is an undocumented model. | 62599092091ae3566870693c |
class Entry_intel_vga(Entry_blob_ext): <NEW_LINE> <INDENT> def __init__(self, section, etype, node): <NEW_LINE> <INDENT> super().__init__(section, etype, node) | Intel Video Graphics Adaptor (VGA) file
Properties / Entry arguments:
- filename: Filename of file to read into entry
This file contains code that sets up the integrated graphics subsystem on
some Intel SoCs. U-Boot executes this when the display is started up.
This is similar to the VBT file but in a different format.
See README.x86 for information about Intel binary blobs. | 62599092be7bc26dc9252cda |
class ReplicasListResponse(_messages.Message): <NEW_LINE> <INDENT> nextPageToken = _messages.StringField(1) <NEW_LINE> resources = _messages.MessageField('Replica', 2, repeated=True) | A ReplicasListResponse object.
Fields:
nextPageToken: A string attribute.
resources: A Replica attribute. | 6259909250812a4eaa621a4b |
class VotableEditVote(EditVote): <NEW_LINE> <INDENT> def get_action_permission(self, ar, obj, state): <NEW_LINE> <INDENT> if not super(VotableEditVote, self).get_action_permission(ar, obj, state): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> vote = obj.get_favourite(ar.get_user()) <NEW_LINE> return vote is not None <NEW_LINE> <DEDENT> def run_from_ui(self, ar, **kw): <NEW_LINE> <INDENT> obj = ar.selected_rows[0] <NEW_LINE> vote = obj.get_favourite(ar.get_user()) <NEW_LINE> self.goto_vote(vote, ar) | Edit your vote about this object.
| 62599092adb09d7d5dc0c266 |
class PostListSerializer(ModelSerializer): <NEW_LINE> <INDENT> url = HyperlinkedIdentityField(view_name="post:detail") <NEW_LINE> user = HyperlinkedRelatedField( view_name="userprofile:detail", read_only=True ) <NEW_LINE> created = TimestampField(read_only=True) <NEW_LINE> updated = TimestampField(read_only=True) <NEW_LINE> user_name = SerializerMethodField() <NEW_LINE> point = PointDetailSerializer(many=True, read_only=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Post <NEW_LINE> fields = [ 'url', 'id', 'user', 'user_name', 'title', 'location', 'views', 'created', 'updated', 'point', ] <NEW_LINE> <DEDENT> def get_user_name(self, obj): <NEW_LINE> <INDENT> return obj.user.username | Post列表 | 62599092d8ef3951e32c8ce2 |
class TestAppointment(TestCase): <NEW_LINE> <INDENT> def test_due_today(self): <NEW_LINE> <INDENT> appointment = Appointment() <NEW_LINE> appointment.appointment_date = time.localtime() <NEW_LINE> self.assertEqual(appointment.due_today(), True) <NEW_LINE> <DEDENT> def test_not_due_today_after(self): <NEW_LINE> <INDENT> appointment = Appointment() <NEW_LINE> appointment.appointment_date = time.localtime() + datetime.timedelta(days=1) <NEW_LINE> self.assertEqual(appointment.due_today(), False) <NEW_LINE> <DEDENT> def test_not_due_today_before(self): <NEW_LINE> <INDENT> appointment = Appointment() <NEW_LINE> appointment.appointment_date = time.localtime() - datetime.timedelta(days=1) <NEW_LINE> self.assertEqual(appointment.due_today(), False) <NEW_LINE> <DEDENT> def test_past_due(self): <NEW_LINE> <INDENT> appointment = Appointment() <NEW_LINE> appointment.appointment_date = timezone.now().date() <NEW_LINE> appointment.appointment_time = timezone.now().time() + timezone.timedelta(minutes=1) <NEW_LINE> self.assertEqual(appointment.past_due(), False) <NEW_LINE> appointment.appointment_time = timezone.now().time() - timezone.timedelta(minutes=1) <NEW_LINE> self.assertEqual(appointment.past_due(), True) <NEW_LINE> <DEDENT> def test_get_patient(self): <NEW_LINE> <INDENT> appointment = Appointment() <NEW_LINE> appointment_user = User() <NEW_LINE> appointment.user = appointment_user <NEW_LINE> self.assertEqual(appointment.user, appointment_user) | Tests functions of appointments.
| 625990928a349b6b43687f6b |
class TriRefiner(object): <NEW_LINE> <INDENT> def __init__(self, triangulation): <NEW_LINE> <INDENT> if not isinstance(triangulation, Triangulation): <NEW_LINE> <INDENT> raise ValueError("Expected a Triangulation object") <NEW_LINE> <DEDENT> self._triangulation = triangulation | Abstract base class for classes implementing mesh refinement.
A TriRefiner encapsulates a Triangulation object and provides tools for
mesh refinement and interpolation.
Derived classes must implements:
- ``refine_triangulation(return_tri_index=False, **kwargs)`` , where
the optional keyword arguments *kwargs* are defined in each
TriRefiner concrete implementation, and which returns :
- a refined triangulation
- optionally (depending on *return_tri_index*), for each
point of the refined triangulation: the index of
the initial triangulation triangle to which it belongs.
- ``refine_field(z, triinterpolator=None, **kwargs)`` , where:
- *z* array of field values (to refine) defined at the base
triangulation nodes
- *triinterpolator* is a
:class:`~matplotlib.tri.TriInterpolator` (optional)
- the other optional keyword arguments *kwargs* are defined in
each TriRefiner concrete implementation
and which returns (as a tuple) a refined triangular mesh and the
interpolated values of the field at the refined triangulation nodes. | 62599092091ae35668706940 |
class left_not_right (Pred.Condition) : <NEW_LINE> <INDENT> kind = Pred.Object <NEW_LINE> assertion = "left != right" <NEW_LINE> attributes = ("left", "right") | `left` and `right` must be different objects! | 6259909255399d3f05628221 |
class CustomProcessor(Processor): <NEW_LINE> <INDENT> def process_state_batch(self, batch): <NEW_LINE> <INDENT> return np.squeeze(batch, axis=1) <NEW_LINE> <DEDENT> def process_info(self, info): <NEW_LINE> <INDENT> processed_info = info['player_data'] <NEW_LINE> if 'stack' in processed_info: <NEW_LINE> <INDENT> processed_info = {'x': 1} <NEW_LINE> <DEDENT> return processed_info | The agent and the environment | 62599092ad47b63b2c5a955e |
class S3Extractor(Extractor): <NEW_LINE> <INDENT> def __init__(self, settings: S3SourceSettings): <NEW_LINE> <INDENT> super().__init__(settings) <NEW_LINE> self._settings = settings <NEW_LINE> self._s3_util = None <NEW_LINE> <DEDENT> def _get_s3_util(self) -> S3Util: <NEW_LINE> <INDENT> if self._s3_util is None: <NEW_LINE> <INDENT> self._s3_util = S3Util( bucket=self._settings.bucket, conn=AwsConnectionManager(self._settings.connection_settings), ) <NEW_LINE> <DEDENT> return self._s3_util <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def extract_next(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def has_next(self) -> bool: <NEW_LINE> <INDENT> pass | Base class for all types of S3 Extractors which handles connection to s3
Args:
settings (S3SourceSettings): settings to connect to the source s3 bucket | 6259909297e22403b383cc06 |
class Agent: <NEW_LINE> <INDENT> def __init__(self, params): <NEW_LINE> <INDENT> self.env = params.env <NEW_LINE> self.noise_stddev = params.noise_stddev if params.get("noisy_actions", False) else None <NEW_LINE> if isinstance(self.env, DotMap): <NEW_LINE> <INDENT> raise ValueError("Environment must be provided to the agent at initialization.") <NEW_LINE> <DEDENT> if (not isinstance(self.noise_stddev, float)) and params.get("noisy_actions", False): <NEW_LINE> <INDENT> raise ValueError("Must provide standard deviation for noise for noisy actions.") <NEW_LINE> <DEDENT> if self.noise_stddev is not None: <NEW_LINE> <INDENT> self.dU = self.env.action_space.shape[0] <NEW_LINE> <DEDENT> <DEDENT> def sample(self, horizon, policy, record_fname=None): <NEW_LINE> <INDENT> video_record = record_fname is not None <NEW_LINE> recorder = None if not video_record else VideoRecorder(self.env, record_fname) <NEW_LINE> times, rewards = [], [] <NEW_LINE> O, A, reward_sum, done = [self.env.reset()], [], 0, False <NEW_LINE> policy.reset() <NEW_LINE> for t in range(horizon): <NEW_LINE> <INDENT> if video_record: <NEW_LINE> <INDENT> recorder.capture_frame() <NEW_LINE> <DEDENT> start = time.time() <NEW_LINE> A.append(policy.act(O[t], t)) <NEW_LINE> times.append(time.time() - start) <NEW_LINE> if self.noise_stddev is None: <NEW_LINE> <INDENT> obs, reward, done, info = self.env.step(A[t]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> action = A[t] + np.random.normal(loc=0, scale=self.noise_stddev, size=[self.dU]) <NEW_LINE> action = np.minimum(np.maximum(action, self.env.action_space.low), self.env.action_space.high) <NEW_LINE> obs, reward, done, info = self.env.step(action) <NEW_LINE> <DEDENT> O.append(obs) <NEW_LINE> reward_sum += reward <NEW_LINE> rewards.append(reward) <NEW_LINE> if done: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> if video_record: <NEW_LINE> <INDENT> recorder.capture_frame() <NEW_LINE> recorder.close() <NEW_LINE> <DEDENT> print("Average action selection time: 
", np.mean(times)) <NEW_LINE> print("Rollout length: ", len(A)) <NEW_LINE> return { "obs": np.array(O), "ac": np.array(A), "reward_sum": reward_sum, "rewards": np.array(rewards), } | An general class for RL agents.
| 6259909255399d3f05628223 |
class serveConsumer(AsyncWebsocketConsumer): <NEW_LINE> <INDENT> async def connect(self): <NEW_LINE> <INDENT> if self.scope["user"].is_anonymous: <NEW_LINE> <INDENT> await self.close() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> await self.channel_layer.group_add("serve", self.channel_name) <NEW_LINE> await self.accept() <NEW_LINE> <DEDENT> <DEDENT> async def disconnect(self, close_code): <NEW_LINE> <INDENT> await self.channel_layer.group_discard("serve", self.channel_name) <NEW_LINE> <DEDENT> async def receive(self, text_data): <NEW_LINE> <INDENT> await self.send(text_data=json.dumps(text_data)) | Consumer to manage WebSocket connections for the Notification app,
called when the websocket is handshaking as part of initial connection. | 625990928a349b6b43687f6f |
class GrainsTargetingTest(integration.ShellCase): <NEW_LINE> <INDENT> def test_grains_targeting_os_running(self): <NEW_LINE> <INDENT> test_ret = ['sub_minion:', ' True', 'minion:', ' True'] <NEW_LINE> os_grain = '' <NEW_LINE> for item in self.run_salt('minion grains.get os'): <NEW_LINE> <INDENT> if item != 'minion:': <NEW_LINE> <INDENT> os_grain = item.strip() <NEW_LINE> <DEDENT> <DEDENT> ret = self.run_salt('-G \'os:{0}\' test.ping'.format(os_grain)) <NEW_LINE> self.assertEqual(sorted(ret), sorted(test_ret)) <NEW_LINE> <DEDENT> def test_grains_targeting_minion_id_running(self): <NEW_LINE> <INDENT> minion = self.run_salt('-G \'id:minion\' test.ping') <NEW_LINE> self.assertEqual(sorted(minion), sorted(['minion:', ' True'])) <NEW_LINE> sub_minion = self.run_salt('-G \'id:sub_minion\' test.ping') <NEW_LINE> self.assertEqual(sorted(sub_minion), sorted(['sub_minion:', ' True'])) <NEW_LINE> <DEDENT> def test_grains_targeting_disconnected(self): <NEW_LINE> <INDENT> test_ret = 'Minion did not return. [No response]' <NEW_LINE> key_file = os.path.join(self.master_opts['pki_dir'], 'minions', 'disconnected') <NEW_LINE> salt.utils.fopen(key_file, 'a').close() <NEW_LINE> try: <NEW_LINE> <INDENT> ret = '' <NEW_LINE> for item in self.run_salt('-G \'id:disconnected\' test.ping', timeout=40): <NEW_LINE> <INDENT> if item != 'disconnected:': <NEW_LINE> <INDENT> ret = item.strip() <NEW_LINE> <DEDENT> <DEDENT> self.assertEqual(ret, test_ret) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> os.unlink(key_file) | Integration tests for targeting with grains. | 62599092be7bc26dc9252cdd |
class account_fiscal_position(orm.Model): <NEW_LINE> <INDENT> _inherit = 'account.fiscal.position' <NEW_LINE> _columns = { 'intracommunity_operations': fields.boolean( 'Intra-Community operations'), } | Inheritance of Account fiscal position to add field 'include_in_mod349'.
This fields let us map fiscal position, taxes and accounts to create an
AEAT 349 Report | 6259909255399d3f05628227 |
class MoneyField: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_amount_from_decimal(decimal_amount: Decimal) -> int: <NEW_LINE> <INDENT> return UnitConverter.to_minor_units(decimal_amount) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_vat_rate_from_decimal(decimal_vat_rate: Decimal) -> int: <NEW_LINE> <INDENT> return UnitConverter.vat_rate_to_minor_units(decimal_vat_rate) <NEW_LINE> <DEDENT> def to_decimals(self) -> Decimal: <NEW_LINE> <INDENT> return UnitConverter.to_decimals(self.amount) <NEW_LINE> <DEDENT> def set_amount_from_decimal(self, decimal_amount: Decimal) -> None: <NEW_LINE> <INDENT> self.amount = self.get_amount_from_decimal(decimal_amount) <NEW_LINE> <DEDENT> def set_vat_rate_from_decimal(self, decimal_vat_rate: Decimal) -> None: <NEW_LINE> <INDENT> self.vat_rate = self.get_vat_rate_from_decimal(decimal_vat_rate) <NEW_LINE> <DEDENT> def __init__(self, amount: int, currency: str, vat_amount: int, vat_rate: int): <NEW_LINE> <INDENT> self.amount = amount <NEW_LINE> self.currency = currency <NEW_LINE> self.vat_amount = vat_amount <NEW_LINE> self.vat_rate = vat_rate <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> return { 'amount': self.amount, 'currency': self.currency, 'vat_amount': self.vat_amount, 'vat_rate': self.vat_rate, } | Represents the composite amount field for money values.
Used by both events and commands.
Avro will serialize it as follows:
>>> {'amount': 1000, 'currency': 'SEK'}
Examples:
>>> from typing import Dict, NamedTuple
>>> from eventsourcing_helpers.message import Event
>>>
>>> @Event
>>> class CheckoutStarted(NamedTuple):
>>> total_amount = Dict[str, MoneyField] | 62599092bf627c535bcb31e9 |
class TestLinkResponseEntity(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testLinkResponseEntity(self): <NEW_LINE> <INDENT> pass | LinkResponseEntity unit test stubs | 6259909260cbc95b06365bf2 |
class EDPluginISPyBSetImagesPositionsv1_4(EDPluginISPyBv1_4): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> EDPluginISPyBv1_4.__init__(self) <NEW_LINE> self.setXSDataInputClass(XSDataInputISPyBSetImagesPositions) <NEW_LINE> self.listImageCreation = [] <NEW_LINE> <DEDENT> def configure(self): <NEW_LINE> <INDENT> EDPluginISPyBv1_4.configure(self, _bRequireToolsForCollectionWebServiceWsdl=True) <NEW_LINE> <DEDENT> def process(self, _edObject=None): <NEW_LINE> <INDENT> EDPluginISPyBv1_4.process(self) <NEW_LINE> self.DEBUG("EDPluginISPyBSetImagesPositionsv1_4.process") <NEW_LINE> httpAuthenticatedToolsForCollectionWebService = HttpAuthenticated(username=self.strUserName, password=self.strPassWord) <NEW_LINE> clientToolsForCollectionWebService = Client(self.strToolsForCollectionWebServiceWsdl, transport=httpAuthenticatedToolsForCollectionWebService, cache=None) <NEW_LINE> listImagePosition = [] <NEW_LINE> for xsDataImagePosition in self.dataInput.imagePosition: <NEW_LINE> <INDENT> imagePosition = clientToolsForCollectionWebService.factory.create('imagePosition') <NEW_LINE> imagePosition.fileName = os.path.basename(xsDataImagePosition.fileName.path.value) <NEW_LINE> imagePosition.fileLocation = os.path.dirname(xsDataImagePosition.fileName.path.value) <NEW_LINE> if xsDataImagePosition.jpegFileFullPath is not None: <NEW_LINE> <INDENT> imagePosition.jpegFileFullPath = xsDataImagePosition.jpegFileFullPath.path.value <NEW_LINE> <DEDENT> if xsDataImagePosition.jpegThumbnailFileFullPath is not None: <NEW_LINE> <INDENT> imagePosition.jpegThumbnailFileFullPath = xsDataImagePosition.jpegThumbnailFileFullPath.path.value <NEW_LINE> <DEDENT> listImagePosition.append(imagePosition) <NEW_LINE> <DEDENT> self.listImageCreation = clientToolsForCollectionWebService.service.setImagesPositions( listImagePosition=listImagePosition) <NEW_LINE> self.DEBUG("EDPluginISPyBSetImagesPositionsv1_4.process: listImageCreation=%r" % self.listImageCreation) <NEW_LINE> <DEDENT> def 
finallyProcess(self, _edObject=None): <NEW_LINE> <INDENT> EDPluginISPyBv1_4.finallyProcess(self) <NEW_LINE> self.DEBUG("EDPluginISPyBSetImagesPositionsv1_4.finallyProcess") <NEW_LINE> xsDataResultISPyBSetImagesPositions = XSDataResultISPyBSetImagesPositions() <NEW_LINE> for imageCreation in self.listImageCreation: <NEW_LINE> <INDENT> xsDataISPyBImageCreation = XSDataISPyBImageCreation() <NEW_LINE> xsDataISPyBImageCreation.fileLocation = XSDataString(imageCreation.fileLocation) <NEW_LINE> xsDataISPyBImageCreation.fileName = XSDataString(imageCreation.fileName) <NEW_LINE> try: <NEW_LINE> <INDENT> xsDataISPyBImageCreation.imageId = XSDataInteger(imageCreation.imageId) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self.WARNING("Image %s/%s does not have any image id" % (imageCreation.fileLocation, imageCreation.fileName)) <NEW_LINE> <DEDENT> xsDataISPyBImageCreation.isCreated = XSDataBoolean(imageCreation.isCreated) <NEW_LINE> xsDataResultISPyBSetImagesPositions.addImageCreation(xsDataISPyBImageCreation) <NEW_LINE> <DEDENT> self.setDataOutput(xsDataResultISPyBSetImagesPositions) | Plugin to store sample position (for grid scans) | 62599092099cdd3c63676284 |
class DescribeClusterInstancesResponseContent(Model): <NEW_LINE> <INDENT> def __init__(self, instances=None, next_token=None): <NEW_LINE> <INDENT> self.openapi_types = {"instances": List[ClusterInstance], "next_token": str} <NEW_LINE> self.attribute_map = {"instances": "instances", "next_token": "nextToken"} <NEW_LINE> self._instances = instances <NEW_LINE> self._next_token = next_token <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dikt) -> "DescribeClusterInstancesResponseContent": <NEW_LINE> <INDENT> return util.deserialize_model(dikt, cls) <NEW_LINE> <DEDENT> @property <NEW_LINE> def instances(self): <NEW_LINE> <INDENT> return self._instances <NEW_LINE> <DEDENT> @instances.setter <NEW_LINE> def instances(self, instances): <NEW_LINE> <INDENT> if instances is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `instances`, must not be `None`") <NEW_LINE> <DEDENT> self._instances = instances <NEW_LINE> <DEDENT> @property <NEW_LINE> def next_token(self): <NEW_LINE> <INDENT> return self._next_token <NEW_LINE> <DEDENT> @next_token.setter <NEW_LINE> def next_token(self, next_token): <NEW_LINE> <INDENT> self._next_token = next_token | NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
Do not edit the class manually. | 62599092f9cc0f698b1c6156 |
class PathsNormalizer(RewritingVisitor): <NEW_LINE> <INDENT> def __init__(self, project, toolset=None): <NEW_LINE> <INDENT> super(PathsNormalizer, self).__init__() <NEW_LINE> self.toolset = toolset <NEW_LINE> self.project = project <NEW_LINE> self.module = self.target = None <NEW_LINE> self.top_srcdir = os.path.abspath(project.top_module.srcdir) <NEW_LINE> <DEDENT> def set_context(self, context): <NEW_LINE> <INDENT> if isinstance(context, bkl.model.Target): <NEW_LINE> <INDENT> self.module = context.parent <NEW_LINE> self.target = context <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.module = context <NEW_LINE> self.target = None <NEW_LINE> <DEDENT> <DEDENT> @memoized <NEW_LINE> def _src_prefix(self, source_file): <NEW_LINE> <INDENT> srcdir = os.path.abspath(self.project.get_srcdir(source_file)) <NEW_LINE> prefix = os.path.relpath(srcdir, start=self.top_srcdir) <NEW_LINE> logger.debug('translating paths from %s with prefix "%s"', source_file, prefix) <NEW_LINE> if prefix == ".": <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lst = prefix.split(os.path.sep) <NEW_LINE> return [bkl.expr.LiteralExpr(i) for i in lst] <NEW_LINE> <DEDENT> <DEDENT> @memoized <NEW_LINE> def _builddir(self, target): <NEW_LINE> <INDENT> builddir = self.toolset.get_builddir_for(target) <NEW_LINE> logger.debug('translating @builddir paths of %s into %s', target, builddir) <NEW_LINE> return builddir <NEW_LINE> <DEDENT> def path(self, e): <NEW_LINE> <INDENT> if e.anchor == bkl.expr.ANCHOR_BUILDDIR and self.toolset is not None: <NEW_LINE> <INDENT> if self.target is None: <NEW_LINE> <INDENT> raise Error("@builddir references are not allowed outside of targets", pos=e.pos) <NEW_LINE> <DEDENT> bdir = self._builddir(self.target) <NEW_LINE> e = bkl.expr.PathExpr(bdir.components + e.components, bdir.anchor, bdir.anchor_file, pos=e.pos) <NEW_LINE> <DEDENT> if e.anchor == bkl.expr.ANCHOR_SRCDIR: <NEW_LINE> <INDENT> assert self.module is not None <NEW_LINE> if 
e.anchor_file: <NEW_LINE> <INDENT> source_file = e.anchor_file <NEW_LINE> <DEDENT> elif e.pos and e.pos.filename: <NEW_LINE> <INDENT> source_file = e.pos.filename <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> source_file = self.module.source_file <NEW_LINE> <DEDENT> prefix = self._src_prefix(source_file) <NEW_LINE> components = e.components <NEW_LINE> if prefix is not None: <NEW_LINE> <INDENT> if not e.is_external_absolute(): <NEW_LINE> <INDENT> components = prefix + components <NEW_LINE> <DEDENT> <DEDENT> e = bkl.expr.PathExpr(components, bkl.expr.ANCHOR_TOP_SRCDIR, None, pos=e.pos) <NEW_LINE> <DEDENT> return e | Normalizes relative paths so that they are absolute. Paths relative to
@srcdir are rewritten in terms of @top_srcdir. Paths relative to @builddir
are translated in toolset-specific way. This is needed so that cross-module
variables and paths uses produce correct results.
You must call :meth:`set_context()` to associate a module or target before
calling :meth:`visit()`. Paths relative to @builddir can only be processed
if the context was set to a target. | 62599093bf627c535bcb31eb |
class Ada(GtfsdbBase, Base): <NEW_LINE> <INDENT> datasource = config.DATASOURCE_DERIVED <NEW_LINE> __tablename__ = 'ada' <NEW_LINE> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.start_date = self.end_date = datetime.datetime.now() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def post_process(cls, db, **kwargs): <NEW_LINE> <INDENT> if hasattr(cls, 'geom'): <NEW_LINE> <INDENT> from gtfsdb.model.route import Route <NEW_LINE> db.prep_an_orm_class(Route) <NEW_LINE> log.debug('{0}.post_process'.format(cls.__name__)) <NEW_LINE> ada = cls(name='ADA Boundary') <NEW_LINE> geom = db.session.query( func.ST_ExteriorRing( func.ST_Union( Route.geom.ST_Buffer(0.011025, 'quad_segs=50') ) ) ) <NEW_LINE> geom = func.ST_MakePolygon(geom) <NEW_LINE> ada.geom = geom <NEW_LINE> db.session.add(ada) <NEW_LINE> db.session.commit() <NEW_LINE> db.session.close() | The Americans with Disabilities Act (https://www.ada.gov) requires transit agencies to provide
complementary paratransit service to destinations within 3/4 mile of all fixed routes.
:see: https://en.wikipedia.org/wiki/Paratransit#Americans_with_Disabilities_Act_of_1990
This class will calculate and represent a Paratransit (or ADA) boundary against all active routes.
NOTE: to load this table, you need both a geospaitial db (postgis) and the --create_boundaries cmd-line parameter | 62599093283ffb24f3cf55b7 |
class ThreadBackground(QThread): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> QThread.__init__(self) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.wait() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while(True): <NEW_LINE> <INDENT> schedule.run_pending() <NEW_LINE> self.sleep(1) | Thread de ejecución continua para la realización de tareas de BackUp pendientes. | 62599093be7bc26dc9252ce1 |
class DatabaseHost(models.Model): <NEW_LINE> <INDENT> hostname = models.CharField(max_length=255) <NEW_LINE> port = models.PositiveIntegerField() <NEW_LINE> username = models.CharField(max_length=255) <NEW_LINE> password = models.CharField(max_length=255) <NEW_LINE> dbms = models.CharField(max_length=16, choices=( ("postgresql", "PostgreSQL"), ("mysql", "MySQL") )) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "<{}:{}>".format(self.hostname, self.port) | Represents a host for a collection of SQL databases.
Attributes:
hostname
The host to connect to (ex: postgres1.csl.tjhsst.edu).
port
The port the database server is running on.
username
The administrator username for creating and managing databases.
password
The administrator password for creating and managing databases.
dbms
The type of database (ex: postgres, mysql). | 62599093283ffb24f3cf55b8 |
class SpMV(Function): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def forward(ctx, row, col, val, vector, size): <NEW_LINE> <INDENT> ctx.save_for_backward(row, col, val, vector) <NEW_LINE> ctx.matrix_size = size <NEW_LINE> output = vector.new() <NEW_LINE> sparse.spmv( row, col, val, vector, output, size[0], size[1], False) <NEW_LINE> return output <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def backward(ctx, grad_output): <NEW_LINE> <INDENT> row, col, val, vector = ctx.saved_variables <NEW_LINE> size = ctx.matrix_size <NEW_LINE> grad_row = grad_col = grad_val = None <NEW_LINE> grad_vector = None <NEW_LINE> grad_size = None <NEW_LINE> grad_vector = vector.data.new() <NEW_LINE> sparse.spmv( row.data, col.data, val.data, grad_output.data, grad_vector, size[0], size[1], True) <NEW_LINE> grad_val = val.data.new() <NEW_LINE> sparse.spmv_backward_matrix( row.data, col.data, vector.data, grad_output.data, grad_val, size[0], size[1]) <NEW_LINE> grad_vector = Variable(grad_vector) <NEW_LINE> grad_val = Variable(grad_val) <NEW_LINE> return grad_row, grad_col, grad_val, grad_vector, grad_size | Sparse matrix-vector product. | 62599093099cdd3c63676286 |
class ProjectFQName(object): <NEW_LINE> <INDENT> domain_name_key = 'domain' <NEW_LINE> name = 'project' <NEW_LINE> @classmethod <NEW_LINE> def project_fq_name_key(cls): <NEW_LINE> <INDENT> return [cls.domain_name_key, cls.project_name_key] | Defines the keywords and format of a Project FQName specification.
| 62599093adb09d7d5dc0c274 |
class CharacteristicsDetail(viewsets.ModelViewSet): <NEW_LINE> <INDENT> permission_classes=(GivingPermissions,) <NEW_LINE> authentication_classes = (SimpleAuthentication,) <NEW_LINE> serializer_class = CharacteristicSerializer <NEW_LINE> def get_characteristic(self,name): <NEW_LINE> <INDENT> characteristics = Characteristics.objects.filter(name=name) <NEW_LINE> if characteristics.count() == 0: <NEW_LINE> <INDENT> logger.warning("The characteristic {0} does not exist".format(name)) <NEW_LINE> raise Http404 <NEW_LINE> <DEDENT> return characteristics[0] <NEW_LINE> <DEDENT> def get_characteristics(self, request, *args, **kwargs): <NEW_LINE> <INDENT> instrumentId = kwargs['instrumentId'] <NEW_LINE> characteristics = Characteristics.objects.filter(instrument__instrumentId = instrumentId, name = kwargs['characteristic']) <NEW_LINE> if characteristics.count() == 0: <NEW_LINE> <INDENT> _state = status.HTTP_404_NOT_FOUND <NEW_LINE> data = {} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _state = status.HTTP_200_OK <NEW_LINE> serializer = CharacteristicSerializer(characteristics[0]) <NEW_LINE> data = serializer.data <NEW_LINE> <DEDENT> return Response(data, status=_state) <NEW_LINE> <DEDENT> def put_characteristics (self, request, *args, **kwargs): <NEW_LINE> <INDENT> characteristic_name = kwargs['characteristic'] <NEW_LINE> characteristic = self.get_characteristic(characteristic_name) <NEW_LINE> serializer = CharacteristicSerializer(characteristic, data=request.data, partial=True) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.save() <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def delete_characteristics(self, request, *args, **kwargs): <NEW_LINE> <INDENT> characteristic_name = kwargs['characteristic'] <NEW_LINE> characteristic = self.get_characteristic(characteristic_name) <NEW_LINE> characteristic.delete() <NEW_LINE> return 
Response(status=status.HTTP_204_NO_CONTENT) | Verbs implementation for a specific characteristic, GET, PUT, DELETE | 62599093dc8b845886d552d2 |
class FeedTheFoxAdapter(DefaultAccountAdapter): <NEW_LINE> <INDENT> def is_open_for_signup(self, request): <NEW_LINE> <INDENT> return False | Customize the default allauth account adapter. | 62599093283ffb24f3cf55ba |
class MessageViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Message.objects.all() <NEW_LINE> serializer_class = MessageSerializer <NEW_LINE> permission_classes = (permissions.IsAuthenticatedOrReadOnly,) <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> queryset = Message.objects.all() <NEW_LINE> sender = self.request.query_params.get('sender') <NEW_LINE> receiver = self.request.query_params.get('receiver') <NEW_LINE> if sender is not None and receiver is not None: <NEW_LINE> <INDENT> queryset = queryset.filter(sender=sender, receiver=receiver) | queryset.filter(sender=receiver, receiver=sender) <NEW_LINE> <DEDENT> return queryset <NEW_LINE> <DEDENT> @action(detail=True, renderer_classes=[renderers.StaticHTMLRenderer]) <NEW_LINE> def perform_create(self, serializer): <NEW_LINE> <INDENT> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.save() <NEW_LINE> queryset = Node.objects.all() <NEW_LINE> filtered_nodes = queryset.filter(userid=serializer.data["receiver"]) <NEW_LINE> token = filtered_nodes[0].device_token <NEW_LINE> apns.post(token) <NEW_LINE> return Response({'status': 'message created'}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) | This viewset automatically provides `list`, `create`, `retrieve`,
`update` and `destroy` actions. | 6259909397e22403b383cc11 |
class EmailAuthBackend(backends.ModelBackend): <NEW_LINE> <INDENT> def authenticate(self, username=None, password=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(email=username) <NEW_LINE> if user.check_password(password): <NEW_LINE> <INDENT> return user <NEW_LINE> <DEDENT> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(username=username) <NEW_LINE> if user.check_password(password): <NEW_LINE> <INDENT> return user <NEW_LINE> <DEDENT> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_user(self, user_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return User.objects.get(pk=user_id) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return None | Email Authentication Backend
Allows a user to sign in using an email/password pair, then check
a username/password pair if email failed | 62599093283ffb24f3cf55bb |
class Index(Generic[T]): <NEW_LINE> <INDENT> def __init__(self, column, index=None): <NEW_LINE> <INDENT> self.column = column <NEW_LINE> if index is None: <NEW_LINE> <INDENT> self.all = True <NEW_LINE> <DEDENT> elif isinstance(index, list): <NEW_LINE> <INDENT> self.index = tuple(index) <NEW_LINE> self.all = len(index) == 0 <NEW_LINE> <DEDENT> elif not isinstance(index, tuple): <NEW_LINE> <INDENT> self.index = tuple([index]) <NEW_LINE> self.all = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.index = index <NEW_LINE> self.all = False <NEW_LINE> <DEDENT> <DEDENT> def filter(self, df: pd.DataFrame) -> pd.Series: <NEW_LINE> <INDENT> if self.all: <NEW_LINE> <INDENT> return df[self.column].notnull() <NEW_LINE> <DEDENT> return df[self.column].isin(self.index) <NEW_LINE> <DEDENT> def is_alone(self) -> bool: <NEW_LINE> <INDENT> return not self.all and len(self.index) <= 1 | Generic Index to use to select and rank data. | 625990933617ad0b5ee07e6d |
@pytest.mark.skipif(not HAVE_NP, reason='Numpy is not available') <NEW_LINE> class TestNumpy_NoRLEHandler: <NEW_LINE> <INDENT> def setup(self): <NEW_LINE> <INDENT> self.original_handlers = pydicom.config.pixel_data_handlers <NEW_LINE> pydicom.config.pixel_data_handlers = [] <NEW_LINE> <DEDENT> def teardown(self): <NEW_LINE> <INDENT> pydicom.config.pixel_data_handlers = self.original_handlers <NEW_LINE> <DEDENT> def test_environment(self): <NEW_LINE> <INDENT> assert HAVE_NP <NEW_LINE> assert RLE_HANDLER is not None <NEW_LINE> <DEDENT> def test_can_access_supported_dataset(self): <NEW_LINE> <INDENT> ds = dcmread(RLE_16_1_1F) <NEW_LINE> assert 'CompressedSamples^MR1' == ds.PatientName <NEW_LINE> assert 6128 == len(ds.PixelData) <NEW_LINE> <DEDENT> @pytest.mark.parametrize("fpath,data", REFERENCE_DATA_UNSUPPORTED) <NEW_LINE> def test_can_access_unsupported_dataset(self, fpath, data): <NEW_LINE> <INDENT> ds = dcmread(fpath) <NEW_LINE> assert data[0] == ds.file_meta.TransferSyntaxUID <NEW_LINE> assert data[1] == ds.PatientName <NEW_LINE> <DEDENT> def test_pixel_array_raises(self): <NEW_LINE> <INDENT> ds = dcmread(EXPL_16_1_1F) <NEW_LINE> for uid in ALL_TRANSFER_SYNTAXES: <NEW_LINE> <INDENT> ds.file_meta.TransferSyntaxUID = uid <NEW_LINE> exc_msg = ( r"Unable to decode pixel data with a transfer syntax UID of " r"'{}'".format(uid) ) <NEW_LINE> with pytest.raises(NotImplementedError, match=exc_msg): <NEW_LINE> <INDENT> ds.pixel_array | Tests for handling datasets with no handler. | 62599093091ae3566870694e |
class EventQueue(Thread): <NEW_LINE> <INDENT> def __init__(self, handler, name=None, preload=[]): <NEW_LINE> <INDENT> if not callable(handler): <NEW_LINE> <INDENT> raise TypeError("handler should be a callable") <NEW_LINE> <DEDENT> Thread.__init__(self, name=name or self.__class__.__name__) <NEW_LINE> self.setDaemon(True) <NEW_LINE> self._exit = Event() <NEW_LINE> self._active = Event() <NEW_LINE> self._pause_counter = 0 <NEW_LINE> self._pause_lock = Lock() <NEW_LINE> self._accepting_events = True <NEW_LINE> self.queue = Queue.Queue() <NEW_LINE> self.handle = handler <NEW_LINE> self.load(preload) <NEW_LINE> self._active.set() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while not self._exit.isSet(): <NEW_LINE> <INDENT> self._active.wait() <NEW_LINE> event = self.queue.get() <NEW_LINE> if event is StopProcessing: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.handle(event) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> log.error("exception happened during event handling") <NEW_LINE> log.err() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> del event <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def stop(self, force_exit=False): <NEW_LINE> <INDENT> if force_exit: <NEW_LINE> <INDENT> self._exit.set() <NEW_LINE> <DEDENT> self.queue.put(StopProcessing) <NEW_LINE> self._pause_lock.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> self._pause_counter = 0 <NEW_LINE> self._active.set() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self._pause_lock.release() <NEW_LINE> <DEDENT> <DEDENT> def pause(self): <NEW_LINE> <INDENT> self._pause_lock.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> self._pause_counter += 1 <NEW_LINE> self._active.clear() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self._pause_lock.release() <NEW_LINE> <DEDENT> <DEDENT> def unpause(self): <NEW_LINE> <INDENT> self._pause_lock.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> if self._pause_counter == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._pause_counter -= 1 
<NEW_LINE> if self._pause_counter == 0: <NEW_LINE> <INDENT> self._active.set() <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> self._pause_lock.release() <NEW_LINE> <DEDENT> <DEDENT> def resume(self, events=[]): <NEW_LINE> <INDENT> [self.queue.put(event) for event in events] <NEW_LINE> self.unpause() <NEW_LINE> self.accept_events() <NEW_LINE> <DEDENT> def accept_events(self): <NEW_LINE> <INDENT> self._accepting_events = True <NEW_LINE> <DEDENT> def ignore_events(self): <NEW_LINE> <INDENT> self._accepting_events = False <NEW_LINE> <DEDENT> def put(self, event): <NEW_LINE> <INDENT> if self._accepting_events: <NEW_LINE> <INDENT> self.queue.put(event) <NEW_LINE> <DEDENT> <DEDENT> def load(self, events): <NEW_LINE> <INDENT> if self._accepting_events: <NEW_LINE> <INDENT> [self.queue.put(event) for event in events] <NEW_LINE> <DEDENT> <DEDENT> def empty(self): <NEW_LINE> <INDENT> self.pause() <NEW_LINE> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> self.queue.get_nowait() <NEW_LINE> <DEDENT> <DEDENT> except Queue.Empty: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.unpause() <NEW_LINE> <DEDENT> def get_unhandled(self): <NEW_LINE> <INDENT> if self.isAlive(): <NEW_LINE> <INDENT> raise RuntimeError("Queue is still running") <NEW_LINE> <DEDENT> unhandled = [] <NEW_LINE> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> event = self.queue.get_nowait() <NEW_LINE> if event is not StopProcessing: <NEW_LINE> <INDENT> unhandled.append(event) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except Queue.Empty: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return unhandled <NEW_LINE> <DEDENT> def handle(self, event): <NEW_LINE> <INDENT> raise RuntimeError("unhandled event") | Simple event processing queue that processes one event at a time | 6259909360cbc95b06365bf6 |
class ItemPedido(object): <NEW_LINE> <INDENT> qtd = Quantidade() <NEW_LINE> pr_unitario = Quantidade() <NEW_LINE> def __init__(self, descr, pr_unitario, qtd): <NEW_LINE> <INDENT> self.descr = descr <NEW_LINE> self.qtd = qtd <NEW_LINE> self.pr_unitario = pr_unitario | um item de um pedido | 62599093f9cc0f698b1c615a |
class SMIEyeOperator( EyeOperator ): <NEW_LINE> <INDENT> pass | Class for the analysis of SMI output.
Input is assumed to be already-converted text files, containing a mixture of samples and messages. | 62599093be7bc26dc9252ce4 |
class LogicalRouterSchema(base_schema_v2.BaseSchema): <NEW_LINE> <INDENT> table = (LogicalRouterEntrySchema,) | Schema class for Logical Routers
>>> import pprint
>>> py_dict = {'table': [{'lr_uuid': 'dc5d028a-0677-4f90-8bf2-846da02061',
... 'vdr_id': '1438272149',
... 'number_of_ports': '2',
... 'number_of_routes': '2',
... 'lr_state' : 'enabled',
... 'controller_ip': '10.10.10.10',
... 'control_plane_ip': '10.10.10.10',
... 'control_plane_active': 'yes',
... 'num_unique_nexthops': '0',
... 'generation_number': '0',
... 'edge_active': 'no'}]}
>>> pyobj = LogicalRouterSchema(py_dict=py_dict)
>>> pprint.pprint(pyobj.get_py_dict_from_object())
{'table': [{'control_plane_active': 'yes',
'control_plane_ip': '10.10.10.10',
'controller_ip': '10.10.10.10',
'edge_active': 'no',
'generation_number': '0',
'lr_hosts': None,
'lr_state': 'enabled',
'num_unique_nexthops': '0',
'number_of_ports': '2',
'number_of_routes': '2',
'vdr_id': '1438272149',
'lr_uuid': 'dc5d028a-0677-4f90-8bf2-846da02061'}]}
>>> for py_dict in (None, {}, {'table': []}):
... pyobj = LogicalRouterSchema(py_dict=py_dict)
... pprint.pprint(pyobj.get_py_dict_from_object())
{'table': []}
{'table': []}
{'table': []}
>>> for py_dict in ({'table': {}},):
... pyobj = LogicalRouterSchema(py_dict=py_dict)
Traceback (most recent call last):
...
RuntimeError: LogicalRouterSchema: Invalid value={} for attr=table | 6259909397e22403b383cc15 |
class Elementary: <NEW_LINE> <INDENT> def __add__(self, other): <NEW_LINE> <INDENT> if other == Zero: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> if other == self: <NEW_LINE> <INDENT> return Mul(Integer(2), self) <NEW_LINE> <DEDENT> return Add(self, other) <NEW_LINE> <DEDENT> def __radd__(self, other): <NEW_LINE> <INDENT> return other.__add__(self) <NEW_LINE> <DEDENT> def __mul__(self, other): <NEW_LINE> <INDENT> if other == Zero: <NEW_LINE> <INDENT> return Zero <NEW_LINE> <DEDENT> if other == One: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> return Mul(self, other) | Base class for all types | 6259909355399d3f05628233 |
class _WebDAVSearchMethodMock(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.counter = 0 <NEW_LINE> <DEDENT> def search(self, _, __): <NEW_LINE> <INDENT> if self.counter == 0: <NEW_LINE> <INDENT> self.counter += 1 <NEW_LINE> return _VALID_WEBDAV_GROUP_RESULT <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.counter = 0 <NEW_LINE> return _VALID_WEBDAV_USER_RESULT | Mock of the search method. | 62599093091ae35668706954 |
@command(server_cmds) <NEW_LINE> class server_wait(_CycladesInit, _ServerWait): <NEW_LINE> <INDENT> arguments = dict( timeout=IntArgument( 'Wait limit in seconds (default: 60)', '--timeout', default=60), server_status=StatusArgument( 'Status to wait for (%s, default: %s)' % ( ', '.join(server_states), server_states[0]), '--status', valid_states=server_states) ) <NEW_LINE> @errors.Generic.all <NEW_LINE> @errors.Cyclades.connection <NEW_LINE> @errors.Cyclades.server_id <NEW_LINE> def _run(self, server_id, current_status): <NEW_LINE> <INDENT> r = self.client.get_server_details(server_id) <NEW_LINE> if r['status'].lower() == current_status.lower(): <NEW_LINE> <INDENT> self.wait(server_id, current_status, timeout=self['timeout']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.error( 'Server %s: Cannot wait for status %s, ' 'status is already %s' % ( server_id, current_status, r['status'])) <NEW_LINE> <DEDENT> <DEDENT> def main(self, server_id): <NEW_LINE> <INDENT> super(self.__class__, self)._run() <NEW_LINE> self._run( server_id=server_id, current_status=self['server_status'] or 'BUILD') | Wait for server to change its status (default: BUILD) | 6259909355399d3f05628235 |
class DysonCarbonFilterLifeSensor(DysonSensor): <NEW_LINE> <INDENT> _SENSOR_TYPE = "carbon_filter_life" <NEW_LINE> @property <NEW_LINE> def state(self) -> int: <NEW_LINE> <INDENT> return self._device.carbon_filter_life | Dyson carbon filter life sensor (in percentage) for Pure Cool. | 62599093099cdd3c6367628b |
class ReserveEA(models.Model): <NEW_LINE> <INDENT> reserve = models.ForeignKey('inventory.Reserve') <NEW_LINE> ea = models.ForeignKey('inventory.EA') <NEW_LINE> count = models.PositiveIntegerField() | Для хранения забронированного инвентаря с api | 62599093656771135c48aec2 |
class Geolocation(BaseModel): <NEW_LINE> <INDENT> id: Optional[str] = None <NEW_LINE> latitude: Optional[float] = None <NEW_LINE> longitude: Optional[float] = None <NEW_LINE> zipcode: Optional[str] = None | Validation model for geolocation data to be uploaded.
## Fields
* id: Optional[str] = None
* latitude: Optional[float] = None
* longitude: Optional[float] = None
* zipcode: Optional[str] = None | 62599093091ae35668706956 |
class control(_RemoteControl): <NEW_LINE> <INDENT> name = 'control' <NEW_LINE> choices = { 'enable_events': (1.0, 'tell worker(s) to enable events'), 'disable_events': (1.0, 'tell worker(s) to disable events'), 'add_consumer': (1.0, 'tell worker(s) to start consuming a queue'), 'cancel_consumer': (1.0, 'tell worker(s) to stop consuming a queue'), 'rate_limit': ( 1.0, 'tell worker(s) to modify the rate limit for a task type'), 'time_limit': ( 1.0, 'tell worker(s) to modify the time limit for a task type.'), 'autoscale': (1.0, 'change autoscale settings'), 'pool_grow': (1.0, 'start more pool processes'), 'pool_shrink': (1.0, 'use less pool processes'), } <NEW_LINE> def call(self, method, *args, **options): <NEW_LINE> <INDENT> return getattr(self.app.control, method)(*args, reply=True, **options) <NEW_LINE> <DEDENT> def pool_grow(self, method, n=1, **kwargs): <NEW_LINE> <INDENT> return self.call(method, int(n), **kwargs) <NEW_LINE> <DEDENT> def pool_shrink(self, method, n=1, **kwargs): <NEW_LINE> <INDENT> return self.call(method, int(n), **kwargs) <NEW_LINE> <DEDENT> def autoscale(self, method, max=None, min=None, **kwargs): <NEW_LINE> <INDENT> return self.call(method, int(max), int(min), **kwargs) <NEW_LINE> <DEDENT> def rate_limit(self, method, task_name, rate_limit, **kwargs): <NEW_LINE> <INDENT> return self.call(method, task_name, rate_limit, reply=True, **kwargs) <NEW_LINE> <DEDENT> def time_limit(self, method, task_name, soft, hard=None, **kwargs): <NEW_LINE> <INDENT> return self.call(method, task_name, float(soft), float(hard), reply=True, **kwargs) <NEW_LINE> <DEDENT> def add_consumer(self, method, queue, exchange=None, exchange_type='direct', routing_key=None, **kwargs): <NEW_LINE> <INDENT> return self.call(method, queue, exchange, exchange_type, routing_key, reply=True, **kwargs) <NEW_LINE> <DEDENT> def cancel_consumer(self, method, queue, **kwargs): <NEW_LINE> <INDENT> return self.call(method, queue, reply=True, **kwargs) | Workers remote control.
Availability: RabbitMQ (amqp), Redis, and MongoDB transports.
Examples::
celery control enable_events --timeout=5
celery control -d [email protected] enable_events
celery control -d w1.e.com,w2.e.com enable_events
celery control -d w1.e.com add_consumer queue_name
celery control -d w1.e.com cancel_consumer queue_name
celery control -d w1.e.com add_consumer queue exchange direct rkey | 625990938a349b6b43687f83 |
class ExampleModuleException(Exception): <NEW_LINE> <INDENT> pass | All exceptions raised by the library inherit from this exception | 62599093be7bc26dc9252ce7 |
class LinkButton(Gtk.LinkButton): <NEW_LINE> <INDENT> def __init__(self, url, label): <NEW_LINE> <INDENT> Gtk.LinkButton.__init__(self, uri=url, label=label) <NEW_LINE> self.set_halign(Gtk.Align.START) <NEW_LINE> self.label = self.get_children()[0] <NEW_LINE> "If URI is too long reduce it for the label" <NEW_LINE> if len(label) > 60: <NEW_LINE> <INDENT> self.label.set_text(label[0:59] + " (...)") <NEW_LINE> <DEDENT> <DEDENT> def set_ellipsize(self, ellipsize): <NEW_LINE> <INDENT> self.label.set_ellipsize(ellipsize) <NEW_LINE> <DEDENT> def set_justify(self, justify): <NEW_LINE> <INDENT> self.label.set_justify(justify) | A link button | 6259909397e22403b383cc1b |
class WeightsPopup(Popup): <NEW_LINE> <INDENT> def __init__(self, parent_obj, text_list, **kwargs): <NEW_LINE> <INDENT> super(WeightsPopup, self).__init__(**kwargs) <NEW_LINE> self.parent_obj = parent_obj <NEW_LINE> self.pack(text_list) <NEW_LINE> <DEDENT> def pack(self, text_list): <NEW_LINE> <INDENT> spacing = 10. <NEW_LINE> cols_within_frame = 3 <NEW_LINE> die_size = len(text_list) <NEW_LINE> col_width = int(self.parent_obj.width / cols_within_frame) <NEW_LINE> add_drag = False <NEW_LINE> cols = ((die_size)//10 +1) <NEW_LINE> if cols > cols_within_frame: <NEW_LINE> <INDENT> cols = ((die_size+2)//10 +1) <NEW_LINE> add_drag = True <NEW_LINE> drag_it = Label(text='DRAG\n====>', bold=True) <NEW_LINE> <DEDENT> height = int(self.parent_obj.height* 0.9) <NEW_LINE> sz_hint = ((col_width - spacing)/(cols*col_width), 0.1 * (height-spacing)/height) <NEW_LINE> self.size = (min(1.1 * cols*col_width, self.parent_obj.width), self.parent_obj.height) <NEW_LINE> contents = self.ids['contents'] <NEW_LINE> contents.clear_widgets() <NEW_LINE> contents.size = (cols*col_width*0.88, height) <NEW_LINE> contents.spacing = spacing <NEW_LINE> if add_drag: <NEW_LINE> <INDENT> drag_it.size_hint = sz_hint <NEW_LINE> contents.add_widget(drag_it) <NEW_LINE> contents.add_widget(Button(on_press=self.record_weights, text='record\nweights', size_hint=sz_hint)) <NEW_LINE> <DEDENT> for text in text_list: <NEW_LINE> <INDENT> weighter = NumberSelect(0, 10, size_hint=sz_hint) <NEW_LINE> weighter.set_text(text, 1) <NEW_LINE> contents.add_widget(weighter) <NEW_LINE> <DEDENT> contents.add_widget(Button(on_press=self.record_weights, text='record\nweights', size_hint=sz_hint)) <NEW_LINE> <DEDENT> def record_weights(self, button): <NEW_LINE> <INDENT> out = [] <NEW_LINE> for child in self.ids['contents'].children[:]: <NEW_LINE> <INDENT> if isinstance(child, NumberSelect): <NEW_LINE> <INDENT> out.append(child.get_values()) <NEW_LINE> <DEDENT> <DEDENT> self.parent_obj.record_weights(out) <NEW_LINE> 
self.dismiss() | the popup called when weighting a die | 62599093099cdd3c6367628c |
class CpuProfileStatsHandler(RequestHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> request_id = self.request.get("request_id") <NEW_LINE> request_stats = RequestStats.get(request_id) <NEW_LINE> if not request_stats: <NEW_LINE> <INDENT> self.response.out.write( "Profiler stats no longer exist for this request.") <NEW_LINE> return <NEW_LINE> <DEDENT> if not 'cpuprofile' in request_stats.profiler_results: <NEW_LINE> <INDENT> self.response.out.write( "No .cpuprofile available for this profile") <NEW_LINE> return <NEW_LINE> <DEDENT> self.response.headers['Content-Disposition'] = ( 'attachment; filename="gmp-%s-%s.cpuprofile"' % (request_stats.start_dt.strftime('%Y%m%d-%H%M%S'), str(request_id))) <NEW_LINE> self.response.headers['Content-type'] = "application/json" <NEW_LINE> self.response.out.write(request_stats.profiler_results['cpuprofile']) | Handler for retrieving the (sampling) profile in .cpuprofile format.
This is compatible with Chrome's flamechart profile viewer. | 62599093f9cc0f698b1c615f |
class Order(models.Model): <NEW_LINE> <INDENT> order_number = models.ForeignKey(Order_Number, on_delete=models.CASCADE) <NEW_LINE> username = models.CharField(max_length=200, default='') <NEW_LINE> name = models.CharField(max_length=200, default='') <NEW_LINE> email = models.CharField(max_length=200, default='') <NEW_LINE> MY_STATUSES = ( ('INCOMPLETE', 'incomplete'), ('SUBMITTED', 'submitted'), ('FILLED', 'filled'), ) <NEW_LINE> order_status = models.CharField(max_length=20, choices=MY_STATUSES, default='INCOMPLETE') <NEW_LINE> quantity_ordered = models.CharField(max_length=200, default='1') <NEW_LINE> title = models.CharField(max_length=200) <NEW_LINE> price = models.DecimalField(max_digits=20, decimal_places=2) <NEW_LINE> MY_SIZES = ( ('S', 'Small'), ('L', 'Large'), ) <NEW_LINE> size = models.CharField(max_length=20, choices=MY_SIZES, default='S') <NEW_LINE> MY_FOOD_TYPES = ( ('PIZZA', 'pizza'), ('SUB', 'sub'), ('SALAD', 'salad'), ('PASTA', 'pasta'), ('DINNER', 'dinner platter'), ) <NEW_LINE> food_type = models.CharField(max_length=10, choices=MY_FOOD_TYPES, default='PIZZA') <NEW_LINE> toppings = models.CharField(max_length=200) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f"order number: {self.order_number.id} {self.title}" | Model representing the ordered items on the menu. | 62599093283ffb24f3cf55c9 |
class ModelInitializerBase(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def load_data(self, args): <NEW_LINE> <INDENT> iterator = node_link_data_to_eden(args.input_file) <NEW_LINE> return iterator <NEW_LINE> <DEDENT> def load_positive_data(self, args): <NEW_LINE> <INDENT> iterator = node_link_data_to_eden(args.positive_input_file) <NEW_LINE> return iterator <NEW_LINE> <DEDENT> def load_negative_data(self, args): <NEW_LINE> <INDENT> iterator = node_link_data_to_eden(args.negative_input_file) <NEW_LINE> return iterator <NEW_LINE> return self.load_data(args.negative_input_file) <NEW_LINE> <DEDENT> def pre_processor_init(self, args): <NEW_LINE> <INDENT> def pre_processor(graphs, **args): <NEW_LINE> <INDENT> return graphs <NEW_LINE> <DEDENT> pre_processor_parameters = {} <NEW_LINE> return pre_processor, pre_processor_parameters <NEW_LINE> <DEDENT> def vectorizer_init(self, args): <NEW_LINE> <INDENT> vectorizer = Vectorizer() <NEW_LINE> vectorizer_parameters = {'complexity': [2, 3, 4]} <NEW_LINE> return vectorizer, vectorizer_parameters <NEW_LINE> <DEDENT> def estimator_init(self, args): <NEW_LINE> <INDENT> estimator = SGDClassifier(average=True, class_weight='balanced', shuffle=True) <NEW_LINE> estimator_parameters = {'n_iter': randint(5, 200, size=args.n_iter), 'penalty': ['l1', 'l2', 'elasticnet'], 'l1_ratio': uniform(0.1, 0.9, size=args.n_iter), 'loss': ['hinge', 'log', 'modified_huber', 'squared_hinge', 'perceptron'], 'power_t': uniform(0.1, size=args.n_iter), 'alpha': [10 ** x for x in range(-8, 0)], 'eta0': [10 ** x for x in range(-4, -1)], 'learning_rate': ["invscaling", "constant", "optimal"], 'n_jobs': [-1]} <NEW_LINE> return estimator, estimator_parameters <NEW_LINE> <DEDENT> def add_arguments(self, parser): <NEW_LINE> <INDENT> parser.add_argument('--version', action='version', version='0.1') <NEW_LINE> return parser <NEW_LINE> <DEDENT> def add_arguments_fit(self, parser): <NEW_LINE> <INDENT> 
parser.add_argument("-p", "--positive-input-file", dest="positive_input_file", help="Path tofile containing input for the positive class.", required=True) <NEW_LINE> parser.add_argument("-n", "--negative-input-file", dest="negative_input_file", help="Path to file containing input for the negative class.", required=True) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def add_arguments_estimate(self, parser): <NEW_LINE> <INDENT> return self.add_arguments_fit(parser) <NEW_LINE> <DEDENT> def add_arguments_base(self, parser): <NEW_LINE> <INDENT> parser.add_argument("-i", "--input-file", dest="input_file", help="Path to file containing input.", required=True) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def add_arguments_matrix(self, parser): <NEW_LINE> <INDENT> return parser <NEW_LINE> <DEDENT> def add_arguments_predict(self, parser): <NEW_LINE> <INDENT> return parser <NEW_LINE> <DEDENT> def add_arguments_feature(self, parser): <NEW_LINE> <INDENT> return parser | Subclass to generate your own EDeN model driver. | 6259909355399d3f0562823d |
class Gather(Goal): <NEW_LINE> <INDENT> def __init__(self, what, max_amount=1, distance=30): <NEW_LINE> <INDENT> Goal.__init__(self, "gather a thing", self.is_there_none_around, [SpotSomething(what), PickUpFocus(what)]) <NEW_LINE> if isinstance(what, str): <NEW_LINE> <INDENT> self.what = what <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.what = str(what) <NEW_LINE> <DEDENT> self.filter = entity_filter.Filter(self.what) <NEW_LINE> self.max_amount = max_amount <NEW_LINE> self.distance = distance <NEW_LINE> self.vars = ["what", "max_amount", "distance"] <NEW_LINE> <DEDENT> def is_there_none_around(self, me): <NEW_LINE> <INDENT> amount = 0 <NEW_LINE> entities_in_inventory = me.match_entities(self.filter, me.entity.contains) <NEW_LINE> for entity in entities_in_inventory: <NEW_LINE> <INDENT> amount += entity.get_prop_int("amount", 1) <NEW_LINE> <DEDENT> if amount >= self.max_amount: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> what_all = me.map.find_by_filter(self.filter) <NEW_LINE> for thing in what_all: <NEW_LINE> <INDENT> distance_to_thing = me.steering.distance_to(thing, ai.EDGE, ai.EDGE) <NEW_LINE> if distance_to_thing < self.distance: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True | Base class for getting a freely available resource. | 62599093f9cc0f698b1c6160 |
Subsets and Splits