code (string, length 4 to 4.48k) | docstring (string, length 1 to 6.45k) | _id (string, length 24) |
---|---|---|
class Background(FrameMinion): <NEW_LINE> <INDENT> name = 'background' <NEW_LINE> def run(self): <NEW_LINE> <INDENT> import os <NEW_LINE> import logging <NEW_LINE> import warnings <NEW_LINE> import numpy as np <NEW_LINE> from numpy.ma.core import MaskedArrayFutureWarning <NEW_LINE> logging.getLogger('tgk.science').debug(' Measuring background.') <NEW_LINE> sky = self.im.data[self.obs.trimsec] <NEW_LINE> row = [self.obs.frame_name, self.obs.filter] <NEW_LINE> bgsections = (np.s_[:100, :100], np.s_[-100:, :100], np.s_[-100:, -100:], np.s_[:100, -100:]) <NEW_LINE> all_backgrounds = [] <NEW_LINE> for s in bgsections: <NEW_LINE> <INDENT> clipped, bg, bgsig, bgarea = self._est(sky[s]) <NEW_LINE> row.extend((bg, bgsig, bgarea)) <NEW_LINE> all_backgrounds.extend(clipped[~clipped.mask]) <NEW_LINE> <DEDENT> row.extend(self._est(all_backgrounds)[1:]) <NEW_LINE> BackgroundTable().update(row) <NEW_LINE> <DEDENT> def _est(self, a, sigma_lower=3, sigma_upper=2.5, **kwargs): <NEW_LINE> <INDENT> import numpy as np <NEW_LINE> from astropy import stats <NEW_LINE> clipped = stats.sigma_clip(a, sigma_lower=sigma_lower, sigma_upper=sigma_upper) <NEW_LINE> bg = np.ma.median(clipped) <NEW_LINE> bgsig = clipped.std() <NEW_LINE> bgarea = np.sum(~clipped.mask, dtype=int) <NEW_LINE> return clipped, bg, bgsig, bgarea | Estimate comet background.
Parameters
----------
config : dict
Configuration parameters.
im : Image
Frame data.
obs : Observation
Frame meta data.
geom : Geometry
Comet geometric circumstances. | 6259908d4c3428357761bf12 |
class DistrictIspInfo(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Domain = None <NEW_LINE> self.Protocol = None <NEW_LINE> self.IpProtocol = None <NEW_LINE> self.StartTime = None <NEW_LINE> self.EndTime = None <NEW_LINE> self.Interval = None <NEW_LINE> self.Metric = None <NEW_LINE> self.District = None <NEW_LINE> self.Isp = None <NEW_LINE> self.DataPoints = None <NEW_LINE> self.DistrictName = None <NEW_LINE> self.IspName = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Domain = params.get("Domain") <NEW_LINE> self.Protocol = params.get("Protocol") <NEW_LINE> self.IpProtocol = params.get("IpProtocol") <NEW_LINE> self.StartTime = params.get("StartTime") <NEW_LINE> self.EndTime = params.get("EndTime") <NEW_LINE> self.Interval = params.get("Interval") <NEW_LINE> self.Metric = params.get("Metric") <NEW_LINE> self.District = params.get("District") <NEW_LINE> self.Isp = params.get("Isp") <NEW_LINE> self.DataPoints = params.get("DataPoints") <NEW_LINE> self.DistrictName = params.get("DistrictName") <NEW_LINE> self.IspName = params.get("IspName") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set)) | District and ISP detailed data
| 6259908dad47b63b2c5a94a8 |
class ProductSplitter(BaseVspSplitter): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> _simuPOP_ba.ProductSplitter_swiginit(self, _simuPOP_ba.new_ProductSplitter(*args, **kwargs)) <NEW_LINE> <DEDENT> __swig_destroy__ = _simuPOP_ba.delete_ProductSplitter | Details:
This splitter takes several splitters and takes their intersections
as new VSPs. For example, if the first splitter defines 3 VSPs and
the second splitter defines 2, 6 VSPs will be defined by splitting
3 VSPs defined by the first splitter each into two VSPs. This
splitter is usually used to define finer VSPs from existing VSPs. | 6259908d5fcc89381b266f89 |
class AbstractUniqueEmailUser(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> username = models.CharField( trans('username'), max_length=30, unique=True, help_text=trans( 'Required. 30 characters or fewer. Letters, numbers and ' '@/./+/-/_ characters'), validators=[ validators.RegexValidator( re.compile('^[\w.@+-]+$'), trans('Enter a valid username.'), 'invalid') ]) <NEW_LINE> email = models.EmailField(trans('email address'), blank=True, unique=True) <NEW_LINE> is_staff = models.BooleanField( trans('staff status'), default=False, help_text=trans('Designates whether the user can log into this admin ' 'site.')) <NEW_LINE> is_active = models.BooleanField( trans('active'), default=True, help_text=trans('Designates whether this user should be treated as ' 'active. Unselect this instead of deleting accounts.')) <NEW_LINE> date_joined = models.DateTimeField(trans('date joined'), default=timezone.now) <NEW_LINE> objects = UserManager() <NEW_LINE> USERNAME_FIELD = 'username' <NEW_LINE> REQUIRED_FIELDS = ['email'] <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = trans('user') <NEW_LINE> verbose_name_plural = trans('users') <NEW_LINE> abstract = True <NEW_LINE> <DEDENT> def get_full_name(self): <NEW_LINE> <INDENT> full_name = '%s %s' % (self.get_first_name(), self.get_last_name()) <NEW_LINE> return full_name.strip() <NEW_LINE> <DEDENT> def get_short_name(self): <NEW_LINE> <INDENT> return self.get_first_name() <NEW_LINE> <DEDENT> def email_user(self, subject, message, from_email=None, **kwargs): <NEW_LINE> <INDENT> send_mail(subject, message, from_email, [self.email], **kwargs) | An abstract base class implementing a fully featured User model with
admin-compliant permissions.
Username, password and email are required. Other fields are optional. | 6259908d5fdd1c0f98e5fbce |
class Game(models.Model): <NEW_LINE> <INDENT> developer = models.ForeignKey(User, limit_choices_to={'groups__name': "Developers"}) <NEW_LINE> title = models.CharField(max_length=60, blank=False) <NEW_LINE> search_title = models.CharField(max_length=60, null=False, default='') <NEW_LINE> url = models.URLField(blank=False, null=False, default="http://example.com") <NEW_LINE> price = models.DecimalField(max_digits=5, decimal_places=2, null=False, default=0.00, validators=[validate_price]) <NEW_LINE> tags = models.TextField(null=False, default='', blank=True) <NEW_LINE> description = models.TextField(null=False, default='', blank=False) <NEW_LINE> img_url = models.URLField(null=True, blank=True, default="") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '<Game object (id: {}, title: {}, developer: {})>'.format(self.pk, self.title, self.developer) <NEW_LINE> <DEDENT> def get_tags(self): <NEW_LINE> <INDENT> return [x for x in self.tags.split(',')] <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> tags = [x.strip() for x in self.tags.split(',')] <NEW_LINE> tags = [re.sub(r'(\s|_)+', '_', x.lower()) for x in tags if x] <NEW_LINE> tags = set(re.sub(r'[^a-zA-Z0-9_]', '', x) for x in tags) <NEW_LINE> self.tags = ','.join(sorted(tags)[:10]) <NEW_LINE> s_title = re.sub(r'(\s)+', ' ', self.title.lower().strip()) <NEW_LINE> s_title = s_title.replace('%', 'percent').replace('&', 'and') <NEW_LINE> self.search_title = re.sub(r'[^\w\s]', '', s_title) <NEW_LINE> super(Game, self).save(*args, **kwargs) <NEW_LINE> <DEDENT> def get_related_games(self): <NEW_LINE> <INDENT> t1 = set(self.get_tags()) <NEW_LINE> if not t1: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> games = Game.objects.exclude(pk=self.pk) <NEW_LINE> related = [] <NEW_LINE> for g in games: <NEW_LINE> <INDENT> t2 = set(g.get_tags()) <NEW_LINE> isect = t1 & t2; <NEW_LINE> union = t1 | t2; <NEW_LINE> if isect: <NEW_LINE> <INDENT> related.append((g, len(isect)/len(union))) <NEW_LINE> <DEDENT> <DEDENT> return related | Model representing Game objects.
Attributes:
title: a string: name of the game (certain characters are not allowed)
search_title: a string: plain version of title to be used in searches (certain characters removed)
developer: a User object belonging to group Developers
url: a string: URL link to the game
price: a Decimal: price of the game (must be non-negative)
tags: a string: comma-separated list of tags to categorize the game by (no spaces allowed)
description: a string describing the game
img_url: a string: URL to an image of the game (optional) | 6259908d8a349b6b43687eb6 |
class Delete(Base): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> id = self.options['<id>'] <NEW_LINE> password_record = password_service.get(id) <NEW_LINE> answer = input('Do you want to delete {name}({account_name})? (y/n): '.format(name=password_record['name'], account_name=password_record['account_name'])) <NEW_LINE> if answer == 'y': <NEW_LINE> <INDENT> master_password = self.requirePassword('Master password') <NEW_LINE> password_service.delete(id, master_password) <NEW_LINE> print('Password deleted!') | Delete passwords | 6259908d7cff6e4e811b769a |
class DeleteRawModifiedDetails(FrozenClass): <NEW_LINE> <INDENT> ua_types = [ ('NodeId', 'NodeId'), ('IsDeleteModified', 'Boolean'), ('StartTime', 'DateTime'), ('EndTime', 'DateTime'), ] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.NodeId = NodeId() <NEW_LINE> self.IsDeleteModified = True <NEW_LINE> self.StartTime = datetime.utcnow() <NEW_LINE> self.EndTime = datetime.utcnow() <NEW_LINE> self._freeze = True <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f'DeleteRawModifiedDetails(NodeId:{self.NodeId}, IsDeleteModified:{self.IsDeleteModified}, StartTime:{self.StartTime}, EndTime:{self.EndTime})' <NEW_LINE> <DEDENT> __repr__ = __str__ | :ivar NodeId:
:vartype NodeId: NodeId
:ivar IsDeleteModified:
:vartype IsDeleteModified: Boolean
:ivar StartTime:
:vartype StartTime: DateTime
:ivar EndTime:
:vartype EndTime: DateTime | 6259908df9cc0f698b1c60f7 |
class TextValueGenerator(object): <NEW_LINE> <INDENT> def __init__(self,meanlen,stdlen): <NEW_LINE> <INDENT> self.meanlen = meanlen <NEW_LINE> self.stdlen = stdlen <NEW_LINE> <DEDENT> def generate(self): <NEW_LINE> <INDENT> length = max(0,int(random.gauss(self.meanlen, self.stdlen))) <NEW_LINE> return '"' + "".join([random.choice(string.ascii_letters + string.digits) for _ in range(length)]) + '"' | Generates random text. | 6259908d55399d3f0562816c |
class RegenerateCredentialParameters(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'name': {'required': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, name: Union[str, "PasswordName"], **kwargs ): <NEW_LINE> <INDENT> super(RegenerateCredentialParameters, self).__init__(**kwargs) <NEW_LINE> self.name = name | The parameters used to regenerate the login credential.
All required parameters must be populated in order to send to Azure.
:ivar name: Required. Specifies name of the password which should be regenerated -- password or
password2. Possible values include: "password", "password2".
:vartype name: str or ~azure.mgmt.containerregistry.v2017_10_01.models.PasswordName | 6259908d26068e7796d4e59a |
class TestRowWiseThreshold(unittest.TestCase): <NEW_LINE> <INDENT> def test_3by3_matrix_percentile(self): <NEW_LINE> <INDENT> matrix = np.array([[0.5, 2.0, 3.0], [3.0, 4.0, 5.0], [4.0, 2.0, 1.0]]) <NEW_LINE> adjusted_matrix = refinement.RowWiseThreshold( p_percentile=0.5, thresholding_soft_multiplier=0.01, thresholding_type=ThresholdType.Percentile).refine(matrix) <NEW_LINE> expected = np.array([[0.005, 2.0, 3.0], [0.03, 4.0, 5.0], [4.0, 2.0, 0.01]]) <NEW_LINE> self.assertTrue(np.allclose(expected, adjusted_matrix, atol=0.001)) <NEW_LINE> <DEDENT> def test_3by3_matrix_row_max(self): <NEW_LINE> <INDENT> matrix = np.array([[0.5, 2.0, 3.0], [3.0, 4.0, 5.0], [4.0, 2.0, 1.0]]) <NEW_LINE> adjusted_matrix = refinement.RowWiseThreshold( p_percentile=0.5, thresholding_soft_multiplier=0.01, thresholding_type=ThresholdType.RowMax).refine(matrix) <NEW_LINE> expected = np.array([[0.005, 2.0, 3.0], [3.0, 4.0, 5.0], [4.0, 2.0, 0.01]]) <NEW_LINE> self.assertTrue(np.allclose(expected, adjusted_matrix, atol=0.001)) <NEW_LINE> <DEDENT> def test_3by3_matrix_binarization(self): <NEW_LINE> <INDENT> matrix = np.array([[0.5, 2.0, 3.0], [3.0, 4.0, 5.0], [4.0, 2.0, 1.0]]) <NEW_LINE> adjusted_matrix = refinement.RowWiseThreshold( p_percentile=0.5, thresholding_soft_multiplier=0.01, thresholding_type=ThresholdType.RowMax, thresholding_with_binarization=True).refine(matrix) <NEW_LINE> expected = np.array([[0.005, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 0.01]]) <NEW_LINE> self.assertTrue(np.allclose(expected, adjusted_matrix, atol=0.001)) <NEW_LINE> <DEDENT> def test_3by3_matrix_preserve_diagonal(self): <NEW_LINE> <INDENT> matrix = np.array([[0.5, 2.0, 3.0], [3.0, 4.0, 5.0], [4.0, 2.0, 1.0]]) <NEW_LINE> adjusted_matrix = refinement.RowWiseThreshold( p_percentile=0.5, thresholding_soft_multiplier=0.01, thresholding_type=ThresholdType.RowMax, thresholding_with_binarization=True, thresholding_preserve_diagonal=True).refine(matrix) <NEW_LINE> expected = np.array([[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]) <NEW_LINE> self.assertTrue(np.allclose(expected, adjusted_matrix, atol=0.001)) | Tests for the RowWiseThreshold class. | 6259908dbf627c535bcb312b |
class PrepaidRentTestCase(DataProvider, BalanceUtils, TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.cash = self.account(type=Account.TYPES.asset) <NEW_LINE> self.rent_expense = self.account(type=Account.TYPES.expense) <NEW_LINE> self.prepaid_rent = self.account(type=Account.TYPES.asset) <NEW_LINE> <DEDENT> def test_prepaid_rent(self): <NEW_LINE> <INDENT> self.assertBalanceEqual(self.cash.balance(), 0) <NEW_LINE> self.assertBalanceEqual(self.rent_expense.balance(), 0) <NEW_LINE> self.assertBalanceEqual(self.prepaid_rent.balance(), 0) <NEW_LINE> self.cash.transfer_to(self.prepaid_rent, Money(3000, "EUR")) <NEW_LINE> self.assertBalanceEqual(self.cash.balance(), -3000) <NEW_LINE> self.assertBalanceEqual(self.rent_expense.balance(), 0) <NEW_LINE> self.assertBalanceEqual(self.prepaid_rent.balance(), 3000) <NEW_LINE> self.prepaid_rent.transfer_to(self.rent_expense, Money(1000, "EUR")) <NEW_LINE> self.assertBalanceEqual(self.cash.balance(), -3000) <NEW_LINE> self.assertBalanceEqual(self.rent_expense.balance(), 1000) <NEW_LINE> self.assertBalanceEqual(self.prepaid_rent.balance(), 2000) <NEW_LINE> self.prepaid_rent.transfer_to(self.rent_expense, Money(1000, "EUR")) <NEW_LINE> self.assertBalanceEqual(self.cash.balance(), -3000) <NEW_LINE> self.assertBalanceEqual(self.rent_expense.balance(), 2000) <NEW_LINE> self.assertBalanceEqual(self.prepaid_rent.balance(), 1000) <NEW_LINE> self.prepaid_rent.transfer_to(self.rent_expense, Money(1000, "EUR")) <NEW_LINE> self.assertBalanceEqual(self.cash.balance(), -3000) <NEW_LINE> self.assertBalanceEqual(self.rent_expense.balance(), 3000) <NEW_LINE> self.assertBalanceEqual(self.prepaid_rent.balance(), 0) <NEW_LINE> Account.validate_accounting_equation() | Prepay three months rent in advance
Based on example here:
http://www.double-entry-bookkeeping.com/other-current-assets/prepaid-rent/ | 6259908d5fcc89381b266f8a |
class StatementParser: <NEW_LINE> <INDENT> def __init__(self, bankname: str): <NEW_LINE> <INDENT> self.confbank = bankparser.config.get_bank_config(bankname) <NEW_LINE> self.bankname = bankname <NEW_LINE> self.filename = None <NEW_LINE> self.content = None <NEW_LINE> <DEDENT> def parse(self, filename, is_content: bool=False): <NEW_LINE> <INDENT> if is_content: <NEW_LINE> <INDENT> self.content = filename <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> encoding = self.confbank.bank.encoding <NEW_LINE> with open(filename, 'r', encoding=encoding)as f: <NEW_LINE> <INDENT> self.content = f.read() <NEW_LINE> <DEDENT> <DEDENT> statement = bankparser.statement.Statement(bank=self.bankname, typest=self.confbank.bank.type) <NEW_LINE> reader = self._split_records() <NEW_LINE> for line in reader: <NEW_LINE> <INDENT> if not line: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> stmt_line = self._parse_record(line) <NEW_LINE> if stmt_line: <NEW_LINE> <INDENT> statement.lines.append(stmt_line) <NEW_LINE> if statement.account is None: <NEW_LINE> <INDENT> statement.account = stmt_line.account <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return statement <NEW_LINE> <DEDENT> def _split_records(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def _parse_record(self, line): <NEW_LINE> <INDENT> sl = bankparser.statementline.StatementLine() <NEW_LINE> inifields = line.keys() <NEW_LINE> objfields = [arg for arg in dir(bankparser.statementline.StatementLine) if not arg.startswith('_')] <NEW_LINE> for field in objfields: <NEW_LINE> <INDENT> if field in inifields: <NEW_LINE> <INDENT> rawvalue = line[field] <NEW_LINE> changemap = getattr(self.confbank.bank, 'm_' + field, None) <NEW_LINE> if changemap: <NEW_LINE> <INDENT> rawvalue = changemap.get(rawvalue, rawvalue) <NEW_LINE> <DEDENT> value = self._parse_value(rawvalue, field) <NEW_LINE> setattr(sl, field, value) <NEW_LINE> <DEDENT> <DEDENT> listDescr=list(self.confbank.bank.m_descr_account.keys()) <NEW_LINE> for strFind in listDescr: <NEW_LINE> <INDENT> if strFind in sl.description: <NEW_LINE> <INDENT> sl.category = self.confbank.bank.m_descr_account[strFind] <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if sl.amount and sl.amountsign == '-': <NEW_LINE> <INDENT> sl.amount = sl.amount * Decimal(sl.amountsign+'1') <NEW_LINE> <DEDENT> sl = self.confbank.bank.after_row_parsed(sl, line) <NEW_LINE> return sl <NEW_LINE> <DEDENT> def _parse_value(self, value, field): <NEW_LINE> <INDENT> tp = type(getattr(bankparser.statementline.StatementLine, field)) <NEW_LINE> if tp == datetime: <NEW_LINE> <INDENT> return self._parse_datetime(value) <NEW_LINE> <DEDENT> elif tp == float: <NEW_LINE> <INDENT> return self._parse_float(value) <NEW_LINE> <DEDENT> elif tp == Decimal: <NEW_LINE> <INDENT> return self._parse_decimal(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return value.strip() <NEW_LINE> <DEDENT> <DEDENT> def _parse_datetime(self, value): <NEW_LINE> <INDENT> date_format = self.confbank.bank.dateformat <NEW_LINE> return datetime.strptime(value, date_format) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _parse_float(value): <NEW_LINE> <INDENT> val = value.replace(',', '.') <NEW_LINE> return float(val) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _parse_decimal(value): <NEW_LINE> <INDENT> val = value.replace(',', '.').strip('0') <NEW_LINE> val = val.replace(' ','') <NEW_LINE> return Decimal(val) | Base class for parsing a bank statement | 6259908d3346ee7daa33848e |
@mock.patch('cinder.volume.api.API.get_snapshot', api_snapshot_get) <NEW_LINE> class SnapshotUnmanageTest(test.TestCase): <NEW_LINE> <INDENT> def _get_resp(self, snapshot_id): <NEW_LINE> <INDENT> req = webob.Request.blank('/v3/%s/snapshots/%s/action' % ( fake.PROJECT_ID, snapshot_id)) <NEW_LINE> req.method = 'POST' <NEW_LINE> req.headers['Content-Type'] = 'application/json' <NEW_LINE> req.environ['cinder.context'] = context.RequestContext(fake.USER_ID, fake.PROJECT_ID, True) <NEW_LINE> body = {'os-unmanage': ''} <NEW_LINE> req.body = jsonutils.dump_as_bytes(body) <NEW_LINE> res = req.get_response(app()) <NEW_LINE> return res <NEW_LINE> <DEDENT> @mock.patch('cinder.db.conditional_update', return_value=1) <NEW_LINE> @mock.patch('cinder.db.snapshot_update') <NEW_LINE> @mock.patch('cinder.volume.rpcapi.VolumeAPI.delete_snapshot') <NEW_LINE> def test_unmanage_snapshot_ok(self, mock_rpcapi, mock_db_update, mock_conditional_update): <NEW_LINE> <INDENT> res = self._get_resp(snapshot_id) <NEW_LINE> self.assertEqual(1, mock_rpcapi.call_count) <NEW_LINE> self.assertEqual(3, len(mock_rpcapi.call_args[0])) <NEW_LINE> self.assertEqual(0, len(mock_rpcapi.call_args[1])) <NEW_LINE> self.assertEqual(HTTPStatus.ACCEPTED, res.status_int, res) <NEW_LINE> <DEDENT> def test_unmanage_snapshot_bad_snapshot_id(self): <NEW_LINE> <INDENT> res = self._get_resp(bad_snp_id) <NEW_LINE> self.assertEqual(HTTPStatus.NOT_FOUND, res.status_int, res) | Test cases for cinder/api/contrib/snapshot_unmanage.py
The API extension adds an action to snapshots, "os-unmanage", which will
effectively issue a delete operation on the snapshot, but with a flag set
that means that a different method will be invoked on the driver, so that
the snapshot is not actually deleted in the storage backend.
In this set of test cases, we are ensuring that the code correctly parses
the request structure and raises the correct exceptions when things are not
right, and calls down into cinder.volume.api.API.delete_snapshot with the
correct arguments. | 6259908d283ffb24f3cf54f9 |
class MyForm(forms.Form): <NEW_LINE> <INDENT> pass | Example.
I have no Question object, so it is commented out. | 6259908d656771135c48ae5d |
class IssueStatusSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Issue <NEW_LINE> fields = ['status'] <NEW_LINE> <DEDENT> def update(self, id): <NEW_LINE> <INDENT> issue = Issue.objects.get(id=id) <NEW_LINE> issue.status = self.validated_data['status'] <NEW_LINE> issue.save() | Serializer of an issue's status | 6259908da05bb46b3848bf52 |
class TestingConfig(Config): <NEW_LINE> <INDENT> DEBUG = True <NEW_LINE> DATABASE_URL = os.getenv("DATABASE_TEST_URL") | Configurations for Testing | 6259908d167d2b6e312b83c5 |
class G43dot1(GCodeMaker): <NEW_LINE> <INDENT> _cmd = "G43.1" <NEW_LINE> _order = ['z'] <NEW_LINE> _tmpls = {'z':"Z%s"} <NEW_LINE> def __init__(self, **data): <NEW_LINE> <INDENT> GCodeMaker.__init__(self, **data) | Dynamic Tool Length Offset | 6259908de1aae11d1e7cf642 |
class TarballContains(Matcher): <NEW_LINE> <INDENT> def __init__(self, paths): <NEW_LINE> <INDENT> super(TarballContains, self).__init__() <NEW_LINE> self.paths = paths <NEW_LINE> self.path_matcher = Equals(sorted(self.paths)) <NEW_LINE> <DEDENT> def match(self, tarball_path): <NEW_LINE> <INDENT> f = open(tarball_path, "rb") <NEW_LINE> try: <NEW_LINE> <INDENT> tarball = tarfile.open(tarball_path, fileobj=f) <NEW_LINE> try: <NEW_LINE> <INDENT> return self.path_matcher.match(sorted(tarball.getnames())) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> tarball.close() <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> f.close() | Matches if the given tarball contains the given paths.
Uses TarFile.getnames() to get the paths out of the tarball. | 6259908dad47b63b2c5a94ac |
class Solution: <NEW_LINE> <INDENT> def isPalindrome(self, s: str) -> bool: <NEW_LINE> <INDENT> if not s: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> s = ''.join([x.lower() for x in s if x.isalnum()]) <NEW_LINE> return s[::-1] == s | Determine whether a string is a palindrome
i.e. a string that reads the same forwards and backwards, ignoring case and special punctuation | 6259908d60cbc95b06365b95 |
class ObjectDict(dict): <NEW_LINE> <INDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self[name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise AttributeError(name) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, name, value): <NEW_LINE> <INDENT> self[name] = value | Makes a dictionary behave like an object, with attribute-style access. | 6259908d3346ee7daa33848f |
class TestStartpage(BaseCherryPyTestCase, ResponseAssertions): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls) -> None: <NEW_LINE> <INDENT> helpers.start_server(apps.startpage.main.Controller) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls) -> None: <NEW_LINE> <INDENT> helpers.stop_server() <NEW_LINE> <DEDENT> def test_allow(self) -> None: <NEW_LINE> <INDENT> response = self.request("/", method="HEAD") <NEW_LINE> self.assert_allowed(response, ("GET", "POST")) <NEW_LINE> <DEDENT> def test_exposed(self) -> None: <NEW_LINE> <INDENT> self.assert_exposed(apps.startpage.main.Controller) <NEW_LINE> <DEDENT> def test_show_on_homepage(self) -> None: <NEW_LINE> <INDENT> self.assert_show_on_homepage(apps.startpage.main.Controller) | Unit tests for the template app | 6259908d97e22403b383cb52 |
class TestEzsigndocumentlogResponse(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testEzsigndocumentlogResponse(self): <NEW_LINE> <INDENT> pass | EzsigndocumentlogResponse unit test stubs | 6259908dfff4ab517ebcf472 |
class UserProfileManager(BaseUserManager): <NEW_LINE> <INDENT> def create_user(self, email, name, password=None): <NEW_LINE> <INDENT> if not email: <NEW_LINE> <INDENT> raise ValueError('Users must have an email address.') <NEW_LINE> <DEDENT> email = self.normalize_email(email) <NEW_LINE> user = self.model(email=email, name=name) <NEW_LINE> user.set_password(password) <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user <NEW_LINE> <DEDENT> def create_superuser(self, email, name, password): <NEW_LINE> <INDENT> user = self.create_user(email, name, password) <NEW_LINE> user.is_superuser = True <NEW_LINE> user.is_staff = True <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user | Helps Django work with our custom user model. | 6259908d26068e7796d4e59e |
class ResultTracker(object): <NEW_LINE> <INDENT> MISSING = object() <NEW_LINE> SKIPPED = object() <NEW_LINE> def __init__(self, request, parent_object, placeholder, items, all_cacheable=True): <NEW_LINE> <INDENT> self.request = request <NEW_LINE> self.parent_object = parent_object <NEW_LINE> self.placeholder = placeholder <NEW_LINE> self.items = items <NEW_LINE> self.all_timeout = DEFAULT_TIMEOUT <NEW_LINE> self.all_cacheable = all_cacheable <NEW_LINE> self.output_ordering = [] <NEW_LINE> self.remaining_items = [] <NEW_LINE> self.item_output = {} <NEW_LINE> self.item_source = {} <NEW_LINE> self.placeholder_name = get_placeholder_name(placeholder) <NEW_LINE> <DEDENT> def store_output(self, contentitem, output): <NEW_LINE> <INDENT> self._set_output(contentitem, output) <NEW_LINE> <DEDENT> def store_exception(self, contentitem, exception): <NEW_LINE> <INDENT> self._set_output(contentitem, exception) <NEW_LINE> <DEDENT> def set_skipped(self, contentitem): <NEW_LINE> <INDENT> self._set_output(contentitem, self.SKIPPED) <NEW_LINE> <DEDENT> def _set_output(self, contentitem, output): <NEW_LINE> <INDENT> item_id = self._get_item_id(contentitem) <NEW_LINE> self.item_output[item_id] = output <NEW_LINE> self.item_source[item_id] = contentitem <NEW_LINE> <DEDENT> def _get_item_id(self, contentitem): <NEW_LINE> <INDENT> return contentitem.pk or id(contentitem) <NEW_LINE> <DEDENT> def add_ordering(self, contentitem): <NEW_LINE> <INDENT> item_id = self._get_item_id(contentitem) <NEW_LINE> self.item_source[item_id] = contentitem <NEW_LINE> self.output_ordering.append(item_id) <NEW_LINE> <DEDENT> def add_remaining(self, contentitem): <NEW_LINE> <INDENT> self.remaining_items.append(contentitem) <NEW_LINE> <DEDENT> def add_remaining_list(self, contentitems): <NEW_LINE> <INDENT> self.remaining_items.extend(contentitems) <NEW_LINE> self.output_ordering.extend(self._get_item_id(item) for item in contentitems) <NEW_LINE> <DEDENT> def fetch_remaining_instances(self, queryset): <NEW_LINE> <INDENT> if self.remaining_items: <NEW_LINE> <INDENT> self.remaining_items = queryset.get_real_instances(self.remaining_items) <NEW_LINE> <DEDENT> <DEDENT> def add_plugin_timeout(self, plugin): <NEW_LINE> <INDENT> self.all_timeout = _min_timeout(self.all_timeout, plugin.cache_timeout) <NEW_LINE> <DEDENT> def set_uncachable(self): <NEW_LINE> <INDENT> self.all_cacheable = False <NEW_LINE> <DEDENT> def get_output(self, include_exceptions=False): <NEW_LINE> <INDENT> ordered_output = [] <NEW_LINE> for item_id in self.output_ordering: <NEW_LINE> <INDENT> contentitem = self.item_source[item_id] <NEW_LINE> try: <NEW_LINE> <INDENT> output = self.item_output[item_id] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> if not include_exceptions: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> output = self.MISSING <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not include_exceptions: <NEW_LINE> <INDENT> if isinstance(output, Exception) or output is self.SKIPPED: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> ordered_output.append((contentitem, output)) <NEW_LINE> <DEDENT> return ordered_output | A tracking of intermediate results during rendering.
This object is completely agnostic to what it's rendering,
it just stores "output" for a "contentitem". | 6259908dec188e330fdfa50b |
class MathExtension(Extension): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(MathExtension, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def extendMarkdown(self, md, md_globals): <NEW_LINE> <INDENT> def handle_match_inline(m): <NEW_LINE> <INDENT> node = etree.Element('script') <NEW_LINE> node.set('type', 'math/tex') <NEW_LINE> node.text = AtomicString(m.group(3)) <NEW_LINE> return node <NEW_LINE> <DEDENT> def handle_match(m): <NEW_LINE> <INDENT> node = etree.Element('script') <NEW_LINE> node.set('type', 'math/tex; mode=display') <NEW_LINE> if '\\begin' in m.group(2): <NEW_LINE> <INDENT> node.text = AtomicString(m.group(2) + m.group(4) + m.group(5)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> node.text = AtomicString(m.group(3)) <NEW_LINE> <DEDENT> return node <NEW_LINE> <DEDENT> inlinemathpatterns = ( Pattern(r'(?<!\\|\$)(\$)([^\$]+)(\$)'), Pattern(r'(?<!\\)(\\\()(.+?)(\\\))') ) <NEW_LINE> mathpatterns = ( Pattern(r'(?<!\\)(\$\$)([^\$]+)(\$\$)'), Pattern(r'(?<!\\)(\\\[)(.+?)(\\\])'), Pattern(r'(?<!\\)(\\begin{([a-z]+?\*?)})(.+?)(\\end{\3})') ) <NEW_LINE> for i, pattern in enumerate(inlinemathpatterns): <NEW_LINE> <INDENT> pattern.handleMatch = handle_match_inline <NEW_LINE> md.inlinePatterns.add('math-inline-%d' % i, pattern, '<escape') <NEW_LINE> <DEDENT> for i, pattern in enumerate(mathpatterns): <NEW_LINE> <INDENT> pattern.handleMatch = handle_match <NEW_LINE> md.inlinePatterns.add('math-%d' % i, pattern, '<escape') | ## Math extension for Python-Markdown
Adds support for displaying math formulas using [MathJax](http://www.mathjax.org/).
Author: 2015, Dmitry Shachnev <[email protected]>.
Slightly customized by cryptonomicon314 | 6259908d5fc7496912d49099 |
class UserSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = get_user_model() <NEW_LINE> fields = ('email', 'password', 'name') <NEW_LINE> extra_kwargs = {'password': {'write_only': True, 'min_length': 5}} <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> return get_user_model().objects.create_user(**validated_data) <NEW_LINE> <DEDENT> def update(self, instance, validated_data): <NEW_LINE> <INDENT> password = validated_data.pop('password', None) <NEW_LINE> user = super().update(instance, validated_data) <NEW_LINE> if password: <NEW_LINE> <INDENT> user.set_password(password) <NEW_LINE> user.save() <NEW_LINE> <DEDENT> return user | Serializer for user object | 6259908daad79263cf43040f |
class KeyView(BaseView): <NEW_LINE> <INDENT> route_base = '/api/1/auth/key' <NEW_LINE> def get(self): <NEW_LINE> <INDENT> resp = {} <NEW_LINE> resp['format'] = const.AUTH_TOKEN_KEY_FORMAT <NEW_LINE> resp['key'] = const.AUTH_TOKEN_PUB_KEY <NEW_LINE> resp['algorithm'] = const.AUTH_TOKEN_ALGORITHM <NEW_LINE> return ujson.dumps({'content' : resp}), 200 | API end point for obtaining the public key, used to decode the JWT tokens | 6259908d8a349b6b43687ebc |
class LastModifiedCleaner(Feeder): <NEW_LINE> <INDENT> def __init__(self, feeder_dir, linger_time): <NEW_LINE> <INDENT> self.feeder_dir = str(feeder_dir) <NEW_LINE> self.linger_time = float(linger_time) <NEW_LINE> if not os.path.isdir(feeder_dir): <NEW_LINE> <INDENT> raise ValueError("Feeder directory must be an existing directory path; got '%s'" % self.feeder_dir) <NEW_LINE> <DEDENT> <DEDENT> def clean(self): <NEW_LINE> <INDENT> if self.linger_time < 0: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> now = time.time() <NEW_LINE> removed = [] <NEW_LINE> for fname in os.listdir(self.feeder_dir): <NEW_LINE> <INDENT> absname = os.path.join(self.feeder_dir, fname) <NEW_LINE> if os.path.isfile(absname) and now - os.stat(absname).st_mtime > self.linger_time: <NEW_LINE> <INDENT> os.remove(absname) <NEW_LINE> removed.append(fname) <NEW_LINE> <DEDENT> <DEDENT> removed.sort() <NEW_LINE> return removed | Abstract subclass of Feeder that provides a "delete after delay" clean() method.
| 6259908ddc8b845886d55217 |
class UsageError(Exception): <NEW_LINE> <INDENT> pass | Exception used to detect an invalid usage error. | 6259908d167d2b6e312b83c7 |
class RequestContext(context.RequestContext): <NEW_LINE> <INDENT> def __init__(self, auth_token=None, auth_url=None, domain_id=None, domain_name=None, user=None, user_id=None, project=None, project_id=None, is_admin=False, is_public_api=False, read_only=False, show_deleted=False, request_id=None, trust_id=None, auth_token_info=None, roles=None): <NEW_LINE> <INDENT> super(RequestContext, self).__init__(auth_token=auth_token, user=user, tenant=project, is_admin=is_admin, read_only=read_only, show_deleted=show_deleted, request_id=request_id, roles=roles) <NEW_LINE> self.is_public_api = is_public_api <NEW_LINE> self.user_id = user_id <NEW_LINE> self.project = project <NEW_LINE> self.project_id = project_id <NEW_LINE> self.domain_id = domain_id <NEW_LINE> self.domain_name = domain_name <NEW_LINE> self.auth_url = auth_url <NEW_LINE> self.auth_token_info = auth_token_info <NEW_LINE> self.trust_id = trust_id <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> return {'auth_token': self.auth_token, 'auth_url': self.auth_url, 'domain_id': self.domain_id, 'domain_name': self.domain_name, 'user': self.user, 'user_id': self.user_id, 'project': self.project, 'project_id': self.project_id, 'is_admin': self.is_admin, 'is_public_api': self.is_public_api, 'read_only': self.read_only, 'show_deleted': self.show_deleted, 'request_id': self.request_id, 'trust_id': self.trust_id, 'auth_token_info': self.auth_token_info, 'roles': self.roles} <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, values): <NEW_LINE> <INDENT> return cls(**values) | Extends security contexts from the OpenStack common library. | 6259908d091ae356687068a3 |
class LocalVar(Assignment): <NEW_LINE> <INDENT> subparts = [('names', NameList), ('values', ExpressionList)] <NEW_LINE> template = 'local {names} = {values}' <NEW_LINE> def render(self, wrapper=empty_wrapper): <NEW_LINE> <INDENT> if len(self) == 1: <NEW_LINE> <INDENT> self.template = LocalVar.template.replace('= {values}', '') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.template = LocalVar.template <NEW_LINE> <DEDENT> return super(LocalVar, self).render(wrapper) | Variable declaration with "local" modifier. | 6259908d5fcc89381b266f8d |
class CustomUser(User,models.Model): <NEW_LINE> <INDENT> core_num=models.IntegerField(default=1) <NEW_LINE> mem_limit=models.IntegerField(default=256) <NEW_LINE> vms=models.ManyToManyField(VM) <NEW_LINE> objects = UserManager() | User with app settings. | 6259908d656771135c48ae60 |
class ApiEndpoints(Resource): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> return {'paths': sorted(_resource_paths)} | Implementation of / REST API call. | 6259908dadb09d7d5dc0c1b9 |
class RecursivePermDirFixture(fixtures.Fixture): <NEW_LINE> <INDENT> def __init__(self, directory, perms): <NEW_LINE> <INDENT> super(RecursivePermDirFixture, self).__init__() <NEW_LINE> self.directory = directory <NEW_LINE> self.least_perms = perms <NEW_LINE> <DEDENT> def _setUp(self): <NEW_LINE> <INDENT> previous_directory = None <NEW_LINE> current_directory = self.directory <NEW_LINE> while previous_directory != current_directory: <NEW_LINE> <INDENT> perms = os.stat(current_directory).st_mode <NEW_LINE> if perms & self.least_perms != self.least_perms: <NEW_LINE> <INDENT> os.chmod(current_directory, perms | self.least_perms) <NEW_LINE> <DEDENT> previous_directory = current_directory <NEW_LINE> current_directory = os.path.dirname(current_directory) | Ensure at least perms permissions on directory and ancestors. | 6259908d7cff6e4e811b76a2 |
@attr.s <NEW_LINE> class GitTreeStructure(GitHubBase): <NEW_LINE> <INDENT> _SCHEMA: typing.ClassVar[Schema] = schemas.GIT_TREE_STRUCTURE_SCHEMA <NEW_LINE> mode = attr.ib(type=int) <NEW_LINE> path = attr.ib(type=str) <NEW_LINE> sha = attr.ib(type=str) <NEW_LINE> type = attr.ib(type=str) <NEW_LINE> size = attr.ib(type=int, default=None) <NEW_LINE> url = attr.ib(type=str, default=None) | "Git tree structure. | 6259908d5fcc89381b266f8e |
class FormSpider(CrawlSpider): <NEW_LINE> <INDENT> name = 'form' <NEW_LINE> def __init__(self, urls, crawl, auth, *args, **kwargs): <NEW_LINE> <INDENT> if crawl: <NEW_LINE> <INDENT> link_extractor = LinkExtractor() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> link_extractor = LinkExtractor(deny=('.*')) <NEW_LINE> <DEDENT> self.rules = [ Rule(link_extractor, callback='parse_page', follow=crawl) ] <NEW_LINE> self.parse_start_url = self.parse_page <NEW_LINE> super().__init__(*args, **kwargs) <NEW_LINE> if auth: <NEW_LINE> <INDENT> auth_split = auth.split(':') <NEW_LINE> self.http_user = auth_split[0] <NEW_LINE> self.http_pass = auth_split[1] <NEW_LINE> <DEDENT> urls = list(map(default_scheme, urls)) <NEW_LINE> domains = list(set(map(get_domain, urls))) <NEW_LINE> self.start_urls = urls <NEW_LINE> self.allowed_domains = domains <NEW_LINE> <DEDENT> def parse_page(self, response): <NEW_LINE> <INDENT> return [self.parse_form(form, response) for form in response.xpath('//form')] <NEW_LINE> <DEDENT> def parse_form(self, selector, response): <NEW_LINE> <INDENT> f = FormLoader(selector=selector) <NEW_LINE> f.add_value('url', response.url) <NEW_LINE> f.add_xpath('action', '@action') <NEW_LINE> f.add_value( 'inputs', [self.parse_input(input_field) for input_field in selector.xpath('.//input') if self.is_input_field(input_field)] ) <NEW_LINE> return f.load_item() <NEW_LINE> <DEDENT> def parse_input(self, selector): <NEW_LINE> <INDENT> i = InputLoader(selector=selector) <NEW_LINE> i.add_xpath('type', '@type') <NEW_LINE> i.add_xpath('name', '@name') <NEW_LINE> i.add_xpath('id', '@id') <NEW_LINE> return i.load_item() <NEW_LINE> <DEDENT> def is_input_field(self, input_field): <NEW_LINE> <INDENT> input_type = next(iter(input_field.xpath('@type').extract()), None) <NEW_LINE> return input_type not in ['hidden', 'submit'] | Form spider | 6259908d60cbc95b06365b98 |
class PointSelOperator: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.delta = [] <NEW_LINE> return <NEW_LINE> <DEDENT> def operate(self, draw=True, override=False): <NEW_LINE> <INDENT> self.delta = [] <NEW_LINE> if not draw: <NEW_LINE> <INDENT> return self.delta <NEW_LINE> <DEDENT> return self.delta <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __exit__(self, type, value, traceback): <NEW_LINE> <INDENT> if type(value) is not None: <NEW_LINE> <INDENT> logging.warning("Operator Failed, rewinding") <NEW_LINE> self.unwind() <NEW_LINE> <DEDENT> self.delta = [] <NEW_LINE> <DEDENT> def is_oneshot(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def unwind(self): <NEW_LINE> <INDENT> raise Exception("Unwind: Unimplemented") | Operator to select a point in the map | 6259908d3346ee7daa338492 |
class PushUrlCacheRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Urls = None <NEW_LINE> self.SubAppId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Urls = params.get("Urls") <NEW_LINE> self.SubAppId = params.get("SubAppId") | PushUrlCache request structure.
| 6259908d656771135c48ae61 |
class WebsocketClient(BaseClient): <NEW_LINE> <INDENT> def __init__(self, currency, secret, config): <NEW_LINE> <INDENT> BaseClient.__init__(self, currency, secret, config) <NEW_LINE> <DEDENT> def _recv_thread_func(self): <NEW_LINE> <INDENT> reconnect_time = 5 <NEW_LINE> use_ssl = self.config.get_bool("gox", "use_ssl") <NEW_LINE> wsp = {True: "wss://", False: "ws://"}[use_ssl] <NEW_LINE> while not self._terminating: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ws_url = wsp + self.WEBSOCKET_HOST + "/mtgox?Currency=" + self.currency <NEW_LINE> self.debug("*** Hint: connection problems? try: use_plain_old_websocket=False") <NEW_LINE> self.debug("trying plain old Websocket: %s ... " % ws_url) <NEW_LINE> self.socket = websocket.WebSocket() <NEW_LINE> self.socket.connect(ws_url) <NEW_LINE> self._time_last_received = time.time() <NEW_LINE> self.connected = True <NEW_LINE> self.debug("connected, subscribing needed channels") <NEW_LINE> self.channel_subscribe() <NEW_LINE> reconnect_time = 5 <NEW_LINE> self.debug("waiting for data...") <NEW_LINE> while not self._terminating: <NEW_LINE> <INDENT> str_json = self.socket.recv() <NEW_LINE> self._time_last_received = time.time() <NEW_LINE> if str_json[0] == "{": <NEW_LINE> <INDENT> self.signal_recv(self, (str_json)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> self.connected = False <NEW_LINE> if not self._terminating: <NEW_LINE> <INDENT> self.debug(exc, "reconnecting in %i seconds..." % reconnect_time) <NEW_LINE> if self.socket: <NEW_LINE> <INDENT> self.socket.close() <NEW_LINE> <DEDENT> time.sleep(reconnect_time) <NEW_LINE> reconnect_time = int(reconnect_time * 1.2) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def send(self, json_str): <NEW_LINE> <INDENT> self._try_send_raw(json_str) | this implements a connection to MtGox through the older (but faster)
websocket protocol. Unfortunately it's just as unreliable as socket.io. | 6259908d3617ad0b5ee07db2 |
class Compiler(object): <NEW_LINE> <INDENT> backend_extension = '.stoneg' <NEW_LINE> def __init__(self, api, backend_module, backend_args, build_path, clean_build=False): <NEW_LINE> <INDENT> self._logger = logging.getLogger('stone.compiler') <NEW_LINE> self.api = api <NEW_LINE> self.backend_module = backend_module <NEW_LINE> self.backend_args = backend_args <NEW_LINE> self.build_path = build_path <NEW_LINE> if clean_build and os.path.exists(self.build_path): <NEW_LINE> <INDENT> logging.info('Cleaning existing build directory %s...', self.build_path) <NEW_LINE> shutil.rmtree(self.build_path) <NEW_LINE> <DEDENT> <DEDENT> def build(self): <NEW_LINE> <INDENT> if os.path.exists(self.build_path) and not os.path.isdir(self.build_path): <NEW_LINE> <INDENT> self._logger.error('Output path must be a folder if it already exists') <NEW_LINE> return <NEW_LINE> <DEDENT> Compiler._mkdir(self.build_path) <NEW_LINE> self._execute_backend_on_spec() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _mkdir(path): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.makedirs(path) <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> if e.errno != 17: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def is_stone_backend(cls, path): <NEW_LINE> <INDENT> path_without_ext, _ = os.path.splitext(path) <NEW_LINE> _, second_ext = os.path.splitext(path_without_ext) <NEW_LINE> return second_ext == cls.backend_extension <NEW_LINE> <DEDENT> def _execute_backend_on_spec(self): <NEW_LINE> <INDENT> api_no_aliases_cache = None <NEW_LINE> for attr_key in dir(self.backend_module): <NEW_LINE> <INDENT> attr_value = getattr(self.backend_module, attr_key) <NEW_LINE> if (inspect.isclass(attr_value) and issubclass(attr_value, Backend) and not inspect.isabstract(attr_value)): <NEW_LINE> <INDENT> self._logger.info('Running backend: %s', attr_value.__name__) <NEW_LINE> backend = attr_value(self.build_path, self.backend_args) <NEW_LINE> if backend.preserve_aliases: <NEW_LINE> <INDENT> api = self.api <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not api_no_aliases_cache: <NEW_LINE> <INDENT> api_no_aliases_cache = remove_aliases_from_api(self.api) <NEW_LINE> <DEDENT> api = api_no_aliases_cache <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> backend.generate(api) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise BackendException( attr_value.__name__, traceback.format_exc()[:-1]) | Applies a collection of backends found in a single backend module to an
API specification. | 6259908d283ffb24f3cf5502 |
class FieldQuery(Query): <NEW_LINE> <INDENT> def __init__(self, field, pattern): <NEW_LINE> <INDENT> self.field = field <NEW_LINE> self.pattern = pattern | An abstract query that searches in a specific field for a
pattern. | 6259908d26068e7796d4e5a4 |
class SimpleTypeError(ValidationError, TypeError): <NEW_LINE> <INDENT> def __init__( self, value, target_type, from_err=None, msg=None, ): <NEW_LINE> <INDENT> self.value = value <NEW_LINE> self.target_type = target_type <NEW_LINE> self.from_err = from_err <NEW_LINE> self._msg = msg <NEW_LINE> if msg is None: <NEW_LINE> <INDENT> msg = self.default_message(value, target_type) <NEW_LINE> <DEDENT> super().__init__(msg) <NEW_LINE> <DEDENT> def default_message(self, value, target_type): <NEW_LINE> <INDENT> return f'value {value!r} incompatible with {target_type!r}' <NEW_LINE> <DEDENT> def __reduce__(self): <NEW_LINE> <INDENT> args = ( self.value, self.target_type, self.from_err, self._msg, ) <NEW_LINE> return (type(self), args) | Encountered a value with an incorrect simple type. | 6259908de1aae11d1e7cf646 |
class DispatcherMiddleware(object): <NEW_LINE> <INDENT> def __init__(self, app): <NEW_LINE> <INDENT> self.lock = Lock() <NEW_LINE> self.config = app.config <NEW_LINE> self.wsgi_app = app.wsgi_app <NEW_LINE> self.realm = self.config.get('AUTH_REALM', 'Basic realm="Login Required"') <NEW_LINE> self.git_root = os.path.abspath(app.config.get('GIT_ROOT', './repos')) <NEW_LINE> self.httpauth = BasicAuth(self.realm, self.authenticate) <NEW_LINE> self.git_app = None <NEW_LINE> <DEDENT> def get_project(self, environ): <NEW_LINE> <INDENT> from .models.project import Project <NEW_LINE> path = environ.get('PATH_INFO') <NEW_LINE> match = re.match(r'^/(?P<owner_name>\w+)/(?P<project_name>\w+)(\.git)?(/.+)?', path) <NEW_LINE> if match: <NEW_LINE> <INDENT> owner_name = match.group('owner_name') <NEW_LINE> project_name = match.group('project_name') <NEW_LINE> return Project.select().where( Project.owner_name == owner_name, Project.name == project_name).get() <NEW_LINE> <DEDENT> <DEDENT> def is_push(self, environ): <NEW_LINE> <INDENT> path = environ.get('PATH_INFO') <NEW_LINE> query_string = environ['QUERY_STRING'] <NEW_LINE> return ('service=git-receive-pack' in query_string or '/git-receive-pack' in environ['PATH_INFO']) <NEW_LINE> <DEDENT> def authenticate(self, environ, username, password): <NEW_LINE> <INDENT> from .models.user import User <NEW_LINE> from .models.permission import Permission, READ_PERM, WRITE_PERM <NEW_LINE> project = self.get_project(environ) <NEW_LINE> user = User.authenticate(username, password) <NEW_LINE> perm = WRITE_PERM if self.is_push(environ) else READ_PERM <NEW_LINE> if Permission.check_permission(project, user, perm): <NEW_LINE> <INDENT> environ['GIT_PATH_INFO'] = os.path.join(self.git_root, project.path_info) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def get_application(self, environ): <NEW_LINE> <INDENT> user_agent = environ.get('HTTP_USER_AGENT') <NEW_LINE> if user_agent and 'git' not in user_agent: <NEW_LINE> <INDENT> return self.wsgi_app <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if environ.get('REMOTE_USER') is None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> res = self.httpauth(environ) <NEW_LINE> <DEDENT> except Project.DoesNotExist: <NEW_LINE> <INDENT> return NotFound() <NEW_LINE> <DEDENT> if not isinstance(res, basestring): <NEW_LINE> <INDENT> return res <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> environ['REMOTE_USER'] = res <NEW_LINE> environ['AUTH_TYPE'] = self.httpauth.authtype <NEW_LINE> <DEDENT> <DEDENT> with self.lock: <NEW_LINE> <INDENT> if not self.git_app: <NEW_LINE> <INDENT> self.git_app = assemble_WSGI_git_app(self.git_root) <NEW_LINE> <DEDENT> return self.git_app <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __call__(self, environ, start_response): <NEW_LINE> <INDENT> application = self.get_application(environ) <NEW_LINE> return application(environ, start_response) | Dispatch http request to flask app or git app | 6259908d5fcc89381b266f8f |
class MatchTimeBase(models.Model): <NEW_LINE> <INDENT> start = TimeField() <NEW_LINE> interval = models.IntegerField() <NEW_LINE> count = models.IntegerField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> <DEDENT> def rrule_dtstart(self): <NEW_LINE> <INDENT> return timezone.now().replace( hour=self.start.hour, minute=self.start.minute, second=0 ) <NEW_LINE> <DEDENT> def rrule_kwargs(self): <NEW_LINE> <INDENT> return { "dtstart": self.rrule_dtstart(), "interval": self.interval, "count": self.count, } <NEW_LINE> <DEDENT> def rrule(self): <NEW_LINE> <INDENT> return rrule(MINUTELY, **self.rrule_kwargs()) | Abstract base class that stores enough information to create a recurring
rule set. | 6259908daad79263cf430415 |
class pyIndIterator(object): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, begin: 'vector< simuPOP::Individual,std::allocator< simuPOP::Individual > >::iterator const', end: 'vector< simuPOP::Individual,std::allocator< simuPOP::Individual > >::iterator const', allInds: 'bool', func: 'simuPOP::vspFunctor'): <NEW_LINE> <INDENT> _simuPOP_la.pyIndIterator_swiginit(self, _simuPOP_la.new_pyIndIterator(begin, end, allInds, func)) <NEW_LINE> <DEDENT> __swig_destroy__ = _simuPOP_la.delete_pyIndIterator <NEW_LINE> def next(self) -> "simuPOP::Individual &": <NEW_LINE> <INDENT> return _simuPOP_la.pyIndIterator_next(self) | Details:
this class implements a Python iterator class that can be used to
iterate through individuals in a (sub)population. If allInds is
true, visibility of individuals will not be checked. Otherwise, a
functor will be used to check if individuals belong to a specified
virtual subpopulation. An instance of this class is returned by
population::Individuals() and Population::Individuals(subPop) | 6259908d7cff6e4e811b76a6 |
class ApplicationGesture(Enum,IComparable,IFormattable,IConvertible): <NEW_LINE> <INDENT> def __eq__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __format__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __ge__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __gt__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __le__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __lt__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __ne__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __reduce_ex__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __str__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> AllGestures=None <NEW_LINE> ArrowDown=None <NEW_LINE> ArrowLeft=None <NEW_LINE> ArrowRight=None <NEW_LINE> ArrowUp=None <NEW_LINE> Check=None <NEW_LINE> ChevronDown=None <NEW_LINE> ChevronLeft=None <NEW_LINE> ChevronRight=None <NEW_LINE> ChevronUp=None <NEW_LINE> Circle=None <NEW_LINE> Curlicue=None <NEW_LINE> DoubleCircle=None <NEW_LINE> DoubleCurlicue=None <NEW_LINE> DoubleTap=None <NEW_LINE> Down=None <NEW_LINE> DownLeft=None <NEW_LINE> DownLeftLong=None <NEW_LINE> DownRight=None <NEW_LINE> DownRightLong=None <NEW_LINE> DownUp=None <NEW_LINE> Exclamation=None <NEW_LINE> Left=None <NEW_LINE> LeftDown=None <NEW_LINE> LeftRight=None <NEW_LINE> LeftUp=None <NEW_LINE> NoGesture=None <NEW_LINE> Right=None <NEW_LINE> RightDown=None <NEW_LINE> RightLeft=None <NEW_LINE> RightUp=None <NEW_LINE> ScratchOut=None <NEW_LINE> SemicircleLeft=None <NEW_LINE> SemicircleRight=None <NEW_LINE> Square=None <NEW_LINE> Star=None <NEW_LINE> Tap=None <NEW_LINE> Triangle=None <NEW_LINE> Up=None <NEW_LINE> UpDown=None <NEW_LINE> UpLeft=None <NEW_LINE> UpLeftLong=None <NEW_LINE> UpRight=None <NEW_LINE> UpRightLong=None <NEW_LINE> value__=None | Specifies the available application-specific gesture.
enum ApplicationGesture,values: AllGestures (0),ArrowDown (61497),ArrowLeft (61498),ArrowRight (61499),ArrowUp (61496),Check (61445),ChevronDown (61489),ChevronLeft (61490),ChevronRight (61491),ChevronUp (61488),Circle (61472),Curlicue (61456),DoubleCircle (61473),DoubleCurlicue (61457),DoubleTap (61681),Down (61529),DownLeft (61546),DownLeftLong (61542),DownRight (61547),DownRightLong (61543),DownUp (61537),Exclamation (61604),Left (61530),LeftDown (61549),LeftRight (61538),LeftUp (61548),NoGesture (61440),Right (61531),RightDown (61551),RightLeft (61539),RightUp (61550),ScratchOut (61441),SemicircleLeft (61480),SemicircleRight (61481),Square (61443),Star (61444),Tap (61680),Triangle (61442),Up (61528),UpDown (61536),UpLeft (61544),UpLeftLong (61540),UpRight (61545),UpRightLong (61541) | 6259908d4c3428357761bf20 |
class classproperty(object): <NEW_LINE> <INDENT> def __init__(self, fget): <NEW_LINE> <INDENT> self.fget = fget <NEW_LINE> <DEDENT> def __get__(self, owner_self, owner_cls): <NEW_LINE> <INDENT> return self.fget(owner_cls) | Decorator which allows read only class properties | 6259908dd8ef3951e32c8c8f |
class PlayerPaddle(Entity): <NEW_LINE> <INDENT> CTYPE = 50 <NEW_LINE> def __init__(self, uuid, host=None, port=None, number=None, foe=None, **kwargs): <NEW_LINE> <INDENT> super().__init__(uuid, mass=spot_get('sv_paddle_mass'), size=spot_get('paddle_size'), **kwargs) <NEW_LINE> self.box.elasticity = 1.0 <NEW_LINE> self.box.collision_type = self.CTYPE <NEW_LINE> self.velocity_limit = spot_get('sv_paddle_max_velocity') <NEW_LINE> self.host = host <NEW_LINE> self.port = port <NEW_LINE> self.number = number <NEW_LINE> self.foe = foe <NEW_LINE> self.ready = False <NEW_LINE> self.score = 0 | Paddle as an entity | 6259908d5fdd1c0f98e5fbdc |
class Config(FlaskConfig): <NEW_LINE> <INDENT> def from_mapping(self, *mapping, **kwargs): <NEW_LINE> <INDENT> mappings = [] <NEW_LINE> if len(mapping) == 1: <NEW_LINE> <INDENT> if hasattr(mapping[0], 'items'): <NEW_LINE> <INDENT> mappings.append(list(mapping[0].items())) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mappings.append(mapping[0]) <NEW_LINE> <DEDENT> <DEDENT> elif len(mapping) > 1: <NEW_LINE> <INDENT> raise TypeError( 'expected at most 1 positional argument, got %d' % len(mapping) ) <NEW_LINE> <DEDENT> deep_update = kwargs.pop('_deep_update', False) <NEW_LINE> mappings.append(list(kwargs.items())) <NEW_LINE> for mapping in mappings: <NEW_LINE> <INDENT> if deep_update: <NEW_LINE> <INDENT> deep_update_dict(self, dict((k.upper(), v) for (k, v) in mapping)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for (key, value) in mapping: <NEW_LINE> <INDENT> self[key.upper()] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def from_file(self, filename, load=None, silent=False, deep_update=False): <NEW_LINE> <INDENT> if not load: <NEW_LINE> <INDENT> if filename.endswith(".py"): <NEW_LINE> <INDENT> return self.from_pyfile(filename, silent) <NEW_LINE> <DEDENT> if filename.endswith(".js") or filename.endswith(".json"): <NEW_LINE> <INDENT> load = json.load <NEW_LINE> <DEDENT> if filename.endswith(".yml") or filename.endswith(".yaml"): <NEW_LINE> <INDENT> load = yaml.safe_load <NEW_LINE> <DEDENT> <DEDENT> filename = os.path.join(self.root_path, filename) <NEW_LINE> try: <NEW_LINE> <INDENT> with open(filename) as f: <NEW_LINE> <INDENT> obj = load(f) <NEW_LINE> <DEDENT> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> if silent and e.errno in (errno.ENOENT, errno.EISDIR): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> e.strerror = f"Unable to load configuration file ({e.strerror})" <NEW_LINE> raise <NEW_LINE> <DEDENT> return self.from_mapping(obj, _deep_update=deep_update) <NEW_LINE> <DEDENT> def from_json(self, filename, **kwargs): <NEW_LINE> <INDENT> return self.from_file(filename, json.load, **kwargs) <NEW_LINE> <DEDENT> def from_yaml(self, filename, **kwargs): <NEW_LINE> <INDENT> return self.from_file(filename, yaml.safe_load, **kwargs) | Subclass of Flask's Config class to add support to load from YAML file
| 6259908d60cbc95b06365b9a |
class CPU(models.Model): <NEW_LINE> <INDENT> asset = models.OneToOneField('Asset', on_delete=models.SET_NULL, null=True) <NEW_LINE> cpu_model = models.CharField(u'CPU型号', max_length=128, blank=True) <NEW_LINE> cpu_count = models.SmallIntegerField(u'物理cpu个数') <NEW_LINE> cpu_core_count = models.SmallIntegerField(u'cpu核数') <NEW_LINE> memo = models.TextField(u'备注', null=True, blank=True) <NEW_LINE> create_date = models.DateTimeField(auto_now_add=True) <NEW_LINE> update_date = models.DateTimeField(blank=True, null=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'CPU部件' <NEW_LINE> verbose_name_plural = "CPU部件" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.cpu_model | CPU component | 6259908df9cc0f698b1c60fe |
class CompareResponse: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swaggerTypes = { 'result': 'CompareResult', 'status': 'str', 'error_message': 'str' } <NEW_LINE> self.result = None <NEW_LINE> self.status = None <NEW_LINE> self.error_message = None | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259908d7cff6e4e811b76a8 |
@mark.django_db <NEW_LINE> class TestEnterpriseConfig(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestEnterpriseConfig, self).setUp() <NEW_LINE> self.post_save_mock = mock.Mock() <NEW_LINE> patcher = mock.patch('enterprise.signals.handle_user_post_save', self.post_save_mock) <NEW_LINE> patcher.start() <NEW_LINE> self.app_config = enterprise.apps.EnterpriseConfig('enterprise', enterprise) <NEW_LINE> self.addCleanup(patcher.stop) <NEW_LINE> <DEDENT> def test_ready_connects_user_post_save_handler(self): <NEW_LINE> <INDENT> self.app_config.ready() <NEW_LINE> user = UserFactory() <NEW_LINE> assert self.post_save_mock.call_count == 1 <NEW_LINE> call_args, call_kwargs = self.post_save_mock.call_args_list[0] <NEW_LINE> assert call_args == () <NEW_LINE> assert call_kwargs["sender"] == User <NEW_LINE> assert call_kwargs["instance"] == user <NEW_LINE> assert call_kwargs["created"] <NEW_LINE> <DEDENT> def test_ready_does_not_fire_user_post_save_handler_for_other_models(self): <NEW_LINE> <INDENT> self.app_config.ready() <NEW_LINE> EnterpriseCustomerFactory() <NEW_LINE> assert not self.post_save_mock.called <NEW_LINE> <DEDENT> def test_ready_disconnects_user_post_save_handler_for_migration(self): <NEW_LINE> <INDENT> self.app_config.ready() <NEW_LINE> pre_migrate.send(mock.Mock()) <NEW_LINE> UserFactory() <NEW_LINE> assert not self.post_save_mock.called | Test edx-enterprise app config. | 6259908da05bb46b3848bf58 |
class internal_open: <NEW_LINE> <INDENT> def openers(self, filename, names, extensions, mode): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for extension in extensions: <NEW_LINE> <INDENT> full_filename = filename+extension <NEW_LINE> dir = os.path.splitext(full_filename)[1][1:] <NEW_LINE> result.append(lambda: builtinopen(os.path.join(os.path.dirname(__file__), "data", dir, full_filename), mode)) <NEW_LINE> <DEDENT> return result | locates files within the PyX data tree (via an open relative to the path of this file) | 6259908d63b5f9789fe86dd0 |
class UpdateTutorial(webapp2.RequestHandler): <NEW_LINE> <INDENT> def put(self): <NEW_LINE> <INDENT> data = json.loads(self.request.body) <NEW_LINE> if_view = data['ifView'] <NEW_LINE> uid = users.get_current_user().nickname() <NEW_LINE> user_query = db.GqlQuery(r"SELECT * FROM UsersHistory WHERE name = :1", str(uid)) <NEW_LINE> record = user_query[0] <NEW_LINE> record.tutorial = if_view <NEW_LINE> record.put() | Update whether the user has viewed the tutorial in the user DB | 6259908d60cbc95b06365b9b
class IPAddress(Base): <NEW_LINE> <INDENT> def __init__(self, ipv4=True, ipv6=False, message=None): <NEW_LINE> <INDENT> kwargs = { 'ipv4': ipv4, 'ipv6': ipv6, 'message': message } <NEW_LINE> super(IPAddress, self).__init__(wtf_ip_address, **kwargs) | Validates an IP address.
:param ipv4:
If True, accept IPv4 addresses as valid (default True)
:param ipv6:
If True, accept IPv6 addresses as valid (default False)
:param message:
Error message to raise in case of a validation error. | 6259908d3346ee7daa338495 |
class rule_009(blank_line_above_line_starting_with_token): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> blank_line_above_line_starting_with_token.__init__(self, 'case', '009', [token.end_keyword]) | This rule checks for blank lines or comments above the **end** keyword.
|configuring_blank_lines_link|
**Violation**
.. code-block:: vhdl
when others =>
null;
end case;
**Fix**
.. code-block:: vhdl
when others =>
null;
end case; | 6259908d656771135c48ae64 |
class PhoneNumber(object): <NEW_LINE> <INDENT> deserialized_types = { 'country_code': 'str', 'phone_number': 'str' } <NEW_LINE> attribute_map = { 'country_code': 'countryCode', 'phone_number': 'phoneNumber' } <NEW_LINE> def __init__(self, country_code=None, phone_number=None): <NEW_LINE> <INDENT> self.__discriminator_value = None <NEW_LINE> self.country_code = country_code <NEW_LINE> self.phone_number = phone_number <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.deserialized_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x.value if isinstance(x, Enum) else x, value )) <NEW_LINE> <DEDENT> elif isinstance(value, Enum): <NEW_LINE> <INDENT> result[attr] = value.value <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else (item[0], item[1].value) if isinstance(item[1], Enum) else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, PhoneNumber): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | :param country_code:
:type country_code: (optional) str
:param phone_number:
:type phone_number: (optional) str | 6259908ddc8b845886d55221 |
class Event: <NEW_LINE> <INDENT> def __init__(self, eventClass): <NEW_LINE> <INDENT> self.timestamp = time.time() <NEW_LINE> self.type = eventClass <NEW_LINE> <DEDENT> def id(self): <NEW_LINE> <INDENT> return "%d" % self.timestamp <NEW_LINE> <DEDENT> def getTimestamp(self): <NEW_LINE> <INDENT> return str(self.timestamp) <NEW_LINE> <DEDENT> def data(self): <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def description(self): <NEW_LINE> <INDENT> return "[%s] Abstract event" % self.id() | Base event class | 6259908d4c3428357761bf24 |
class ExtractableI18NDirective(I18NDirective): <NEW_LINE> <INDENT> def extract(self, stream, comment_stack): <NEW_LINE> <INDENT> raise NotImplementedError | Simple interface for directives to support messages extraction. | 6259908d5fcc89381b266f92 |
class SpecificationHandler(MPlaneHandler): <NEW_LINE> <INDENT> def initialize(self, supervisor): <NEW_LINE> <INDENT> self._supervisor = supervisor <NEW_LINE> self.dn = get_dn(self._supervisor, self.request) <NEW_LINE> self._supervisor._dn_to_ip[self.dn] = self.request.remote_ip <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> if self.dn.find("Components") == -1: <NEW_LINE> <INDENT> self._respond_plain_text(401, "Not Authorized. Only Components can use this function") <NEW_LINE> return <NEW_LINE> <DEDENT> if self.dn not in self._supervisor._registered_dn: <NEW_LINE> <INDENT> self._respond_plain_text(428) <NEW_LINE> <DEDENT> specs = self._supervisor._specifications.pop(self.dn, []) <NEW_LINE> self.set_status(200) <NEW_LINE> self.set_header("Content-Type", "application/x-mplane+json") <NEW_LINE> msg = "" <NEW_LINE> for spec in specs: <NEW_LINE> <INDENT> msg = msg + mplane.model.unparse_json(spec) + "," <NEW_LINE> mplane.utils.add_value_to(self._supervisor._receipts, self.dn, mplane.model.Receipt(specification=spec)) <NEW_LINE> mplane.utils.print_then_prompt("Specification " + spec.get_label() + " successfully pulled by " + self.dn) <NEW_LINE> <DEDENT> msg = "[" + msg[:-1].replace("\n","") + "]" <NEW_LINE> self.write(msg) <NEW_LINE> self.finish() | Exposes the specifications, that will be periodically pulled by the
components | 6259908d60cbc95b06365b9c |
class ForumEntry(db.Model, SerializableObject, TextRendererMixin): <NEW_LINE> <INDENT> __tablename__ = 'forum_entry' <NEW_LINE> query = db.session.query_property(ForumEntryQuery) <NEW_LINE> object_type = 'forum.entry' <NEW_LINE> public_fields = ('entry_id', 'discriminator', 'author', 'date_created', 'date_active', 'score', 'text', 'votes') <NEW_LINE> entry_id = db.Column(db.Integer, primary_key=True) <NEW_LINE> discriminator = db.Column('type', db.Unicode(12)) <NEW_LINE> author_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False) <NEW_LINE> date_created = db.Column(db.DateTime, nullable=False, default=datetime.utcnow) <NEW_LINE> date_active = db.Column(db.DateTime, nullable=False, default=datetime.utcnow) <NEW_LINE> score = db.Column(db.Integer, nullable=False, default=0) <NEW_LINE> text = db.Column(db.Text, nullable=False) <NEW_LINE> view_count = db.Column(db.Integer, default=0, nullable=False) <NEW_LINE> author = db.relationship(User, lazy='joined', innerjoin=True) <NEW_LINE> votes = db.relationship('Vote', backref='entry', extension=ForumEntryVotesExtension()) <NEW_LINE> __mapper_args__ = {'polymorphic_on': discriminator} <NEW_LINE> def touch(self): <NEW_LINE> <INDENT> db.atomic_add(self, 'view_count', 1) <NEW_LINE> <DEDENT> def get_vote(self, user): <NEW_LINE> <INDENT> return Vote.query.filter_by(user=user, entry=self).first() | The base class of a :class:`Question` or :class:`Answer`, which contains
some general information about the author and the creation date, as well as
the actual text and the votings. | 6259908df9cc0f698b1c6100 |
class MapsetDialog(SimpleDialog): <NEW_LINE> <INDENT> def __init__(self, parent, title=_("Select mapset in GRASS location"), location=None): <NEW_LINE> <INDENT> SimpleDialog.__init__(self, parent, title) <NEW_LINE> if location: <NEW_LINE> <INDENT> self.SetTitle(self.GetTitle() + ' <%s>' % location) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.SetTitle( self.GetTitle() + ' <%s>' % grass.gisenv()['LOCATION_NAME']) <NEW_LINE> <DEDENT> self.element = MapsetSelect( parent=self.panel, id=wx.ID_ANY, skipCurrent=True, size=globalvar.DIALOG_GSELECT_SIZE, validator=SimpleValidator( callback=self.ValidatorCallback)) <NEW_LINE> self.element.SetFocus() <NEW_LINE> self.warning = _("Name of mapset is missing.") <NEW_LINE> self._layout() <NEW_LINE> self.SetMinSize(self.GetSize()) <NEW_LINE> <DEDENT> def _layout(self): <NEW_LINE> <INDENT> self.dataSizer.Add(StaticText(parent=self.panel, id=wx.ID_ANY, label=_("Name of mapset:")), proportion=0, flag=wx.ALL, border=1) <NEW_LINE> self.dataSizer.Add(self.element, proportion=0, flag=wx.EXPAND | wx.ALL, border=1) <NEW_LINE> self.panel.SetSizer(self.sizer) <NEW_LINE> self.sizer.Fit(self) <NEW_LINE> <DEDENT> def GetMapset(self): <NEW_LINE> <INDENT> return self.element.GetValue() | Dialog used to select mapset | 6259908d55399d3f0562817e |
class TryField(BaseReviewRequestField): <NEW_LINE> <INDENT> field_id = 'p2rb.autoland_try' <NEW_LINE> label = _('Try') <NEW_LINE> can_record_change_entry = True <NEW_LINE> _retrieve_error_txt = _('There was an error retrieving the try push.') <NEW_LINE> _waiting_txt = _('Waiting for the autoland to try request to execute, ' 'hold tight. If the try tree is closed autoland will ' 'retry your push for you until the tree opens.') <NEW_LINE> _autoland_problem = _('Autoland reported a problem: %s') <NEW_LINE> _job_url = 'https://treeherder.mozilla.org/#/jobs?repo=try&revision=%s' <NEW_LINE> def should_render(self, value): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def load_value(self, review_request_details): <NEW_LINE> <INDENT> return review_request_details.extra_data.get('p2rb.autoland_try') <NEW_LINE> <DEDENT> def get_change_entry_sections_html(self, info): <NEW_LINE> <INDENT> if 'new' not in info: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return [{ 'title': self.label, 'rendered_html': mark_safe(self.render_change_entry_html(info)), }] <NEW_LINE> <DEDENT> def render_change_entry_html(self, info): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> autoland_id = int(info['new'][0]) <NEW_LINE> <DEDENT> except (ValueError, TypeError): <NEW_LINE> <INDENT> logger.error('A malformed autoland_id was detected: %s' % info['new'][0]) <NEW_LINE> return self._retrieve_error_txt <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> ar = AutolandRequest.objects.get(pk=autoland_id) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> logger.error('An unknown autoland_id was detected: %s' % info['new'][0]) <NEW_LINE> return self._retrieve_error_txt <NEW_LINE> <DEDENT> if ar.last_known_status == AutolandEventLogEntry.REQUESTED: <NEW_LINE> <INDENT> return self._waiting_txt <NEW_LINE> <DEDENT> elif ar.last_known_status == AutolandEventLogEntry.PROBLEM: <NEW_LINE> <INDENT> return linebreaksbr(self._autoland_problem % ar.last_error_msg) <NEW_LINE> <DEDENT> elif ar.last_known_status == AutolandEventLogEntry.SERVED: <NEW_LINE> <INDENT> url = self._job_url % ar.repository_revision <NEW_LINE> template = get_template('mozreview/try_result.html') <NEW_LINE> return template.render(Context({'url': url})) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return linebreaksbr(self._retrieve_error_txt) | The field for kicking off Try builds and showing Try state.
This field allows a user to kick off a Try build for each unique
revision. Once kicked off, it shows the state of the most recent
Try build. | 6259908ddc8b845886d55223 |
@dataclass <NEW_LINE> class AppConfig: <NEW_LINE> <INDENT> application_name: str <NEW_LINE> repository_name: str <NEW_LINE> branch: str <NEW_LINE> build_environment: Environment <NEW_LINE> @classmethod <NEW_LINE> def from_raw_config(cls: Type[AppConfigClass], raw_config: Dict[str, Any]) -> AppConfigClass: <NEW_LINE> <INDENT> raw_config = cls.convert_to_cdk_constructs(raw_config) <NEW_LINE> return cls(**raw_config) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def convert_to_cdk_constructs(raw_config: Dict[str, Any]) -> Dict[str, Any]: <NEW_LINE> <INDENT> raw_build_environment = raw_config.pop('build_environment') <NEW_LINE> build_environment = Environment(**raw_build_environment) <NEW_LINE> raw_config.update(build_environment=build_environment) <NEW_LINE> return raw_config | Configuration of the application. | 6259908d5fdd1c0f98e5fbe2 |
class QueryProvider(IterProvider): <NEW_LINE> <INDENT> def __init__(self, db_path, query, params, queue_length=16): <NEW_LINE> <INDENT> def generator(): <NEW_LINE> <INDENT> for row in sqlite3.Connection(db_path).execute(query, params): <NEW_LINE> <INDENT> yield row <NEW_LINE> <DEDENT> <DEDENT> super().__init__(generator, queue_length) | Provides a database query selection to multiple threads | 6259908d7cff6e4e811b76ae |
class PostgreConnector(HostConnector): <NEW_LINE> <INDENT> name = "postgres" | Postgre Engine connector | 6259908d167d2b6e312b83ce |
class TermPathFollowEnv(path_follow_env.PathFollowEnv): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(TermPathFollowEnv, self).__init__() <NEW_LINE> <DEDENT> def term_reward(self, state): <NEW_LINE> <INDENT> xyz, _, _, _, _ = state <NEW_LINE> u = sum([(x-g)**2 for x, g in zip(xyz, self.goal_xyz)])**0.5 <NEW_LINE> v = sum([(x-g)**2 for x, g in zip(xyz, self.goal_xyz_next)])**0.5 <NEW_LINE> return -15.*u**2-u*v <NEW_LINE> <DEDENT> def terminal(self, state, term): <NEW_LINE> <INDENT> xyz, zeta, uvw, pqr = state <NEW_LINE> sq_err = [(x-g)**2 for x, g in zip(xyz, self.goal_xyz)] <NEW_LINE> mag = (sum(sq_err))**0.5 <NEW_LINE> if term == 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif mag >= self.max_dist: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif self.t*self.ctrl_dt >= self.T: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def step(self, action, term): <NEW_LINE> <INDENT> self.t += 1 <NEW_LINE> action = self.translate_action(action) <NEW_LINE> xyz, zeta, uvw, pqr = super(path_follow_env.PathFollowEnv, self).step(action) <NEW_LINE> sin_zeta = [sin(z) for z in zeta] <NEW_LINE> cos_zeta = [cos(z) for z in zeta] <NEW_LINE> curr_rpm = self.get_rpm() <NEW_LINE> normalized_rpm = [rpm/self.max_rpm for rpm in curr_rpm] <NEW_LINE> reward, info = self.reward(xyz, sin_zeta, cos_zeta, uvw, pqr, action) <NEW_LINE> term_rew = self.term_reward((xyz, sin_zeta, cos_zeta, uvw, pqr)) if term == 1 else 0. <NEW_LINE> if term == 1: self.next_goal() <NEW_LINE> done = self.terminal((xyz, zeta, uvw, pqr), term) <NEW_LINE> obs = self.get_state_obs((xyz, sin_zeta, cos_zeta, uvw, pqr, normalized_rpm)) <NEW_LINE> info.update({"term_rew" : term_rew}) <NEW_LINE> return obs, reward, done, info <NEW_LINE> <DEDENT> def next_goal(self): <NEW_LINE> <INDENT> if not self.goal >= len(self.goal_list_xyz)-1: <NEW_LINE> <INDENT> self.time_state = float(self.T) <NEW_LINE> self.t = 0 <NEW_LINE> self.goal += 1 <NEW_LINE> self.goal_xyz = self.goal_list_xyz[self.goal] <NEW_LINE> <DEDENT> if self.goal_next >= len(self.goal_list_xyz)-1: <NEW_LINE> <INDENT> self.goal_xyz_next = [0., 0., 0.] <NEW_LINE> self.goal_zeta_next = [0., 0., 0.] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.goal_next += 1 <NEW_LINE> self.goal_xyz_next = self.goal_list_xyz[self.goal_next] | Environment wrapper for training low-level flying skills. The aim is to sequentially fly to
two consecutive waypoints that are each uniformly sampled from the volume of a sphere. The
first sphere is centered on the starting point (0,0,0), and the second sphere is centered on
the point (xg,yg,zg). The agent is able to see both waypoints.
The aircraft has a deterministic starting state by default.
-- Sean Morrison | 6259908dbf627c535bcb313f |
class MLP(object): <NEW_LINE> <INDENT> def __init__(self, rng, input, n_in, n_hidden, n_out): <NEW_LINE> <INDENT> self.hiddenLayer = HiddenLayer( rng=rng, input=input, n_in=n_in, n_out=n_hidden, activation=T.tanh ) <NEW_LINE> self.logRegressionLayer = LogisticRegression( input=self.hiddenLayer.output, n_in=n_hidden, n_out=n_out ) <NEW_LINE> self.L1 = ( abs(self.hiddenLayer.W).sum() + abs(self.logRegressionLayer.W).sum() ) <NEW_LINE> self.L2_sqr = ( (self.hiddenLayer.W ** 2).sum() + (self.logRegressionLayer.W ** 2).sum() ) <NEW_LINE> self.negative_log_likelihood = ( self.logRegressionLayer.negative_log_likelihood ) <NEW_LINE> self.errors = self.logRegressionLayer.errors <NEW_LINE> self.params = self.hiddenLayer.params + self.logRegressionLayer.params <NEW_LINE> self.accs = self.hiddenLayer.accs + self.logRegressionLayer.accs <NEW_LINE> self.input = input | Multi-Layer Perceptron Class
A multilayer perceptron is a feedforward artificial neural network model
that has one layer or more of hidden units and nonlinear activations.
Intermediate layers usually have as activation function tanh or the
sigmoid function (defined here by a ``HiddenLayer`` class) while the
top layer is a softmax layer (defined here by a ``LogisticRegression``
class). | 6259908d63b5f9789fe86dd6 |
class X11(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin): <NEW_LINE> <INDENT> plugin_name = 'x11' <NEW_LINE> profiles = ('hardware', 'desktop') <NEW_LINE> files = ('/etc/X11',) <NEW_LINE> def setup(self): <NEW_LINE> <INDENT> self.add_copy_spec([ "/etc/X11", "/var/log/Xorg.*.log", "/var/log/XFree86.*.log", ]) <NEW_LINE> self.add_forbidden_path("/etc/X11/X") <NEW_LINE> self.add_forbidden_path("/etc/X11/fontpath.d") <NEW_LINE> self.add_cmd_output([ "glxinfo", "xrandr --verbose" ]) | X windowing system
| 6259908ddc8b845886d55227 |
class HashCommand(Command): <NEW_LINE> <INDENT> usage = '%prog [options] <file> ...' <NEW_LINE> ignore_require_venv = True <NEW_LINE> def add_options(self): <NEW_LINE> <INDENT> self.cmd_opts.add_option( '-a', '--algorithm', dest='algorithm', choices=STRONG_HASHES, action='store', default=FAVORITE_HASH, help='The hash algorithm to use: one of {}'.format( ', '.join(STRONG_HASHES))) <NEW_LINE> self.parser.insert_option_group(0, self.cmd_opts) <NEW_LINE> <DEDENT> def run(self, options, args): <NEW_LINE> <INDENT> if not args: <NEW_LINE> <INDENT> self.parser.print_usage(sys.stderr) <NEW_LINE> return ERROR <NEW_LINE> <DEDENT> algorithm = options.algorithm <NEW_LINE> for path in args: <NEW_LINE> <INDENT> write_output('%s:\n--hash=%s:%s', path, algorithm, _hash_of_file(path, algorithm)) <NEW_LINE> <DEDENT> return SUCCESS | Compute a hash of a local package archive.
These can be used with --hash in a requirements file to do repeatable
installs. | 6259908da05bb46b3848bf5c |
class InvenioSequenceGenerator(object): <NEW_LINE> <INDENT> def __init__(self, app=None): <NEW_LINE> <INDENT> if app: <NEW_LINE> <INDENT> self.init_app(app) <NEW_LINE> <DEDENT> <DEDENT> def init_app(self, app): <NEW_LINE> <INDENT> app.extensions['invenio-sequencegenerator'] = self <NEW_LINE> app.register_blueprint(Blueprint( 'invenio_sequencegenerator', __name__, template_folder='templates') ) | Invenio-SequenceGenerator extension. | 6259908dadb09d7d5dc0c1c9 |
class VideoUploadUrlFactory(factory.DjangoModelFactory): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = models.VideoUploadUrl <NEW_LINE> <DEDENT> owner = factory.SubFactory(UserFactory) | This factory creates random video upload URL instances for testing purposes | 6259908d656771135c48ae68
class Ripoff(FSMPlayer): <NEW_LINE> <INDENT> name = "Ripoff" <NEW_LINE> classifier = { "memory_depth": 3, "stochastic": False, "long_run_time": False, "inspects_source": False, "manipulates_source": False, "manipulates_state": False, } <NEW_LINE> def __init__(self) -> None: <NEW_LINE> <INDENT> transitions = ( (1, C, 2, C), (1, D, 3, C), (2, C, 1, D), (2, D, 3, C), (3, C, 3, C), (3, D, 3, D), ) <NEW_LINE> super().__init__( transitions=transitions, initial_state=1, initial_action=D ) | FSM player described in http://DOI.org/10.1109/TEVC.2008.920675.
Names
- Ripoff: [Ashlock2008]_ | 6259908d3617ad0b5ee07dc0 |
class TaxaJurosContaListaResponse(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swagger_types = { 'plano': 'int', 'taxa': 'float' } <NEW_LINE> self.attribute_map = { 'plano': 'plano', 'taxa': 'taxa' } <NEW_LINE> self._plano = None <NEW_LINE> self._taxa = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def plano(self): <NEW_LINE> <INDENT> return self._plano <NEW_LINE> <DEDENT> @plano.setter <NEW_LINE> def plano(self, plano): <NEW_LINE> <INDENT> self._plano = plano <NEW_LINE> <DEDENT> @property <NEW_LINE> def taxa(self): <NEW_LINE> <INDENT> return self._taxa <NEW_LINE> <DEDENT> @taxa.setter <NEW_LINE> def taxa(self, taxa): <NEW_LINE> <INDENT> self._taxa = taxa <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259908d8a349b6b43687ece |
class Alg(graphene.Enum): <NEW_LINE> <INDENT> HS256 = "HS256" <NEW_LINE> RS256 = "RS256" <NEW_LINE> RSA = "RSA" <NEW_LINE> ED25519 = "ED25519" | Supported signature algorithms | 6259908d167d2b6e312b83d0 |
class TestUnsupportedMediaTypeError(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return UnsupportedMediaTypeError( error = home_connect_sdk.models.unauthorized_error_error.UnauthorizedError_error( key = '0', description = '0', ) ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return UnsupportedMediaTypeError( error = home_connect_sdk.models.unauthorized_error_error.UnauthorizedError_error( key = '0', description = '0', ), ) <NEW_LINE> <DEDENT> <DEDENT> def testUnsupportedMediaTypeError(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True) | UnsupportedMediaTypeError unit test stubs | 6259908d091ae356687068b5 |
class CombinedDetail(base.APIView): <NEW_LINE> <INDENT> def get(self, request): <NEW_LINE> <INDENT> namespace = request.GET.get('namespace', None) <NEW_LINE> name = request.GET.get('name', None) <NEW_LINE> if not name or not namespace: <NEW_LINE> <INDENT> raise exceptions.ValidationError( detail='namespace and name parameters are required') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> repo = models.Repository.objects.get( provider_namespace__namespace__name__iexact=namespace, name__iexact=name ) <NEW_LINE> namespace_obj = models.Namespace.objects.get( name__iexact=namespace) <NEW_LINE> content = models.Content.objects.filter( repository__name__iexact=name, repository__provider_namespace__namespace__name__iexact=namespace ) <NEW_LINE> data = { 'repository': v1_serializers.RepositorySerializer(repo).data, 'namespace': v1_serializers.NamespaceSerializer( namespace_obj).data, 'content': v1_serializers.ContentSerializer( content, many=True).data } <NEW_LINE> return response.Response({'type': 'repository', 'data': data}) <NEW_LINE> <DEDENT> except django_exceptions.ObjectDoesNotExist: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> collection = models.Collection.objects.get( namespace__name__iexact=namespace, name__iexact=name ) <NEW_LINE> data = { 'collection': internal_serializers.CollectionDetailSerializer( collection).data, } <NEW_LINE> return response.Response({'type': 'collection', 'data': data}) <NEW_LINE> <DEDENT> except django_exceptions.ObjectDoesNotExist: <NEW_LINE> <INDENT> raise exceptions.NotFound( detail="No collection or repository could be found " + "matching the name {}.{}".format(namespace, name) ) | This is intended to provide all of the information for the content
detail pages. For repos, it returns the repository, namespace and list of
content items.
For collections it returns a collection object | 6259908dad47b63b2c5a94c2 |
class TestV1Filesystem(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testV1Filesystem(self): <NEW_LINE> <INDENT> pass | V1Filesystem unit test stubs | 6259908e3617ad0b5ee07dc4 |
class FrakmLog(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._urllist=list() <NEW_LINE> <DEDENT> def gen(self, hostname, logfiles, proto='https://'): <NEW_LINE> <INDENT> for logfile in logfiles: <NEW_LINE> <INDENT> with open(logfile) as f: <NEW_LINE> <INDENT> is_in_url_part=False <NEW_LINE> for line in f: <NEW_LINE> <INDENT> if '# ROW_DATA_START' in line: <NEW_LINE> <INDENT> is_in_url_part = True <NEW_LINE> continue <NEW_LINE> <DEDENT> elif is_in_url_part: <NEW_LINE> <INDENT> if '# ROW_DATA_END' in line: <NEW_LINE> <INDENT> is_in_url_part=False <NEW_LINE> break <NEW_LINE> <DEDENT> urlpart = line.split(',')[0] <NEW_LINE> url = proto + self._replaceDP(urlpart.strip(), hostname) <NEW_LINE> logging.debug('URL => {}'.format(url)) <NEW_LINE> self._urllist.append(url) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def _replaceDP(self, logurl, dp): <NEW_LINE> <INDENT> sp = logurl.split('/') <NEW_LINE> sp[0] = dp <NEW_LINE> return '/'.join(sp) <NEW_LINE> <DEDENT> def save(self, filename): <NEW_LINE> <INDENT> with open(filename, 'w') as f: <NEW_LINE> <INDENT> for u in self._urllist: <NEW_LINE> <INDENT> f.write(u+'\n') <NEW_LINE> <DEDENT> <DEDENT> logging.debug('saved to {}'.format(filename)) | URL list generator from logs Akamai provides | 6259908ef9cc0f698b1c6105
class Recorder(object): <NEW_LINE> <INDENT> def __init__(self, serial, model, producer, description = None, id=None, parent_inventory=None, author_uri = None, agency_uri = None, creation_time = None): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.serial = str(serial) <NEW_LINE> self.model = model <NEW_LINE> self.producer = producer <NEW_LINE> self.description = description <NEW_LINE> self.has_changed = False <NEW_LINE> self.streams = []; <NEW_LINE> self.parent_inventory = parent_inventory <NEW_LINE> self.author_uri = author_uri <NEW_LINE> self.agency_uri = agency_uri <NEW_LINE> if creation_time == None: <NEW_LINE> <INDENT> self.creation_time = UTCDateTime(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.creation_time = UTCDateTime(creation_time); <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> out = 'id:\t%s\nserial:\t%s\nmodel:\t%s\n%d sensor(s):\n' % (str(self.id), self.serial, self.model, len(self.sensors)) <NEW_LINE> return out <NEW_LINE> <DEDENT> def __setitem__(self, name, value): <NEW_LINE> <INDENT> self.__dict__[name] = value <NEW_LINE> self.has_changed = True <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if type(self) is type(other): <NEW_LINE> <INDENT> compare_attributes = ['id', 'serial', 'model', 'producer', 'description', 'has_changed', 'streams'] <NEW_LINE> for cur_attribute in compare_attributes: <NEW_LINE> <INDENT> if getattr(self, cur_attribute) != getattr(other, cur_attribute): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def add_stream(self, cur_stream): <NEW_LINE> <INDENT> added_stream = None <NEW_LINE> if cur_stream not in self.streams: <NEW_LINE> <INDENT> self.streams.append(cur_stream) <NEW_LINE> cur_stream.parent_recorder = self <NEW_LINE> added_stream = cur_stream <NEW_LINE> <DEDENT> return added_stream <NEW_LINE> <DEDENT> def pop_stream_by_instance(self, stream): <NEW_LINE> <INDENT> removed_stream = None <NEW_LINE> if not stream.assigned_channels: <NEW_LINE> <INDENT> if stream in self.streams: <NEW_LINE> <INDENT> self.streams.remove(stream) <NEW_LINE> removed_stream = stream <NEW_LINE> <DEDENT> <DEDENT> return removed_stream <NEW_LINE> <DEDENT> def pop_stream(self, **kwargs): <NEW_LINE> <INDENT> streams_popped = [] <NEW_LINE> streams_to_pop = self.get_stream(**kwargs) <NEW_LINE> for cur_stream in streams_to_pop: <NEW_LINE> <INDENT> cur_stream.parent_recorder = None <NEW_LINE> streams_popped.append(self.streams.pop(self.streams.index(cur_stream))) <NEW_LINE> <DEDENT> return streams_popped <NEW_LINE> <DEDENT> def get_stream(self, **kwargs): <NEW_LINE> <INDENT> ret_stream = self.streams <NEW_LINE> valid_keys = ['name', 'label', 'agency_uri', 'author_uri'] <NEW_LINE> for cur_key, cur_value in kwargs.iteritems(): <NEW_LINE> <INDENT> if cur_key in valid_keys: <NEW_LINE> <INDENT> ret_stream = [x for x in ret_stream if getattr(x, cur_key) == cur_value] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> warnings.warn('Search attribute %s is not existing.' % cur_key, RuntimeWarning) <NEW_LINE> <DEDENT> <DEDENT> return ret_stream | A seismic data recorder.
| 6259908eec188e330fdfa524 |
class Role(object): <NEW_LINE> <INDENT> def __init__(self, roleName=None, createTime=None, ownerName=None,): <NEW_LINE> <INDENT> self.roleName = roleName <NEW_LINE> self.createTime = createTime <NEW_LINE> self.ownerName = ownerName <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.roleName = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.createTime = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 3: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.ownerName = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('Role') <NEW_LINE> if self.roleName is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('roleName', TType.STRING, 1) <NEW_LINE> oprot.writeString(self.roleName.encode('utf-8') if sys.version_info[0] == 2 else self.roleName) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.createTime is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('createTime', TType.I32, 2) <NEW_LINE> oprot.writeI32(self.createTime) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.ownerName is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('ownerName', TType.STRING, 3) <NEW_LINE> oprot.writeString(self.ownerName.encode('utf-8') if sys.version_info[0] == 2 else self.ownerName) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- roleName
- createTime
- ownerName | 6259908e5fdd1c0f98e5fbec |
class Notice (models.Model): <NEW_LINE> <INDENT> notice_title = models.TextField() <NEW_LINE> notice_context = RichTextField() <NEW_LINE> timestamp = models.DateField() <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.notice_title + " " + self.notice_context | Model class for Notice | 6259908e3617ad0b5ee07dc6 |
class tracker_GUI(object): <NEW_LINE> <INDENT> pass | Abstract base class | 6259908ea05bb46b3848bf60 |
class GrrStatus(rdfvalue.RDFProtoStruct): <NEW_LINE> <INDENT> protobuf = jobs_pb2.GrrStatus <NEW_LINE> rdf_map = dict(cpu_used=rdfvalue.CpuSeconds) | The client status message.
When the client responds to a request, it sends a series of response messages,
followed by a single status message. The GrrStatus message contains error and
traceback information for any failures on the client. | 6259908e091ae356687068bb |
class F12(BBOBNfreeFunction): <NEW_LINE> <INDENT> funId = 12 <NEW_LINE> condition = 1e6 <NEW_LINE> beta = .5 <NEW_LINE> def initwithsize(self, curshape, dim): <NEW_LINE> <INDENT> if self.dim != dim: <NEW_LINE> <INDENT> if self.zerox: <NEW_LINE> <INDENT> self.xopt = zeros(dim) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.xopt = compute_xopt(self.rseed + 1e6, dim) <NEW_LINE> <DEDENT> self.rotation = compute_rotation(self.rseed + 1e6, dim) <NEW_LINE> <DEDENT> if self.lastshape != curshape: <NEW_LINE> <INDENT> self.dim = dim <NEW_LINE> self.lastshape = curshape <NEW_LINE> self.arrxopt = resize(self.xopt, curshape) <NEW_LINE> self.arrexpo = resize(self.beta * linspace(0, 1, dim), curshape) <NEW_LINE> <DEDENT> <DEDENT> def _evalfull(self, x): <NEW_LINE> <INDENT> fadd = self.fopt <NEW_LINE> curshape, dim = self.shape_(x) <NEW_LINE> if self.lastshape != curshape: <NEW_LINE> <INDENT> self.initwithsize(curshape, dim) <NEW_LINE> <DEDENT> x = x - self.arrxopt <NEW_LINE> x = dot(x, self.rotation) <NEW_LINE> idx = x > 0 <NEW_LINE> x[idx] = x[idx] ** (1 + self.arrexpo[idx] * np.sqrt(x[idx])) <NEW_LINE> x = dot(x, self.rotation) <NEW_LINE> try: <NEW_LINE> <INDENT> ftrue = self.condition * np.sum(x**2, -1) + (1 - self.condition) * x[:, 0]**2 <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> ftrue = self.condition * np.sum(x**2) + (1 - self.condition) * x[0]**2 <NEW_LINE> <DEDENT> fval = self.noise(ftrue) <NEW_LINE> ftrue += fadd <NEW_LINE> fval += fadd <NEW_LINE> return fval, ftrue | Bent cigar with asymmetric space distortion, condition 1e6 | 6259908e5fc7496912d490a6 |
class Worker(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=255) <NEW_LINE> description = models.TextField(blank=True) <NEW_LINE> api_key = models.CharField(max_length=255, db_index=True, unique=True) <NEW_LINE> secret = models.CharField(max_length=255, db_index=True) <NEW_LINE> enqueue_is_enabled = models.BooleanField( default=True, db_index=True, help_text=( 'If unchecked, nothing for this worker will be added to ' 'the worker queue. This will not affect already running jobs.' ) ) <NEW_LINE> ping_response_dts = models.DateTimeField( blank=True, null=True, editable=False) <NEW_LINE> worker_version = models.CharField( max_length=100, blank=True, null=True, editable=False) <NEW_LINE> concurrent_jobs = models.PositiveIntegerField( blank=True, null=True, editable=False) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> def is_responsive(self): <NEW_LINE> <INDENT> unresponsive_intervals = settings.JOB_RUNNER_WORKER_UNRESPONSIVE_AFTER_INTERVALS <NEW_LINE> ping_interval = settings.JOB_RUNNER_WORKER_PING_INTERVAL <NEW_LINE> ping_margin = settings.JOB_RUNNER_WORKER_PING_MARGIN <NEW_LINE> acceptable_delta = timedelta( seconds=(unresponsive_intervals * ping_interval) + ping_margin) <NEW_LINE> if self.ping_response_dts: <NEW_LINE> <INDENT> if self.ping_response_dts + acceptable_delta >= timezone.now(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ('title', ) | Workers | 6259908e099cdd3c63676235 |
class DateParserIs(DateParser): <NEW_LINE> <INDENT> modifier_to_int = { 'fyrir' : Date.MOD_BEFORE, 'á undan' : Date.MOD_BEFORE, 'eftir' : Date.MOD_AFTER, 'í kringum' : Date.MOD_ABOUT, 'uþb' : Date.MOD_ABOUT } <NEW_LINE> bce = ["f Kr"] <NEW_LINE> calendar_to_int = { 'gregoríanskt ' : Date.CAL_GREGORIAN, 'g' : Date.CAL_GREGORIAN, 'júlíanskt' : Date.CAL_JULIAN, 'j' : Date.CAL_JULIAN, 'hebreskt' : Date.CAL_HEBREW, 'h' : Date.CAL_HEBREW, 'íslamskt' : Date.CAL_ISLAMIC, 'múslimskt' : Date.CAL_ISLAMIC, 'i' : Date.CAL_ISLAMIC, 'franskt' : Date.CAL_FRENCH, 'franska lýðveldisins' : Date.CAL_FRENCH, 'f' : Date.CAL_FRENCH, 'persneskt' : Date.CAL_PERSIAN, 'p' : Date.CAL_PERSIAN, 'sænskt' : Date.CAL_SWEDISH, 's' : Date.CAL_SWEDISH, } <NEW_LINE> quality_to_int = { 'áætlað' : Date.QUAL_ESTIMATED, 'reiknað' : Date.QUAL_CALCULATED, } <NEW_LINE> def init_strings(self): <NEW_LINE> <INDENT> DateParser.init_strings(self) <NEW_LINE> self._span = re.compile("(frá)?\s*(?P<start>.+)\s*(til|--|–)\s*(?P<stop>.+)", re.IGNORECASE) <NEW_LINE> self._range = re.compile("(milli)\s+(?P<start>.+)\s+og\s+(?P<stop>.+)", re.IGNORECASE) | Convert a text string into a Date object, expecting a date
notation in the Icelandic language. If the date cannot be converted,
the text string is assigned. | 6259908e5fdd1c0f98e5fbf0 |
class DialectManager(object): <NEW_LINE> <INDENT> __dialects_mapping = None <NEW_LINE> __chemical_dialect_instances = {} <NEW_LINE> @staticmethod <NEW_LINE> def __initialize_dialects(): <NEW_LINE> <INDENT> from razi.postgresql_rdkit import PostgresRDKitDialect <NEW_LINE> from razi.chemicalite import ChemicaLiteDialect <NEW_LINE> DialectManager.__dialects_mapping = { PGDialect: PostgresRDKitDialect, SQLiteDialect: ChemicaLiteDialect, } <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __dialects(): <NEW_LINE> <INDENT> if DialectManager.__dialects_mapping is None: <NEW_LINE> <INDENT> DialectManager.__initialize_dialects() <NEW_LINE> <DEDENT> return DialectManager.__dialects_mapping <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_chemical_dialect(dialect): <NEW_LINE> <INDENT> possible_chemical_dialects = [chem_dialect for (main_dialect, chem_dialect) in DialectManager.__dialects().items() if isinstance(dialect, main_dialect)] <NEW_LINE> if possible_chemical_dialects: <NEW_LINE> <INDENT> chem_dialect = possible_chemical_dialects[0] <NEW_LINE> if chem_dialect not in DialectManager.__chemical_dialect_instances: <NEW_LINE> <INDENT> chem_dialect_instance = chem_dialect() <NEW_LINE> DialectManager.__chemical_dialect_instances[chem_dialect] = chem_dialect_instance <NEW_LINE> <DEDENT> return DialectManager.__chemical_dialect_instances[chem_dialect] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError('Dialect "%s" is not supported by ' 'Razi' % (dialect.name)) | This class is responsible for finding a chemical dialect (e.g.
PGRDKitDialect or ChemicaLiteDialect) for a SQLAlchemy database dialect.
It can be used by calling "DialectManager.get_chemical_dialect(dialect)",
which returns the corresponding chemical dialect.
The chemical dialect has to be listed in __initialize_dialects(). | 6259908e3346ee7daa33849e |
class Qos(api_extensions.APIExtensionDescriptor): <NEW_LINE> <INDENT> api_definition = apidef <NEW_LINE> @classmethod <NEW_LINE> def get_plugin_interface(cls): <NEW_LINE> <INDENT> return QoSPluginBase <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_resources(cls): <NEW_LINE> <INDENT> special_mappings = {'policies': 'policy'} <NEW_LINE> plural_mappings = resource_helper.build_plural_mappings( special_mappings, itertools.chain( apidef.RESOURCE_ATTRIBUTE_MAP, apidef.SUB_RESOURCE_ATTRIBUTE_MAP)) <NEW_LINE> resources = resource_helper.build_resource_info( plural_mappings, apidef.RESOURCE_ATTRIBUTE_MAP, constants.QOS, translate_name=True, allow_bulk=True) <NEW_LINE> plugin = directory.get_plugin(constants.QOS) <NEW_LINE> for collection_name in apidef.SUB_RESOURCE_ATTRIBUTE_MAP: <NEW_LINE> <INDENT> resource_name = collection_name[:-1] <NEW_LINE> parent = apidef.SUB_RESOURCE_ATTRIBUTE_MAP[ collection_name].get('parent') <NEW_LINE> params = apidef.SUB_RESOURCE_ATTRIBUTE_MAP[collection_name].get( 'parameters') <NEW_LINE> controller = base.create_resource(collection_name, resource_name, plugin, params, allow_bulk=True, parent=parent, allow_pagination=True, allow_sorting=True) <NEW_LINE> resource = extensions.ResourceExtension( collection_name, controller, parent, path_prefix=apidef.API_PREFIX, attr_map=params) <NEW_LINE> resources.append(resource) <NEW_LINE> <DEDENT> return resources | Quality of Service API extension. | 6259908e656771135c48ae6d |
class MySqlConnect(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.conn = MySQLdb.connect(host='127.0.0.1',user='root',passwd='x5xuan',db='network_info') <NEW_LINE> self.cur = self.conn.cursor() <NEW_LINE> <DEDENT> def Select(self,sql): <NEW_LINE> <INDENT> self.cur.execute(sql) <NEW_LINE> data = self.cur.fetchall() <NEW_LINE> return data <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.cur.close() <NEW_LINE> self.conn.close() | For connecting to MySQL | 6259908e8a349b6b43687ed8
class RSky(Observation): <NEW_LINE> <INDENT> ObservationType = "R-SKY" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> <DEDENT> def run(self, integ_time): <NEW_LINE> <INDENT> print("Start R SKY observation") <NEW_LINE> print(f"Integration time {integ_time}") <NEW_LINE> self.con.move_chopper("in") <NEW_LINE> time.sleep(3) <NEW_LINE> status = self.con.read_status() <NEW_LINE> hot_status = status.Current_Hot <NEW_LINE> current_Az = status.Current_Az <NEW_LINE> self.con.onepoint_move(x=current_Az, y=80) <NEW_LINE> self.con.dome_track() <NEW_LINE> self.con.dome_tracking_check() <NEW_LINE> self.log.info("dome track OK") <NEW_LINE> self.con.antenna_tracking_check() <NEW_LINE> self.log.info("antenna track OK") <NEW_LINE> print("HOT") <NEW_LINE> print("hot_status ### ", hot_status) <NEW_LINE> print("get spectrum...") <NEW_LINE> self.con.pub_loggerflag(self.DataDir) <NEW_LINE> self.con.xffts_publish_flag(obs_mode="HOT") <NEW_LINE> time.sleep(integ_time) <NEW_LINE> self.con.xffts_publish_flag() <NEW_LINE> self.con.move_chopper("out") <NEW_LINE> time.sleep(3) <NEW_LINE> status = self.con.read_status() <NEW_LINE> hot_status = status.Current_Hot <NEW_LINE> print("SKY") <NEW_LINE> print("hot_status ### ", hot_status) <NEW_LINE> print("get spectrum...") <NEW_LINE> self.con.xffts_publish_flag(obs_mode="SKY") <NEW_LINE> time.sleep(integ_time) <NEW_LINE> self.con.xffts_publish_flag() <NEW_LINE> log_contents = ( "Observation End : observation time : " f"{(time.time() - self.start_time) / 60:.2f} [min]" ) <NEW_LINE> self.logger.obslog(log_contents, lv=1) <NEW_LINE> self.log.info(log_contents) <NEW_LINE> self.con.pub_loggerflag("") | An observing module for R-SKY observation, which provides Tsys
measurement toward an elevation of 80 deg. | 6259908e7cff6e4e811b76bc
class ElectricCar(Car): <NEW_LINE> <INDENT> def __init__(self, make, model, year): <NEW_LINE> <INDENT> super().__init__(make,model,year) <NEW_LINE> self.battery = Battery() | The unique features of an electric car | 6259908edc8b845886d55233
class ResourceTypeMissing(ApiError): <NEW_LINE> <INDENT> pass | Resource type is missing | 6259908e26068e7796d4e5bc |
class FSKeyService(object): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.opts = kwargs <NEW_LINE> <DEDENT> def minions(self): <NEW_LINE> <INDENT> ret = {} <NEW_LINE> for root, dirnames, fnames in os.walk('/etc/salt/pki/master'): <NEW_LINE> <INDENT> for dirn in dirnames: <NEW_LINE> <INDENT> ret[dirn] = os.listdir(os.path.join(root, dirn)) <NEW_LINE> <DEDENT> <DEDENT> return ret | Keyservice in the Filesystem. Would implement the logic currently
present in salt.transports.mixins.auth.AESReqServerMixin._auth() | 6259908ed8ef3951e32c8c9a |
class SpellTable(tables.Table): <NEW_LINE> <INDENT> class Meta(object): <NEW_LINE> <INDENT> model = models.Spell <NEW_LINE> sequence = ('name', 'difficulty', '...',) <NEW_LINE> exclude = ('campaign', 'id',) <NEW_LINE> <DEDENT> def render_difficulty(self, record): <NEW_LINE> <INDENT> return record.get_difficulty_display() | An HTML table displaying ``Spell`` objects. | 6259908eadb09d7d5dc0c1d5
class UpdateEmailView( View): <NEW_LINE> <INDENT> def post(self, request): <NEW_LINE> <INDENT> email = request.POST.get("email", "") <NEW_LINE> code = request.POST.get("code", "") <NEW_LINE> existed_codes = EmailVertifyRecord.objects.filter(email=email, code=code, send_type="update_email") <NEW_LINE> if existed_codes: <NEW_LINE> <INDENT> user = request.user <NEW_LINE> user.email = email <NEW_LINE> user.save() <NEW_LINE> return HttpResponse('{"status": "success", "msg":"修改成功"}', content_type="application/json") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return HttpResponse('{"email": "验证码出错"}', content_type="application/json") | Update the user's personal email address | 6259908e97e22403b383cb72
class SimpleReadOnlyGlanceClientTest(base.ClientTestBase): <NEW_LINE> <INDENT> def test_list(self): <NEW_LINE> <INDENT> self.glance('image-list') | read only functional python-glanceclient tests.
This only exercises client commands that are read only. | 6259908e283ffb24f3cf551b |
class PuntajeRiesgo(UltimaModificacionMixin): <NEW_LINE> <INDENT> prueba = models.OneToOneField(Prueba, related_name='puntaje_riesgo', on_delete=models.CASCADE) <NEW_LINE> puntaje = models.FloatField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Puntaje Riesgo' <NEW_LINE> verbose_name_plural = 'Puntajes de Riesgo' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'Puntaje IRCA {:.2f} para {}'.format(self.puntaje, self.prueba) <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.prueba.area.programa != Programa.objects.aguas(): <NEW_LINE> <INDENT> from django.db import IntegrityError <NEW_LINE> raise IntegrityError('Prueba debe ser de Aguas, pero es de {}'.format(self.prueba.area.programa)) <NEW_LINE> <DEDENT> super().save(*args, **kwargs) | Model for storing the risk score of each test, as long as the test belongs to the water (Aguas) program,
in order to calculate the IRCA. | 6259908ead47b63b2c5a94ce
class ProgramsConfigMixin(object): <NEW_LINE> <INDENT> def set_programs_api_configuration(self, is_enabled=False, api_version=1, api_url=PROGRAMS_STUB_URL, js_path='/js', css_path='/css'): <NEW_LINE> <INDENT> ConfigModelFixture('/config/programs', { 'enabled': is_enabled, 'api_version_number': api_version, 'internal_service_url': api_url, 'public_service_url': api_url, 'authoring_app_js_path': js_path, 'authoring_app_css_path': css_path, 'cache_ttl': 0, 'enable_student_dashboard': is_enabled, 'enable_studio_tab': is_enabled, 'enable_certification': is_enabled, 'xseries_ad_enabled': is_enabled, 'program_listing_enabled': is_enabled, }).install() | Mixin providing a method used to configure the programs feature. | 6259908e283ffb24f3cf551d |
class NystromformerClassificationHead(nn.Module): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.dense = nn.Linear(config.hidden_size, config.hidden_size) <NEW_LINE> self.dropout = nn.Dropout(config.hidden_dropout_prob) <NEW_LINE> self.out_proj = nn.Linear(config.hidden_size, config.num_labels) <NEW_LINE> self.config = config <NEW_LINE> <DEDENT> def forward(self, features, **kwargs): <NEW_LINE> <INDENT> x = features[:, 0, :] <NEW_LINE> x = self.dropout(x) <NEW_LINE> x = self.dense(x) <NEW_LINE> x = ACT2FN[self.config.hidden_act](x) <NEW_LINE> x = self.dropout(x) <NEW_LINE> x = self.out_proj(x) <NEW_LINE> return x | Head for sentence-level classification tasks. | 6259908e60cbc95b06365ba7 |