signature (stringlengths 8–3.44k) | body (stringlengths 0–1.41M) | docstring (stringlengths 1–122k) | id (stringlengths 5–17)
---|---|---|---|
@wrap_boto_errors<EOL><INDENT>def _read(self):<DEDENT> | return self.native_obj.read()<EOL> | Return contents of object. | f1630:c3:m3 |
@classmethod<EOL><INDENT>def from_result(cls, container, result):<DEDENT> | if result is None:<EOL><INDENT>raise errors.NoObjectException<EOL><DEDENT>elif cls.is_prefix(result):<EOL><INDENT>return cls.from_prefix(container, result)<EOL><DEDENT>elif cls.is_key(result):<EOL><INDENT>return cls.from_key(container, result)<EOL><DEDENT>raise errors.CloudException("<STR_LIT>" %<EOL>type(result))<EOL> | Create from ambiguous result. | f1630:c3:m4 |
@classmethod<EOL><INDENT>def from_prefix(cls, container, prefix):<DEDENT> | if prefix is None:<EOL><INDENT>raise errors.NoObjectException<EOL><DEDENT>return cls(container,<EOL>name=prefix.name,<EOL>obj_type=cls.type_cls.SUBDIR)<EOL> | Create from prefix object. | f1630:c3:m5 |
@classmethod<EOL><INDENT>def from_key(cls, container, key):<DEDENT> | if key is None:<EOL><INDENT>raise errors.NoObjectException<EOL><DEDENT>return cls(container,<EOL>name=key.name,<EOL>size=key.size,<EOL>content_type=key.content_type,<EOL>content_encoding=key.content_encoding,<EOL>last_modified=dt_from_header(key.last_modified),<EOL>obj_type=cls.type_cls.FILE)<EOL> | Create from key object. | f1630:c3:m6 |
@wrap_boto_errors<EOL><INDENT>def _get_container(self):<DEDENT> | return self.conn.native_conn.get_bucket(self.name)<EOL> | Return native container object. | f1630:c4:m0 |
@wrap_boto_errors<EOL><INDENT>def get_objects(self, path, marker=None,<EOL>limit=settings.CLOUD_BROWSER_DEFAULT_LIST_LIMIT):<DEDENT> | from itertools import islice<EOL>path = path.rstrip(SEP) + SEP if path else path<EOL>result_set = self.native_container.list(path, SEP, marker)<EOL>results = list(islice(result_set, limit+<NUM_LIT:1>))<EOL>if results:<EOL><INDENT>if marker and results[<NUM_LIT:0>].name.rstrip(SEP) == marker.rstrip(SEP):<EOL><INDENT>results = results[<NUM_LIT:1>:]<EOL><DEDENT>else:<EOL><INDENT>results = results[:limit]<EOL><DEDENT><DEDENT>return [self.obj_cls.from_result(self, r) for r in results]<EOL> | Get objects. | f1630:c4:m1 |
@wrap_boto_errors<EOL><INDENT>def get_object(self, path):<DEDENT> | key = self.native_container.get_key(path)<EOL>return self.obj_cls.from_key(self, key)<EOL> | Get single object. | f1630:c4:m2 |
@classmethod<EOL><INDENT>def from_bucket(cls, connection, bucket):<DEDENT> | if bucket is None:<EOL><INDENT>raise errors.NoContainerException<EOL><DEDENT>return cls(connection, bucket.name)<EOL> | Create from bucket object. | f1630:c4:m3 |
def _get_connection(self): | raise NotImplementedError("<STR_LIT>")<EOL> | Return native connection object. | f1630:c5:m0 |
@wrap_boto_errors<EOL><INDENT>def _get_containers(self):<DEDENT> | buckets = self.native_conn.get_all_buckets()<EOL>return [self.cont_cls.from_bucket(self, b) for b in buckets]<EOL> | Return available containers. | f1630:c5:m1 |
@wrap_boto_errors<EOL><INDENT>def _get_container(self, path):<DEDENT> | bucket = self.native_conn.get_bucket(path)<EOL>return self.cont_cls.from_bucket(self, bucket)<EOL> | Return single container. | f1630:c5:m2 |
@register.filter<EOL>@stringfilter<EOL>def truncatechars(value, num, end_text="<STR_LIT>"): | length = None<EOL>try:<EOL><INDENT>length = int(num)<EOL><DEDENT>except ValueError:<EOL><INDENT>pass<EOL><DEDENT>if length is not None and len(value) > length:<EOL><INDENT>return value[:length - len(end_text)] + end_text<EOL><DEDENT>return value<EOL> | Truncate string on character boundary.
.. note::
Django ticket `5025 <http://code.djangoproject.com/ticket/5025>`_ has a
patch for a more extensible and robust truncate characters tag filter.
Example::
{{ my_variable|truncatechars:22 }}
:param value: Value to truncate.
:type value: ``string``
:param num: Number of characters to trim to.
:type num: ``int`` | f1632:m0 |
@register.tag<EOL>def cloud_browser_media_url(_, token): | bits = token.split_contents()<EOL>if len(bits) != <NUM_LIT:2>:<EOL><INDENT>raise TemplateSyntaxError("<STR_LIT>" % bits[<NUM_LIT:0>])<EOL><DEDENT>rel_path = bits[<NUM_LIT:1>]<EOL>return MediaUrlNode(rel_path)<EOL> | Get base media URL for application static media.
Correctly handles whether or not the settings variable
``CLOUD_BROWSER_STATIC_MEDIA_DIR`` is set and served.
For example::
<link rel="stylesheet" type="text/css"
href="{% cloud_browser_media_url "css/cloud-browser.css" %}" /> | f1632:m1 |
def __init__(self, rel_path): | super(MediaUrlNode, self).__init__()<EOL>self.rel_path = rel_path.lstrip('<STR_LIT:/>').strip("<STR_LIT:'>").strip('<STR_LIT:">')<EOL> | Initializer. | f1632:c0:m0 |
def render(self, context): | try:<EOL><INDENT>from django.core.urlresolvers import reverse<EOL><DEDENT>except ImportError:<EOL><INDENT>from django.urls import reverse<EOL><DEDENT>if self.static_media_url is not None:<EOL><INDENT>return os.path.join(self.static_media_url, self.rel_path)<EOL><DEDENT>return reverse("<STR_LIT>",<EOL>args=[self.rel_path],<EOL>current_app='<STR_LIT>')<EOL> | Render. | f1632:c0:m1 |
def settings_view_decorator(function): | dec = settings.CLOUD_BROWSER_VIEW_DECORATOR<EOL>if isinstance(dec, str):<EOL><INDENT>mod_str, _, dec_str = dec.rpartition('<STR_LIT:.>')<EOL>if not (mod_str and dec_str):<EOL><INDENT>raise ImportError("<STR_LIT>" % mod_str)<EOL><DEDENT>mod = import_module(mod_str)<EOL>if not hasattr(mod, dec_str):<EOL><INDENT>raise ImportError("<STR_LIT>" % dec)<EOL><DEDENT>dec = getattr(mod, dec_str)<EOL><DEDENT>if dec and callable(dec):<EOL><INDENT>return dec(function)<EOL><DEDENT>return function<EOL> | Insert decorator from settings, if any.
.. note:: Decorator in ``CLOUD_BROWSER_VIEW_DECORATOR`` can be either a
callable or a fully-qualified string path (the latter will be lazily
imported). | f1634:m0 |
def _breadcrumbs(path): | full = None<EOL>crumbs = []<EOL>for part in path_yield(path):<EOL><INDENT>full = path_join(full, part) if full else part<EOL>crumbs.append((full, part))<EOL><DEDENT>return crumbs<EOL> | Return breadcrumb dict from path. | f1634:m1 |
@settings_view_decorator<EOL>def browser(request, path='<STR_LIT>', template="<STR_LIT>"): | from itertools import islice<EOL>try:<EOL><INDENT>from future_builtins import filter<EOL><DEDENT>except ImportError:<EOL><INDENT>from builtins import filter<EOL><DEDENT>container_path, object_path = path_parts(path)<EOL>incoming = request.POST or request.GET or {}<EOL>marker = incoming.get('<STR_LIT>', None)<EOL>marker_part = incoming.get('<STR_LIT>', None)<EOL>if marker_part:<EOL><INDENT>marker = path_join(object_path, marker_part)<EOL><DEDENT>limit_default = settings.CLOUD_BROWSER_DEFAULT_LIST_LIMIT<EOL>def limit_test(num):<EOL><INDENT>return num > <NUM_LIT:0> and (MAX_LIMIT is None or num <= MAX_LIMIT - <NUM_LIT:1>)<EOL><DEDENT>limit = get_int(incoming.get('<STR_LIT>', limit_default),<EOL>limit_default,<EOL>limit_test)<EOL>conn = get_connection()<EOL>containers = conn.get_containers()<EOL>marker_part = None<EOL>container = None<EOL>objects = None<EOL>if container_path != '<STR_LIT>':<EOL><INDENT>def cont_eq(container):<EOL><INDENT>return container.name == container_path<EOL><DEDENT>filtered_conts = filter(cont_eq, containers)<EOL>cont_list = list(islice(filtered_conts, <NUM_LIT:1>))<EOL>if not cont_list:<EOL><INDENT>raise Http404("<STR_LIT>" % container_path)<EOL><DEDENT>container = cont_list[<NUM_LIT:0>]<EOL>objects = container.get_objects(object_path, marker, limit + <NUM_LIT:1>)<EOL>marker = None<EOL>if len(objects) == limit + <NUM_LIT:1>:<EOL><INDENT>objects = objects[:limit]<EOL>marker = objects[-<NUM_LIT:1>].name<EOL>marker_part = relpath(marker, object_path)<EOL><DEDENT><DEDENT>return render(request, template,<EOL>{'<STR_LIT:path>': path,<EOL>'<STR_LIT>': marker,<EOL>'<STR_LIT>': marker_part,<EOL>'<STR_LIT>': limit,<EOL>'<STR_LIT>': _breadcrumbs(path),<EOL>'<STR_LIT>': container_path,<EOL>'<STR_LIT>': containers,<EOL>'<STR_LIT>': container,<EOL>'<STR_LIT>': object_path,<EOL>'<STR_LIT>': objects})<EOL> | View files in a file path.
:param request: The request.
:param path: Path to resource, including container as first part of path.
:param template: Template to render. | f1634:m2 |
@settings_view_decorator<EOL>def document(_, path='<STR_LIT>'): | container_path, object_path = path_parts(path)<EOL>conn = get_connection()<EOL>try:<EOL><INDENT>container = conn.get_container(container_path)<EOL><DEDENT>except errors.NoContainerException:<EOL><INDENT>raise Http404("<STR_LIT>" % container_path)<EOL><DEDENT>except errors.NotPermittedException:<EOL><INDENT>raise Http404("<STR_LIT>" % container_path)<EOL><DEDENT>try:<EOL><INDENT>storage_obj = container.get_object(object_path)<EOL><DEDENT>except errors.NoObjectException:<EOL><INDENT>raise Http404("<STR_LIT>" % object_path)<EOL><DEDENT>content_type = storage_obj.smart_content_type<EOL>encoding = storage_obj.smart_content_encoding<EOL>response = HttpResponse(content=storage_obj.read(),<EOL>content_type=content_type)<EOL>if encoding not in (None, '<STR_LIT>'):<EOL><INDENT>response['<STR_LIT>'] = encoding<EOL><DEDENT>return response<EOL> | View single document from path.
:param path: Path to resource, including container as first part of path. | f1634:m3 |
def check_version(mod, required): | vers = tuple(int(v) for v in mod.__version__.split('<STR_LIT:.>')[:<NUM_LIT:3>])<EOL>if vers < required:<EOL><INDENT>req = '<STR_LIT:.>'.join(str(v) for v in required)<EOL>raise ImproperlyConfigured(<EOL>"<STR_LIT>" %<EOL>(mod.__name__, mod.__version__, req))<EOL><DEDENT> | Require minimum version of module using ``__version__`` member. | f1636:m1 |
def requires(module, name="<STR_LIT>"): | def wrapped(method):<EOL><INDENT>"""<STR_LIT>"""<EOL>if module is None:<EOL><INDENT>raise ImproperlyConfigured("<STR_LIT>" % name)<EOL><DEDENT>return method<EOL><DEDENT>return wrapped<EOL> | Enforces module presence.
The general use here is to allow conditional imports that may fail (e.g., a
required python package is not installed) but still allow the rest of the
python package to compile and run fine. If a method wrapped with this
decorator is invoked, then a runtime error is generated.
:param module: required module (set as variable to ``None`` on import fail)
:type module: ``module`` or ``None``
:param name: module name
:type name: ``string`` | f1636:m2 |
def dt_from_rfc8601(date_str): | <EOL>date_str = date_str.rstrip('<STR_LIT>').split('<STR_LIT:.>')[<NUM_LIT:0>]<EOL>fmt = "<STR_LIT>"<EOL>return datetime.strptime(date_str, fmt)<EOL> | Convert 8601 (ISO) date string to datetime object.
Handles "Z" and milliseconds transparently.
:param date_str: Date string.
:type date_str: ``string``
:return: Date time.
:rtype: :class:`datetime.datetime` | f1636:m3 |
def dt_from_rfc1123(date_str): | fmt = "<STR_LIT>"<EOL>return datetime.strptime(date_str, fmt)<EOL> | Convert 1123 (HTTP header) date string to datetime object.
:param date_str: Date string.
:type date_str: ``string``
:return: Date time.
:rtype: :class:`datetime.datetime` | f1636:m4 |
def dt_from_header(date_str): | convert_fns = (<EOL>dt_from_rfc8601,<EOL>dt_from_rfc1123,<EOL>)<EOL>for convert_fn in convert_fns:<EOL><INDENT>try:<EOL><INDENT>return convert_fn(date_str)<EOL><DEDENT>except ValueError:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>return None<EOL> | Try various RFC conversions to ``datetime`` or return ``None``.
:param date_str: Date string.
:type date_str: ``string``
:return: Date time.
:rtype: :class:`datetime.datetime` or ``None`` | f1636:m5 |
def basename(path): | base_path = path.strip(SEP)<EOL>sep_ind = base_path.rfind(SEP)<EOL>if sep_ind < <NUM_LIT:0>:<EOL><INDENT>return path<EOL><DEDENT>return base_path[sep_ind + <NUM_LIT:1>:]<EOL> | Rightmost part of path after separator. | f1636:m6 |
def path_parts(path): | path = path if path is not None else '<STR_LIT>'<EOL>container_path = object_path = '<STR_LIT>'<EOL>parts = path_list(path)<EOL>if len(parts) >= <NUM_LIT:1>:<EOL><INDENT>container_path = parts[<NUM_LIT:0>]<EOL><DEDENT>if len(parts) > <NUM_LIT:1>:<EOL><INDENT>object_path = path_join(*parts[<NUM_LIT:1>:])<EOL><DEDENT>return container_path, object_path<EOL> | Split path into container, object.
:param path: Path to resource (including container).
:type path: `string`
:return: Container, storage object tuple.
:rtype: `tuple` of `string`, `string` | f1636:m7 |
def path_yield(path): | for part in (x for x in path.strip(SEP).split(SEP) if x not in (None, '<STR_LIT>')):<EOL><INDENT>yield part<EOL><DEDENT> | Yield on all path parts. | f1636:m8 |
def path_list(path): | return list(path_yield(path))<EOL> | Return list of path parts. | f1636:m9 |
def path_join(*args): | return SEP.join((x for x in args if x not in (None, '<STR_LIT>'))).strip(SEP)<EOL> | Join path parts to single path. | f1636:m10 |
def relpath(path, start): | path_items = path_list(path)<EOL>start_items = path_list(start)<EOL>common = []<EOL>for pth, stt in zip(path_items, start_items):<EOL><INDENT>if pth != stt:<EOL><INDENT>break<EOL><DEDENT>common.append(pth)<EOL><DEDENT>common_ind = len(common)<EOL>parent_num = len(start_items) - common_ind<EOL>rel_items = [PARENT] * parent_num + path_items[common_ind:]<EOL>return path_join(*rel_items)<EOL> | Get relative path to start.
Note: Modeled after python2.6 :meth:`os.path.relpath`. | f1636:m11 |
def __init__(self, **kwargs): | self.from_env = kwargs.get('<STR_LIT>', False)<EOL>self.default = kwargs.get('<STR_LIT:default>', None)<EOL>self.valid_set = kwargs.get('<STR_LIT>', None)<EOL> | Initializer.
:kwarg default: Override default for getting.
:type default: ``object``
:kwarg from_env: Allow variable from environment.
:type from_env: ``bool``
:kwarg valid_set: Set of valid values for setting.
:type valid_set: ``set`` | f1639:c0:m0 |
def validate(self, name, value): | if self.valid_set and value not in self.valid_set:<EOL><INDENT>raise ImproperlyConfigured(<EOL>"<STR_LIT>" %<EOL>(name, value, "<STR_LIT:U+002CU+0020>".join("<STR_LIT>" % x for x in self.valid_set)))<EOL><DEDENT>return value<EOL> | Validate and return a value. | f1639:c0:m1 |
def env_clean(self, value): | return value<EOL> | Clean / convert environment variable to proper type. | f1639:c0:m2 |
def get(self, name, default=None): | default = default if default is not None else self.default<EOL>try:<EOL><INDENT>value = getattr(_settings, name)<EOL><DEDENT>except AttributeError:<EOL><INDENT>value = os.environ.get(name, default) if self.from_env else default<EOL>if value != default:<EOL><INDENT>value = self.env_clean(value)<EOL><DEDENT><DEDENT>return self.validate(name, value)<EOL> | Get value. | f1639:c0:m3 |
def env_clean(self, value): | return self.parse_bool(value)<EOL> | Clean / convert environment variable to proper type. | f1639:c1:m0 |
@classmethod<EOL><INDENT>def parse_bool(cls, value, default=None):<DEDENT> | if value is None:<EOL><INDENT>return default<EOL><DEDENT>elif isinstance(value, bool):<EOL><INDENT>return value<EOL><DEDENT>elif isinstance(value, str):<EOL><INDENT>if value == '<STR_LIT:True>':<EOL><INDENT>return True<EOL><DEDENT>elif value == '<STR_LIT:False>':<EOL><INDENT>return False<EOL><DEDENT><DEDENT>raise Exception("<STR_LIT>" % value)<EOL> | Convert ``string`` or ``bool`` to ``bool``. | f1639:c1:m1 |
def __init__(self): | self.__container_whitelist = None<EOL>self.__container_blacklist = None<EOL> | Initializer. | f1639:c2:m0 |
def __getattr__(self, name, default=None): | if name in self.SETTINGS:<EOL><INDENT>return self.SETTINGS[name].get(name, default)<EOL><DEDENT>return getattr(_settings, name, default)<EOL> | Get setting. | f1639:c2:m1 |
@property<EOL><INDENT>def _container_whitelist(self):<DEDENT> | if self.__container_whitelist is None:<EOL><INDENT>self.__container_whitelist =set(self.CLOUD_BROWSER_CONTAINER_WHITELIST or [])<EOL><DEDENT>return self.__container_whitelist<EOL> | Container whitelist. | f1639:c2:m2 |
@property<EOL><INDENT>def _container_blacklist(self):<DEDENT> | if self.__container_blacklist is None:<EOL><INDENT>self.__container_blacklist =set(self.CLOUD_BROWSER_CONTAINER_BLACKLIST or [])<EOL><DEDENT>return self.__container_blacklist<EOL> | Container blacklist. | f1639:c2:m3 |
def container_permitted(self, name): | white = self._container_whitelist<EOL>black = self._container_blacklist<EOL>return name not in black and (not white or name in white)<EOL> | Return whether or not a container is permitted.
:param name: Container name.
:return: ``True`` if container is permitted.
:rtype: ``bool`` | f1639:c2:m4 |
@property<EOL><INDENT>def app_media_url(self):<DEDENT> | url = None<EOL>media_dir = self.CLOUD_BROWSER_STATIC_MEDIA_DIR<EOL>if media_dir:<EOL><INDENT>url = os.path.join(self.MEDIA_URL, media_dir).rstrip('<STR_LIT:/>') + '<STR_LIT:/>'<EOL><DEDENT>return url<EOL> | Get application media root from real media root URL. | f1639:c2:m5 |
@property<EOL><INDENT>def app_media_doc_root(self): <DEDENT> | app_dir = os.path.abspath(os.path.dirname(__file__))<EOL>media_root = os.path.join(app_dir, '<STR_LIT>')<EOL>return media_root<EOL> | Get application media document (file) root. | f1639:c2:m6 |
def read_file(name): | cur_path = os.path.dirname(__file__)<EOL>exts = ('<STR_LIT>', '<STR_LIT>')<EOL>for ext in exts:<EOL><INDENT>path = os.path.join(cur_path, '<STR_LIT:.>'.join((name, ext)))<EOL>if os.path.exists(path):<EOL><INDENT>with open(path, '<STR_LIT:r>') as file_obj:<EOL><INDENT>return file_obj.read()<EOL><DEDENT><DEDENT><DEDENT>return '<STR_LIT>'<EOL> | Read file name (without extension) to string. | f1642:m0 |
def get_field_remote_field(field): | if django.VERSION < (<NUM_LIT:1>, <NUM_LIT:9>):<EOL><INDENT>return field.rel<EOL><DEDENT>else:<EOL><INDENT>return field.remote_field<EOL><DEDENT> | For Django 1.8/2.0 compatibility | f1645:m1 |
@contextmanager<EOL>def json_encoder_with_precision(precision, JSONEncoderClass): | needs_class_hack = not hasattr(json.encoder, '<STR_LIT>')<EOL>try:<EOL><INDENT>if precision is not None:<EOL><INDENT>def float_repr(o):<EOL><INDENT>return format(o, '<STR_LIT>' % precision)<EOL><DEDENT>if not needs_class_hack:<EOL><INDENT>original_float_repr = json.encoder.FLOAT_REPR<EOL>json.encoder.FLOAT_REPR = float_repr<EOL><DEDENT>else:<EOL><INDENT>class JSONEncoderClass(JSONEncoderClass):<EOL><INDENT>FLOAT_REPR = float.__repr__<EOL>def iterencode(self, o, _one_shot=False):<EOL><INDENT>"""<STR_LIT>"""<EOL>if self.check_circular:<EOL><INDENT>markers = {}<EOL><DEDENT>else:<EOL><INDENT>markers = None<EOL><DEDENT>if self.ensure_ascii:<EOL><INDENT>_encoder = json.encoder.encode_basestring_ascii<EOL><DEDENT>else:<EOL><INDENT>_encoder = json.encoder.encode_basestring<EOL><DEDENT>def floatstr(o, allow_nan=self.allow_nan, _repr=float_repr, _inf=json.encoder.INFINITY, _neginf=-json.encoder.INFINITY):<EOL><INDENT>if o != o:<EOL><INDENT>text = '<STR_LIT>'<EOL><DEDENT>elif o == _inf:<EOL><INDENT>text = '<STR_LIT>'<EOL><DEDENT>elif o == _neginf:<EOL><INDENT>text = '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>return _repr(o)<EOL><DEDENT>if not allow_nan:<EOL><INDENT>raise ValueError(<EOL>"<STR_LIT>" +<EOL>repr(o))<EOL><DEDENT>return text<EOL><DEDENT>if (_one_shot and json.encoder.c_make_encoder is not None and self.indent is None):<EOL><INDENT>_iterencode = json.encoder.c_make_encoder(<EOL>markers, self.default, _encoder, self.indent,<EOL>self.key_separator, self.item_separator, self.sort_keys,<EOL>self.skipkeys, self.allow_nan)<EOL><DEDENT>else:<EOL><INDENT>_iterencode = json.encoder._make_iterencode(<EOL>markers, self.default, _encoder, self.indent, floatstr,<EOL>self.key_separator, self.item_separator, self.sort_keys,<EOL>self.skipkeys, _one_shot)<EOL><DEDENT>return _iterencode(o, <NUM_LIT:0>)<EOL><DEDENT><DEDENT><DEDENT><DEDENT>yield JSONEncoderClass<EOL><DEDENT>finally:<EOL><INDENT>if precision is not None:<EOL><INDENT>if not needs_class_hack:<EOL><INDENT>json.encoder.FLOAT_REPR = original_float_repr<EOL><DEDENT><DEDENT><DEDENT> | Context manager to set float precision during json encoding | f1645:m2 |
def Deserializer(stream_or_string, **options): | geometry_field = options.get("<STR_LIT>", "<STR_LIT>")<EOL>def FeatureToPython(dictobj):<EOL><INDENT>properties = dictobj['<STR_LIT>']<EOL>model_name = options.get("<STR_LIT>") or properties.pop('<STR_LIT>')<EOL>model = _get_model(model_name)<EOL>field_names = [f.name for f in model._meta.fields]<EOL>fields = {}<EOL>for k, v in iteritems(properties):<EOL><INDENT>if k in field_names:<EOL><INDENT>fields[k] = v<EOL><DEDENT><DEDENT>obj = {<EOL>"<STR_LIT>": model_name,<EOL>"<STR_LIT>": dictobj.get('<STR_LIT:id>') or properties.get('<STR_LIT:id>'),<EOL>"<STR_LIT>": fields<EOL>}<EOL>if isinstance(model._meta.get_field(geometry_field), GeoJSONField):<EOL><INDENT>obj['<STR_LIT>'][geometry_field] = dictobj['<STR_LIT>']<EOL><DEDENT>else:<EOL><INDENT>shape = GEOSGeometry(json.dumps(dictobj['<STR_LIT>']))<EOL>obj['<STR_LIT>'][geometry_field] = shape.wkt<EOL><DEDENT>return obj<EOL><DEDENT>if isinstance(stream_or_string, string_types):<EOL><INDENT>stream = StringIO(stream_or_string)<EOL><DEDENT>else:<EOL><INDENT>stream = stream_or_string<EOL><DEDENT>try:<EOL><INDENT>collection = json.load(stream)<EOL>objects = [FeatureToPython(f) for f in collection['<STR_LIT>']]<EOL>for obj in PythonDeserializer(objects, **options):<EOL><INDENT>yield obj<EOL><DEDENT><DEDENT>except GeneratorExit:<EOL><INDENT>raise<EOL><DEDENT>except Exception as e:<EOL><INDENT>raise DeserializationError(repr(e))<EOL><DEDENT> | Deserialize a stream or string of JSON data. | f1645:m3 |
def get_all_related_objects(opts): | if django.VERSION < (<NUM_LIT:1>, <NUM_LIT:8>):<EOL><INDENT>return opts.get_all_related_objects()<EOL><DEDENT>else:<EOL><INDENT>return [r for r in opts.related_objects if not r.field.many_to_many]<EOL><DEDENT> | Django 1.8 changed meta api, see
https://docs.djangoproject.com/en/1.8/ref/models/meta/#migrating-old-meta-api
https://code.djangoproject.com/ticket/12663
https://github.com/django/django/pull/3848
Initially from Django REST Framework:
https://github.com/tomchristie/django-rest-framework/blob/3.3.2/rest_framework/compat.py
:param opts: Options instance
:return: list of relations except many-to-many ones | f1645:m4 |
def get_all_related_many_to_many_objects(opts): | if django.VERSION < (<NUM_LIT:1>, <NUM_LIT:8>):<EOL><INDENT>return opts.get_all_related_many_to_many_objects()<EOL><DEDENT>else:<EOL><INDENT>return [r for r in opts.related_objects if r.field.many_to_many]<EOL><DEDENT> | Django 1.8 changed meta api, see docstr in get_all_related_objects()
:param opts: Options instance
:return: list of many-to-many relations | f1645:m5 |
def _handle_geom(self, value): | if value is None:<EOL><INDENT>geometry = None<EOL><DEDENT>elif isinstance(value, dict) and '<STR_LIT:type>' in value:<EOL><INDENT>geometry = value<EOL><DEDENT>else:<EOL><INDENT>if isinstance(value, GEOSGeometry):<EOL><INDENT>geometry = value<EOL><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>geometry = GEOSGeometry(value)<EOL><DEDENT>except ValueError:<EOL><INDENT>error_msg = '<STR_LIT>' % (<EOL>self.geometry_field, value<EOL>)<EOL>raise SerializationError(error_msg)<EOL><DEDENT><DEDENT>if self.options.get('<STR_LIT>'):<EOL><INDENT>wkb_w = WKBWriter()<EOL>wkb_w.outdim = <NUM_LIT:2><EOL>geometry = GEOSGeometry(wkb_w.write(geometry), srid=geometry.srid)<EOL><DEDENT>simplify = self.options.get('<STR_LIT>')<EOL>if simplify is not None:<EOL><INDENT>geometry = geometry.simplify(tolerance=simplify, preserve_topology=True)<EOL><DEDENT>if geometry.srid and geometry.srid != self.srid:<EOL><INDENT>geometry.transform(self.srid)<EOL><DEDENT>if self.options.get('<STR_LIT>'):<EOL><INDENT>self._current['<STR_LIT>'] = geometry.extent<EOL><DEDENT><DEDENT>self._current['<STR_LIT>'] = geometry<EOL> | Geometry processing (in place), depending on options | f1645:c1:m5 |
def serialize(self, queryset, **options): | self.options = options<EOL>self.stream = options.get("<STR_LIT>", StringIO())<EOL>self.primary_key = options.get("<STR_LIT:primary_key>", None)<EOL>self.properties = options.get("<STR_LIT>")<EOL>self.geometry_field = options.get("<STR_LIT>", "<STR_LIT>")<EOL>self.use_natural_keys = options.get("<STR_LIT>", False)<EOL>self.bbox = options.get("<STR_LIT>", None)<EOL>self.bbox_auto = options.get("<STR_LIT>", None)<EOL>self.srid = options.get("<STR_LIT>", GEOJSON_DEFAULT_SRID)<EOL>self.crs = options.get("<STR_LIT>", True)<EOL>self.start_serialization()<EOL>if ValuesQuerySet is not None and isinstance(queryset, ValuesQuerySet):<EOL><INDENT>self.serialize_values_queryset(queryset)<EOL><DEDENT>elif isinstance(queryset, list):<EOL><INDENT>self.serialize_object_list(queryset)<EOL><DEDENT>elif isinstance(queryset, QuerySet):<EOL><INDENT>self.serialize_queryset(queryset)<EOL><DEDENT>self.end_serialization()<EOL>return self.getvalue()<EOL> | Serialize a queryset. | f1645:c1:m14 |
@register.filter<EOL>def geojsonfeature(source, params='<STR_LIT>'): | parse = re.search(r'<STR_LIT>', params)<EOL>if parse:<EOL><INDENT>parse = parse.groupdict()<EOL><DEDENT>else:<EOL><INDENT>parse = {}<EOL><DEDENT>geometry_field = parse.get('<STR_LIT>') or '<STR_LIT>'<EOL>properties = parse.get('<STR_LIT>', '<STR_LIT>').split('<STR_LIT:U+002C>')<EOL>srid = parse.get('<STR_LIT>') or GEOJSON_DEFAULT_SRID<EOL>if source is None or isinstance(source, string_types):<EOL><INDENT>return '<STR_LIT:null>'<EOL><DEDENT>if isinstance(source, (GEOSGeometry, GeometryField)):<EOL><INDENT>encoder = DjangoGeoJSONEncoder()<EOL>if source.srid != srid:<EOL><INDENT>source.transform(srid)<EOL><DEDENT>feature = {"<STR_LIT:type>": "<STR_LIT>", "<STR_LIT>": {}}<EOL>feature['<STR_LIT>'] = encoder.default(source)<EOL>return json.dumps(feature)<EOL><DEDENT>serializer = Serializer()<EOL>if not hasattr(source, '<STR_LIT>'):<EOL><INDENT>source = [source]<EOL><DEDENT>return serializer.serialize(source, properties=properties,<EOL>geometry_field=geometry_field, srid=srid)<EOL> | :params: A string with the following optional tokens:
"properties:field:srid" | f1646:m0 |
def render_to_response(self, context, **response_kwargs): | serializer = GeoJSONSerializer()<EOL>response = self.response_class(**response_kwargs)<EOL>queryset = self.get_queryset()<EOL>options = dict(properties=self.properties,<EOL>precision=self.precision,<EOL>simplify=self.simplify,<EOL>srid=self.srid,<EOL>geometry_field=self.geometry_field,<EOL>force2d=self.force2d,<EOL>bbox=self.bbox,<EOL>bbox_auto=self.bbox_auto,<EOL>use_natural_keys=self.use_natural_keys,<EOL>with_modelname=self.with_modelname)<EOL>serializer.serialize(queryset, stream=response, ensure_ascii=False,<EOL>**options)<EOL>return response<EOL> | Returns a JSON response, transforming 'context' to make the payload. | f1647:c0:m0 |
def tile_coord(self, xtile, ytile, zoom): | assert self.tile_srid == <NUM_LIT>, '<STR_LIT>'<EOL>n = <NUM_LIT> ** zoom<EOL>lon_deg = xtile / n * <NUM_LIT> - <NUM_LIT><EOL>lat_rad = math.atan(math.sinh(math.pi * (<NUM_LIT:1> - <NUM_LIT:2> * ytile / n)))<EOL>lat_deg = math.degrees(lat_rad)<EOL>return (lon_deg, lat_deg)<EOL> | This returns the NW-corner of the square. Use the function
with xtile+1 and/or ytile+1 to get the other corners.
With xtile+0.5 & ytile+0.5 it will return the center of the tile.
http://wiki.openstreetmap.org/wiki/Slippy_map_tilenames#Tile_numbers_to_lon..2Flat._2 | f1647:c2:m0 |
def get_queryset(self): | self.z, self.x, self.y = self._parse_args()<EOL>nw = self.tile_coord(self.x, self.y, self.z)<EOL>se = self.tile_coord(self.x + <NUM_LIT:1>, self.y + <NUM_LIT:1>, self.z)<EOL>bbox = Polygon((nw, (se[<NUM_LIT:0>], nw[<NUM_LIT:1>]),<EOL>se, (nw[<NUM_LIT:0>], se[<NUM_LIT:1>]), nw))<EOL>qs = super(TiledGeoJSONLayerView, self).get_queryset()<EOL>qs = qs.filter(**{<EOL>'<STR_LIT>' % self.geometry_field: bbox<EOL>})<EOL>self.bbox = bbox.extent<EOL>simplifications = self.simplifications or {}<EOL>z = self.z<EOL>self.simplify = simplifications.get(z)<EOL>while self.simplify is None and z < <NUM_LIT:32>:<EOL><INDENT>z += <NUM_LIT:1><EOL>self.simplify = simplifications.get(z)<EOL><DEDENT>model_field = qs.model._meta.get_field(self.geometry_field)<EOL>self.trim_to_boundary = (self.trim_to_boundary and<EOL>not isinstance(model_field, PointField) and<EOL>Intersection is not None)<EOL>if self.trim_to_boundary:<EOL><INDENT>if django.VERSION < (<NUM_LIT:1>, <NUM_LIT:9>):<EOL><INDENT>qs = qs.intersection(bbox)<EOL><DEDENT>else:<EOL><INDENT>qs = qs.annotate(intersection=Intersection(self.geometry_field, bbox))<EOL><DEDENT>self.geometry_field = '<STR_LIT>'<EOL><DEDENT>return qs<EOL> | Inspired by Glen Roberton's django-geojson-tiles view | f1647:c2:m2 |
def convert(self, text): | url = self.API_URL<EOL>encoding = self.ENCODING<EOL>headers = self.HEADERS<EOL>data = urlencode({<EOL>'<STR_LIT>': str(text)<EOL>}).encode(encoding)<EOL>request = Request(url=url,data=data,headers=headers)<EOL>response = urlopen(request)<EOL>result = response.read()<EOL>response_encoding = response.headers['<STR_LIT:Content-Type>']<EOL>response_encoding = response_encoding[response_encoding.find('<STR_LIT:=>')+<NUM_LIT:1>:]<EOL>result = result.decode(response_encoding)<EOL>result = result.replace('<STR_LIT>','<STR_LIT>')[:-<NUM_LIT:1>]<EOL>return result<EOL> | Convert Finglish (or whatever you'd like to call it) to Persian.
Gets and returns a string. | f1654:c0:m0 |
def get_url(city: Optional[str]) -> str: | if not city:<EOL><INDENT>city = '<STR_LIT>'<EOL><DEDENT>return "<STR_LIT>""<STR_LIT>""<STR_LIT>""<STR_LIT>" + city + "<STR_LIT>""<STR_LIT>""<STR_LIT>"<EOL> | Get the URL for the Yahoo weather API for a
given city | f1664:m0 |
def get_data(city: Optional[str]) -> Dict[str, Any]: | req = urllib.request.Request(get_url(city))<EOL>with urllib.request.urlopen(req) as f:<EOL><INDENT>response = f.read()<EOL><DEDENT>answer = response.decode('<STR_LIT:ascii>')<EOL>data = json.loads(answer)<EOL>r = data['<STR_LIT>']['<STR_LIT>']['<STR_LIT>'] <EOL>return r<EOL> | Use the Yahoo weather API to get weather information | f1664:m1 |
def _forwardrefload(l: Loader, value: Any, type_: type) -> Any: | if l.frefs is None:<EOL><INDENT>raise TypedloadException('<STR_LIT>', value=value, type_=type_)<EOL><DEDENT>tname = type_.__forward_arg__ <EOL>t = l.frefs.get(tname)<EOL>if t is None:<EOL><INDENT>raise TypedloadValueError(<EOL>"<STR_LIT>" % tname,<EOL>value=value,<EOL>type_=type_<EOL>)<EOL><DEDENT>return l.load(value, t, annotation=Annotation(AnnotationType.FORWARDREF, tname))<EOL> | This resolves a ForwardRef.
It just looks up the type in the dictionary of known types
and loads the value using that. | f1665:m0 |
def _basicload(l: Loader, value: Any, type_: type) -> Any: | if type(value) != type_:<EOL><INDENT>if l.basiccast:<EOL><INDENT>try:<EOL><INDENT>return type_(value)<EOL><DEDENT>except ValueError as e:<EOL><INDENT>raise TypedloadValueError(str(e), value=value, type_=type_)<EOL><DEDENT>except TypeError as e:<EOL><INDENT>raise TypedloadTypeError(str(e), value=value, type_=type_)<EOL><DEDENT>except Exception as e:<EOL><INDENT>raise TypedloadException(str(e), value=value, type_=type_)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise TypedloadValueError('<STR_LIT>' % type_, value=value, type_=type_)<EOL><DEDENT><DEDENT>return value<EOL> | This converts a value into a basic type.
In theory it does nothing, but it performs type checking
and raises if conditions fail.
It also attempts casting, if enabled. | f1665:m1 |
def _listload(l: Loader, value, type_) -> List: | t = type_.__args__[<NUM_LIT:0>]<EOL>try:<EOL><INDENT>return [l.load(v, t, annotation=Annotation(AnnotationType.INDEX, i)) for i, v in enumerate(value)]<EOL><DEDENT>except TypeError as e:<EOL><INDENT>if isinstance(e, TypedloadException):<EOL><INDENT>raise<EOL><DEDENT>raise TypedloadTypeError(str(e), value=value, type_=type_)<EOL><DEDENT> | This loads into something like List[int] | f1665:m2 |
def _dictload(l: Loader, value, type_) -> Dict: | key_type, value_type = type_.__args__<EOL>try:<EOL><INDENT>return {<EOL>l.load(k, key_type, annotation=Annotation(AnnotationType.KEY, k)): l.load(v, value_type, annotation=Annotation(AnnotationType.VALUE, v))<EOL>for k, v in value.items()}<EOL><DEDENT>except AttributeError as e:<EOL><INDENT>raise TypedloadAttributeError(str(e), type_=type_, value=value)<EOL><DEDENT> | This loads into something like Dict[str,str]
Recursively loads both keys and values. | f1665:m3 |
def _setload(l: Loader, value, type_) -> Set: | t = type_.__args__[<NUM_LIT:0>]<EOL>return {l.load(i, t) for i in value}<EOL> | This loads into something like Set[int] | f1665:m4 |
def _frozensetload(l: Loader, value, type_) -> FrozenSet: | t = type_.__args__[<NUM_LIT:0>]<EOL>return frozenset(l.load(i, t) for i in value)<EOL> | This loads into something like FrozenSet[int] | f1665:m5 |
def _tupleload(l: Loader, value, type_) -> Tuple: | if HAS_TUPLEARGS:<EOL><INDENT>args = type_.__args__<EOL><DEDENT>else:<EOL><INDENT>args = type_.__tuple_params__<EOL><DEDENT>if len(args) == <NUM_LIT:2> and args[<NUM_LIT:1>] == ...: <EOL><INDENT>return tuple(l.load(i, args[<NUM_LIT:0>]) for i in value)<EOL><DEDENT>else: <EOL><INDENT>if l.failonextra and len(value) > len(args):<EOL><INDENT>raise TypedloadValueError('<STR_LIT>' % type_, value=value, type_=type_)<EOL><DEDENT>elif len(value) < len(args):<EOL><INDENT>raise TypedloadValueError('<STR_LIT>' % type_, value=value, type_=type_)<EOL><DEDENT>return tuple(l.load(v, t, annotation=Annotation(AnnotationType.INDEX, i)) for i, (v, t) in enumerate(zip(value, args)))<EOL><DEDENT> | This loads into something like Tuple[int,str] | f1665:m6 |
def _namedtupleload(l: Loader, value: Dict[str, Any], type_) -> Tuple: | if not hasattr(type_, '<STR_LIT>'):<EOL><INDENT>fields = set(type_._fields)<EOL>optional_fields = set(getattr(type_, '<STR_LIT>', {}).keys())<EOL>type_hints = type_._field_types<EOL><DEDENT>else:<EOL><INDENT>import dataclasses<EOL>fields = set(type_.__dataclass_fields__.keys())<EOL>optional_fields = {k for k,v in type_.__dataclass_fields__.items() if not (isinstance(getattr(v, '<STR_LIT:default>', dataclasses._MISSING_TYPE()), dataclasses._MISSING_TYPE) and isinstance(getattr(v, '<STR_LIT>', dataclasses._MISSING_TYPE()), dataclasses._MISSING_TYPE))}<EOL>type_hints = {k: v.type for k,v in type_.__dataclass_fields__.items()}<EOL>transforms = [] <EOL>for field in fields:<EOL><INDENT>if type_.__dataclass_fields__[field].metadata:<EOL><INDENT>name = type_.__dataclass_fields__[field].metadata.get('<STR_LIT:name>')<EOL>if name:<EOL><INDENT>transforms.append((field, name))<EOL><DEDENT><DEDENT><DEDENT>if transforms:<EOL><INDENT>value = value.copy()<EOL>for pyname, dataname in transforms:<EOL><INDENT>if dataname in value:<EOL><INDENT>tmp = value[dataname]<EOL>del value[dataname]<EOL>value[pyname] = tmp<EOL><DEDENT><DEDENT><DEDENT><DEDENT>necessary_fields = fields.difference(optional_fields)<EOL>try:<EOL><INDENT>vfields = set(value.keys())<EOL><DEDENT>except AttributeError as e:<EOL><INDENT>raise TypedloadAttributeError(str(e), value=value, type_=type_)<EOL><DEDENT>if necessary_fields.intersection(vfields) != necessary_fields:<EOL><INDENT>raise TypedloadValueError(<EOL>'<STR_LIT>' % (<EOL>necessary_fields.difference(vfields),<EOL>type_<EOL>),<EOL>value=value,<EOL>type_=type_,<EOL>)<EOL><DEDENT>fieldsdiff = vfields.difference(fields)<EOL>if l.failonextra and len(fieldsdiff):<EOL><INDENT>extra = '<STR_LIT:U+002CU+0020>'.join(fieldsdiff)<EOL>raise TypedloadValueError(<EOL>'<STR_LIT>' % (extra, type_),<EOL>value=value,<EOL>type_=type_,<EOL>)<EOL><DEDENT>params = {}<EOL>for k, v in value.items():<EOL><INDENT>if k not in fields:<EOL><INDENT>continue<EOL><DEDENT>params[k] = l.load(<EOL>v,<EOL>type_hints[k],<EOL>annotation=Annotation(AnnotationType.FIELD, k),<EOL>)<EOL><DEDENT>return type_(**params)<EOL> | This loads a Dict[str, Any] into a NamedTuple. | f1665:m7 |
def _unionload(l: Loader, value, type_) -> Any: | try:<EOL><INDENT>args = uniontypes(type_)<EOL><DEDENT>except AttributeError:<EOL><INDENT>raise TypedloadAttributeError('<STR_LIT>')<EOL><DEDENT>if type(value) in args.intersection(l.basictypes):<EOL><INDENT>return value<EOL><DEDENT>exceptions = []<EOL>for t in args:<EOL><INDENT>try:<EOL><INDENT>return l.load(value, t, annotation=Annotation(AnnotationType.UNION, t))<EOL><DEDENT>except Exception as e:<EOL><INDENT>exceptions.append(e)<EOL><DEDENT><DEDENT>raise TypedloadValueError(<EOL>'<STR_LIT>' % type_,<EOL>value=value,<EOL>type_=type_,<EOL>exceptions=exceptions<EOL>)<EOL> | Loads a value into a union.
Basically this iterates all the types inside the
union, until one that doesn't raise an exception
is found.
If no suitable type is found, an exception is raised. | f1665:m8 |
def _enumload(l: Loader, value, type_) -> Enum: | try:<EOL><INDENT>return type_(value)<EOL><DEDENT>except:<EOL><INDENT>pass<EOL><DEDENT>for _, t in get_type_hints(type_).items():<EOL><INDENT>try:<EOL><INDENT>return type_(l.load(value, t))<EOL><DEDENT>except:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>raise TypedloadValueError(<EOL>'<STR_LIT>' % type_,<EOL>value=value,<EOL>type_=type_<EOL>)<EOL> | This loads something into an Enum.
It tries with basic types first.
If that fails, it tries to look for type annotations inside the
Enum, and tries to use those to load the value into something
that is compatible with the Enum.
Of course if that fails too, a ValueError is raised. | f1665:m9 |
def _noneload(l: Loader, value, type_) -> None: | if value is None:<EOL><INDENT>return None<EOL><DEDENT>raise TypedloadValueError('<STR_LIT>', value=value, type_=type_)<EOL> | Loads a value that can only be None,
so it fails if it isn't | f1665:m10 |
def index(self, type_: Type[T]) -> int: | for i, cond in ((q[<NUM_LIT:0>], q[<NUM_LIT:1>][<NUM_LIT:0>]) for q in enumerate(self.handlers)):<EOL><INDENT>try:<EOL><INDENT>match = cond(type_)<EOL><DEDENT>except:<EOL><INDENT>if self.raiseconditionerrors:<EOL><INDENT>raise<EOL><DEDENT>match = False<EOL><DEDENT>if match:<EOL><INDENT>return i<EOL><DEDENT><DEDENT>raise ValueError('<STR_LIT>')<EOL> | Returns the index in the handlers list
that matches the given type.
If no condition matches, ValueError is raised. | f1665:c0:m1 |
def load(self, value: Any, type_: Type[T], *, annotation: Optional[Annotation] = None) -> T: | try:<EOL><INDENT>index = self.index(type_)<EOL><DEDENT>except ValueError:<EOL><INDENT>raise TypedloadTypeError(<EOL>'<STR_LIT>' % type_,<EOL>value=value,<EOL>type_=type_<EOL>)<EOL><DEDENT>if self.frefs is not None and hasattr(type_, '<STR_LIT>'):<EOL><INDENT>tname = type_.__name__<EOL>if tname not in self.frefs:<EOL><INDENT>self.frefs[tname] = type_<EOL><DEDENT><DEDENT>func = self.handlers[index][<NUM_LIT:1>]<EOL>try:<EOL><INDENT>return func(self, value, type_)<EOL><DEDENT>except Exception as e:<EOL><INDENT>assert isinstance(e, TypedloadException)<EOL>e.trace.insert(<NUM_LIT:0>, TraceItem(value, type_, annotation))<EOL>raise e<EOL><DEDENT> | Loads value into the typed data structure.
TypeError is raised if there is no known way to treat type_,
otherwise all errors raise a ValueError. | f1665:c0:m2 |
def add2dumper(l) -> None: | l.handlers.append((_condition, _attrdump))<EOL> | Adds the "attr" handler to an existing dumper | f1666:m2 |
def add2loader(l) -> None: | l.handlers.append((is_attrs, _attrload))<EOL> | Adds the "attr" handler to an existing loader | f1667:m1 |
def load(value: Any, type_: Type[T], **kwargs) -> T: | from . import dataloader<EOL>loader = dataloader.Loader(**kwargs)<EOL>return loader.load(value, type_)<EOL> | Quick function call to load data into a type.
It is useful to avoid creating the Loader object,
in case only the default parameters are used. | f1668:m0 |
def dump(value: Any, **kwargs) -> Any: | from . import datadumper<EOL>dumper = datadumper.Dumper(**kwargs)<EOL>return dumper.dump(value)<EOL> | Quick function to dump a data structure into
something that is compatible with json or
other programs and languages.
It is useful to avoid creating the Dumper object,
in case only the default parameters are used. | f1668:m1 |
def attrload(value: Any, type_: Type[T], **kwargs) -> T: | from . import dataloader<EOL>from .plugins import attrload as loadplugin<EOL>loader = dataloader.Loader(**kwargs)<EOL>loadplugin.add2loader(loader)<EOL>return loader.load(value, type_)<EOL> | Quick function call to load data supporting the "attr" module
in addition to the default ones. | f1668:m2 |
def attrdump(value: Any, **kwargs) -> Any: | from . import datadumper<EOL>from .plugins import attrdump as dumpplugin<EOL>dumper = datadumper.Dumper(**kwargs)<EOL>dumpplugin.add2dumper(dumper)<EOL>return dumper.dump(value)<EOL> | Quick function to do a dump that supports the "attr"
module. | f1668:m3 |
def _issubclass(t1, t2) -> bool: | try:<EOL><INDENT>return issubclass(t1, t2)<EOL><DEDENT>except TypeError:<EOL><INDENT>return False<EOL><DEDENT> | Wrapper around issubclass to circumvent Python 3.7 changing the API | f1669:m0 |
def is_tuple(type_: Type[Any]) -> bool: | if HAS_TUPLEARGS:<EOL><INDENT>return _generic_type_check(type_, tuple, Tuple)<EOL><DEDENT>else:<EOL><INDENT>return _issubclass(type_, Tuple) and _issubclass(type_, tuple) == False<EOL><DEDENT> | Tuple[int, str]
Tuple | f1669:m1 |
def is_union(type_: Type[Any]) -> bool: | if HAS_UNIONSUBCLASS:<EOL><INDENT>return _issubclass(type_, Union)<EOL><DEDENT>else:<EOL><INDENT>return getattr(type_, '<STR_LIT>', None) == Union<EOL><DEDENT> | Union[A, B]
Union
Optional[A] | f1669:m2 |
def is_nonetype(type_: Type[Any]) -> bool: | return type_ == NONETYPE<EOL> | type_ == type(None) | f1669:m3 |
def is_list(type_: Type[Any]) -> bool: | return _generic_type_check(type_, list, List)<EOL> | List[A]
List | f1669:m5 |
def is_dict(type_: Type[Any]) -> bool: | return _generic_type_check(type_, dict, Dict)<EOL> | Dict[A, B]
Dict | f1669:m6 |
def is_set(type_: Type[Any]) -> bool: | return _generic_type_check(type_, set, Set)<EOL> | Set[A]
Set | f1669:m7 |
def is_frozenset(type_: Type[Any]) -> bool: | return _generic_type_check(type_, frozenset, FrozenSet)<EOL> | FrozenSet[A]
FrozenSet | f1669:m8 |
def is_enum(type_: Type[Any]) -> bool: | return _issubclass(type_, Enum)<EOL> | Check if the class is a subclass of Enum | f1669:m9 |
def is_namedtuple(type_: Type[Any]) -> bool: | return _issubclass(type_, tuple) and hasattr(type_, '<STR_LIT>') and hasattr(type_, '<STR_LIT>')<EOL> | Generated with typing.NamedTuple | f1669:m10 |
def is_dataclass(type_: Type[Any]) -> bool: | return hasattr(type_, '<STR_LIT>')<EOL> | dataclass (introduced in Python 3.7) | f1669:m11 |
def is_forwardref(type_: Type[Any]) -> bool: | return type(type_) == ForwardRef<EOL> | Check if it's a ForwardRef.
They are unresolved types passed as strings, supposed to
be resolved into types at a later moment | f1669:m12 |
def is_attrs(type_: Type[Any]) -> bool: | return hasattr(type_, '<STR_LIT>')<EOL> | Check if the type is obtained with an
@attr.s decorator | f1669:m13 |
def uniontypes(type_: Type[Any]) -> Set[Type[Any]]: | if not is_union(type_):<EOL><INDENT>raise ValueError('<STR_LIT>' + str(type_))<EOL><DEDENT>if hasattr(type_, '<STR_LIT>'):<EOL><INDENT>return set(type_.__args__)<EOL><DEDENT>elif hasattr(type_, '<STR_LIT>'):<EOL><INDENT>return set(type_.__union_params__)<EOL><DEDENT>raise AttributeError('<STR_LIT>')<EOL> | Returns the types of a Union.
Raises ValueError if the argument is not a Union
and AttributeError when running on an unsupported
Python version. | f1669:m14 |
def __init__(self, **kwargs): | self.basictypes = {int, bool, float, str, NONETYPE}<EOL>self.hidedefault = True<EOL>self.raiseconditionerrors = True<EOL>self.handlers = [<EOL>(lambda value: type(value) in self.basictypes, lambda l, value: value),<EOL>(lambda value: isinstance(value, tuple) and hasattr(value, '<STR_LIT>') and hasattr(value, '<STR_LIT>'), _namedtupledump),<EOL>(lambda value: '<STR_LIT>' in dir(value), _dataclassdump),<EOL>(lambda value: isinstance(value, (list, tuple, set, frozenset)), lambda l, value: [l.dump(i) for i in value]),<EOL>(lambda value: isinstance(value, Enum), lambda l, value: l.dump(value.value)),<EOL>(lambda value: isinstance(value, Dict), lambda l, value: {l.dump(k): l.dump(v) for k, v in value.items()}),<EOL>(lambda value: isinstance(value, (datetime.date, datetime.time)), _datetimedump),<EOL>] <EOL>for k, v in kwargs.items():<EOL><INDENT>setattr(self, k, v)<EOL><DEDENT> | This dumps data structures recursively using only
basic types, lists and dictionaries.
A value dumped in this way from a typed data structure
can be loaded back using dataloader.
hidedefault: Enabled by default.
When enabled, does not include fields that have the
same value as the default in the dump.
raiseconditionerrors: Enabled by default.
Raises exceptions when evaluating a condition from an
handler. When disabled, the exceptions are not raised
and the condition is considered False.
handlers: This is the list that the dumper uses to
perform its task.
The type is:
List[
Tuple[
Callable[[Any], bool],
Callable[['Dumper', Any], Any]
]
]
The elements are: Tuple[Condition, Dumper]
Condition(value) -> Bool
Dumper(dumper, value) -> simpler_value
In most cases, it is sufficient to append new elements
at the end, to handle more types.
These parameters can be set as named arguments in the constructor
or they can be set later on.
The constructor will accept any named argument, but only the documented
ones have any effect. This is to allow custom handlers to have their
own parameters as well.
There is support for:
* Basic python types (int, str, bool, float, NoneType)
* NamedTuple
* Enum
* List[SomeType]
* Dict[TypeA, TypeB]
* Tuple[TypeA, TypeB, TypeC]
* Set[SomeType] | f1670:c0:m0 |
def index(self, value: Any) -> int: | for i, cond in ((j[<NUM_LIT:0>], j[<NUM_LIT:1>][<NUM_LIT:0>]) for j in enumerate(self.handlers)):<EOL><INDENT>try:<EOL><INDENT>match = cond(value)<EOL><DEDENT>except:<EOL><INDENT>if self.raiseconditionerrors:<EOL><INDENT>raise<EOL><DEDENT>match = False<EOL><DEDENT>if match:<EOL><INDENT>return i<EOL><DEDENT><DEDENT>raise TypedloadValueError('<STR_LIT>' % value, value=value)<EOL> | Returns the index in the handlers list
that matches the given value.
If no condition matches, ValueError is raised. | f1670:c0:m1 |
def dump(self, value: Any) -> Any: | index = self.index(value)<EOL>func = self.handlers[index][<NUM_LIT:1>]<EOL>return func(self, value)<EOL> | Dump the typed data structure into its
untyped equivalent. | f1670:c0:m2 |
def serialize_compact(jwt): | return '<STR_LIT:.>'.join(jwt)<EOL> | Compact serialization of a :class:`~jose.JWE` or :class:`~jose.JWS`
:rtype: str
:returns: A string, representing the compact serialization of a
:class:`~jose.JWE` or :class:`~jose.JWS`. | f1673:m0 |
def deserialize_compact(jwt): | parts = jwt.split('<STR_LIT:.>')<EOL>if len(parts) == <NUM_LIT:3>:<EOL><INDENT>token_type = JWS<EOL><DEDENT>elif len(parts) == <NUM_LIT:5>:<EOL><INDENT>token_type = JWE<EOL><DEDENT>else:<EOL><INDENT>raise Error('<STR_LIT>')<EOL><DEDENT>return token_type(*parts)<EOL> | Deserialization of a compact representation of a :class:`~jwt.JWE`
:param jwt: The serialized JWT to deserialize.
:rtype: :class:`~jose.JWT`.
:raises: :class:`~jose.Error` if the JWT is malformed | f1673:m1 |
def encrypt(claims, jwk, adata='<STR_LIT>', add_header=None, alg='<STR_LIT>',<EOL>enc='<STR_LIT>', rng=get_random_bytes, compression=None): | <EOL>claims = deepcopy(claims)<EOL>assert _TEMP_VER_KEY not in claims<EOL>claims[_TEMP_VER_KEY] = _TEMP_VER<EOL>header = dict(list((add_header or {}).items()) + [<EOL>(HEADER_ENC, enc), (HEADER_ALG, alg)])<EOL>assert _TEMP_VER_KEY not in header<EOL>header[_TEMP_VER_KEY] = claims[_TEMP_VER_KEY]<EOL>plaintext = json_encode(claims)<EOL>if compression is not None:<EOL><INDENT>header[HEADER_ZIP] = compression<EOL>try:<EOL><INDENT>(compress, _) = COMPRESSION[compression]<EOL><DEDENT>except KeyError:<EOL><INDENT>raise Error(<EOL>'<STR_LIT>'.format(compression))<EOL><DEDENT>plaintext = compress(plaintext)<EOL><DEDENT>((cipher, _), key_size), ((hash_fn, _), hash_mod) = JWA[enc]<EOL>iv = rng(AES.block_size)<EOL>encryption_key = rng(hash_mod.digest_size)<EOL>ciphertext = cipher(plaintext, encryption_key[-hash_mod.digest_size/<NUM_LIT:2>:], iv)<EOL>hash = hash_fn(_jwe_hash_str(ciphertext, iv, adata),<EOL>encryption_key[:-hash_mod.digest_size/<NUM_LIT:2>], hash_mod)<EOL>(cipher, _), _ = JWA[alg]<EOL>encryption_key_ciphertext = cipher(encryption_key, jwk)<EOL>return JWE(*list(map(b64encode_url,<EOL>(json_encode(header),<EOL>encryption_key_ciphertext,<EOL>iv,<EOL>ciphertext,<EOL>auth_tag(hash)))))<EOL> | Encrypts the given claims and produces a :class:`~jose.JWE`
:param claims: A `dict` representing the claims for this
:class:`~jose.JWE`.
:param jwk: A `dict` representing the JWK to be used for encryption of
the CEK. This parameter is algorithm-specific.
:param adata: Arbitrary string data to add to the authentication
(i.e. HMAC). The same data must be provided during
decryption.
:param add_header: Additional items to be added to the header. Additional
headers *will* be authenticated.
:param alg: The algorithm to use for CEK encryption
:param enc: The algorithm to use for claims encryption
:param rng: Random number generator. A string of random bytes is expected
as output.
:param compression: The compression algorithm to use. Currently supports
`'DEF'`.
:rtype: :class:`~jose.JWE`
:raises: :class:`~jose.Error` if there is an error producing the JWE | f1673:m10 |
def spec_compliant_encrypt(claims, jwk, add_header=None, alg='<STR_LIT>',<EOL>enc='<STR_LIT>', rng=get_random_bytes): | <EOL>header = dict(list((add_header or {}).items()) + [(HEADER_ENC, enc),<EOL>(HEADER_ALG, alg)])<EOL>protected_header = json_encode(header)<EOL>mac_key, enc_key = _generate_encryption_keys(enc, rng)<EOL>encrypted_key = _encrypt_key(mac_key + enc_key, jwk, alg)<EOL>iv = _generate_iv(enc, rng)<EOL>plaintext = json_encode(claims)<EOL>if HEADER_ZIP in header:<EOL><INDENT>try:<EOL><INDENT>(compression_func, _) = COMPRESSION[header[HEADER_ZIP]]<EOL><DEDENT>except KeyError:<EOL><INDENT>raise Error(<EOL>'<STR_LIT>'.format(header[HEADER_ZIP]))<EOL><DEDENT>M = compression_func(plaintext)<EOL><DEDENT>else:<EOL><INDENT>M = plaintext<EOL><DEDENT>((cipher, _), key_len), _ = JWA[enc]<EOL>ciphertext = cipher(M, enc_key, iv)<EOL>authentication_tag = _generate_authentication_tag(<EOL>mac_key, protected_header, ciphertext, iv, enc<EOL>)<EOL>return JWE(<EOL>*list(map(<EOL>b64encode_url,<EOL>(protected_header, encrypted_key, iv, ciphertext,<EOL>authentication_tag)<EOL>))<EOL>)<EOL> | Encrypts the given claims and produces a :class:`~jose.JWE`
:param claims: A `dict` representing the claims for this
:class:`~jose.JWE`.
:param jwk: A `dict` representing the JWK to be used for encryption of
the CEK. This parameter is algorithm-specific.
:param add_header: Additional items to be added to the header. Additional
headers *will* be authenticated.
:param alg: The algorithm to use for CEK encryption
:param enc: The algorithm to use for claims encryption
:param rng: Random number generator. A string of random bytes is expected
as output.
:param compression: The compression algorithm to use. Currently supports
`'DEF'`.
:rtype: :class:`~jose.JWE`
:raises: :class:`~jose.Error` if there is an error producing the JWE | f1673:m11 |
def legacy_decrypt(jwe, jwk, adata='<STR_LIT>', validate_claims=True,<EOL>expiry_seconds=None): | protected_header, encrypted_key, iv, ciphertext, authentication_tag = list(map(<EOL>b64decode_url, jwe))<EOL>header = json_decode(protected_header)<EOL>alg = header[HEADER_ALG]<EOL>enc = header[HEADER_ENC]<EOL>encryption_key = _decrypt_key(encrypted_key, jwk, alg)<EOL>((_, decipher), _), ((hash_fn, _), mod) = JWA[enc]<EOL>version = header.get(_TEMP_VER_KEY)<EOL>if version:<EOL><INDENT>plaintext = decipher(ciphertext, encryption_key[-mod.digest_size/<NUM_LIT:2>:],<EOL>iv)<EOL>hash = hash_fn(_jwe_hash_str(ciphertext, iv, adata, version),<EOL>encryption_key[:-mod.digest_size/<NUM_LIT:2>], mod=mod)<EOL><DEDENT>else:<EOL><INDENT>plaintext = decipher(ciphertext, encryption_key[:-mod.digest_size], iv)<EOL>hash = hash_fn(_jwe_hash_str(ciphertext, iv, adata, version),<EOL>encryption_key[-mod.digest_size:], mod=mod)<EOL><DEDENT>if not const_compare(auth_tag(hash), authentication_tag):<EOL><INDENT>raise Error('<STR_LIT>')<EOL><DEDENT>if HEADER_ZIP in header:<EOL><INDENT>try:<EOL><INDENT>(_, decompress) = COMPRESSION[header[HEADER_ZIP]]<EOL><DEDENT>except KeyError:<EOL><INDENT>raise Error('<STR_LIT>'.format(<EOL>header[HEADER_ZIP]))<EOL><DEDENT>plaintext = decompress(plaintext)<EOL><DEDENT>claims = json_decode(plaintext)<EOL>try:<EOL><INDENT>del claims[_TEMP_VER_KEY]<EOL><DEDENT>except KeyError:<EOL><INDENT>pass<EOL><DEDENT>_validate(claims, validate_claims, expiry_seconds)<EOL>return JWT(header, claims)<EOL> | Decrypts a deserialized :class:`~jose.JWE`
:param jwe: An instance of :class:`~jose.JWE`
:param jwk: A `dict` representing the JWK required to decrypt the content
of the :class:`~jose.JWE`.
:param adata: Arbitrary string data used during encryption for additional
authentication.
:param validate_claims: A `bool` indicating whether or not the `exp`, `iat`
and `nbf` claims should be validated. Defaults to
`True`.
:param expiry_seconds: An `int` containing the JWT expiry in seconds, used
when evaluating the `iat` claim. Defaults to `None`,
which disables `iat` claim validation.
:rtype: :class:`~jose.JWT`
:raises: :class:`~jose.Expired` if the JWT has expired
:raises: :class:`~jose.NotYetValid` if the JWT is not yet valid
:raises: :class:`~jose.Error` if there is an error decrypting the JWE | f1673:m12 |
def spec_compliant_decrypt(jwe, jwk, validate_claims=True,<EOL>expiry_seconds=None): | protected_header, encrypted_key, iv, ciphertext, authentication_tag = list(map(<EOL>b64decode_url, jwe<EOL>))<EOL>header = json_decode(protected_header)<EOL>if not _verify_header(header):<EOL><INDENT>raise Error('<STR_LIT>')<EOL><DEDENT>alg = header[HEADER_ALG]<EOL>enc = header[HEADER_ENC]<EOL>encryption_key = _decrypt_key(encrypted_key, jwk, alg)<EOL>mac_key, enc_key = _parse_encryption_keys(encryption_key, enc)<EOL>expected_tag = _generate_authentication_tag(<EOL>mac_key, json_encode(header), ciphertext, iv, enc<EOL>)<EOL>if not const_compare(expected_tag, authentication_tag):<EOL><INDENT>raise Error('<STR_LIT>')<EOL><DEDENT>((_, decipher), _), _ = JWA[enc]<EOL>M = decipher(ciphertext, enc_key, iv)<EOL>if HEADER_ZIP in header:<EOL><INDENT>try:<EOL><INDENT>(_, decompress) = COMPRESSION[header[HEADER_ZIP]]<EOL><DEDENT>except KeyError:<EOL><INDENT>raise Error('<STR_LIT>'.format(<EOL>header[HEADER_ZIP]))<EOL><DEDENT>plaintext = decompress(M)<EOL><DEDENT>else:<EOL><INDENT>plaintext = M<EOL><DEDENT>claims = json_decode(plaintext)<EOL>_validate(claims, validate_claims, expiry_seconds)<EOL>return JWT(header, claims)<EOL> | Decrypts a deserialized :class:`~jose.JWE`
:param jwe: An instance of :class:`~jose.JWE`
:param jwk: A `dict` representing the JWK required to decrypt the content
of the :class:`~jose.JWE`.
:param validate_claims: A `bool` indicating whether or not the `exp`, `iat`
and `nbf` claims should be validated. Defaults to
`True`.
:param expiry_seconds: An `int` containing the JWT expiry in seconds, used
when evaluating the `iat` claim. Defaults to `None`,
which disables `iat` claim validation.
:rtype: :class:`~jose.JWT`
:raises: :class:`~jose.Expired` if the JWT has expired
:raises: :class:`~jose.NotYetValid` if the JWT is not yet valid
:raises: :class:`~jose.Error` if there is an error decrypting the JWE | f1673:m13 |