Dataset columns (type, value range):

repository_name             stringlengths     7 - 55
func_path_in_repository     stringlengths     4 - 223
func_name                   stringlengths     1 - 134
whole_func_string           stringlengths     75 - 104k
language                    stringclasses     1 value
func_code_string            stringlengths     75 - 104k
func_code_tokens            sequencelengths   19 - 28.4k
func_documentation_string   stringlengths     1 - 46.9k
func_documentation_tokens   sequencelengths   1 - 1.97k
split_name                  stringclasses     1 value
func_code_url               stringlengths     87 - 315
codeinthehole/django-cacheback
cacheback/base.py
Job.async_refresh
def async_refresh(self, *args, **kwargs):
    """
    Trigger an asynchronous job to refresh the cache
    """
    # We trigger the task with the class path to import as well as the
    # (a) args and kwargs for instantiating the class
    # (b) args and kwargs for calling the 'refresh' method
    try:
        enqueue_task(
            dict(
                klass_str=self.class_path,
                obj_args=self.get_init_args(),
                obj_kwargs=self.get_init_kwargs(),
                call_args=args,
                call_kwargs=kwargs
            ),
            task_options=self.task_options
        )
    except Exception:
        # Handle exceptions from talking to RabbitMQ - eg connection
        # refused. When this happens, we try to run the task
        # synchronously.
        logger.error("Unable to trigger task asynchronously - failing "
                     "over to synchronous refresh", exc_info=True)
        try:
            return self.refresh(*args, **kwargs)
        except Exception as e:
            # Something went wrong while running the task
            logger.error("Unable to refresh data synchronously: %s", e,
                         exc_info=True)
        else:
            logger.debug("Failover synchronous refresh completed successfully")
python
Trigger an asynchronous job to refresh the cache
[ "Trigger", "an", "asynchronous", "job", "to", "refresh", "the", "cache" ]
train
https://github.com/codeinthehole/django-cacheback/blob/0c79a524a28ca2fada98ed58c26c544f07a58e14/cacheback/base.py#L312-L344
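Usage note: the method above belongs to cacheback's Job class (cacheback/base.py). A rough sketch of how such a job is typically defined and consumed follows; the fetch() hook name and the example data are assumptions, not shown in this excerpt.

from cacheback.base import Job

class UserTweets(Job):
    # Cache results for five minutes before a background refresh kicks in.
    lifetime = 300

    def fetch(self, username):
        # Stand-in for a slow query or remote API call.
        return ['tweet about %s' % username]

# Later calls serve cached data while async_refresh() re-populates it in
# the background (or falls back to a synchronous refresh, as shown above).
tweets = UserTweets().get('codeinthehole')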
codeinthehole/django-cacheback
cacheback/base.py
Job.should_stale_item_be_fetched_synchronously
def should_stale_item_be_fetched_synchronously(self, delta, *args, **kwargs):
    """
    Return whether to refresh an item synchronously when it is found in
    the cache but stale
    """
    if self.fetch_on_stale_threshold is None:
        return False
    return delta > (self.fetch_on_stale_threshold - self.lifetime)
python
Return whether to refresh an item synchronously when it is found in the cache but stale
[ "Return", "whether", "to", "refresh", "an", "item", "synchronously", "when", "it", "is", "found", "in", "the", "cache", "but", "stale" ]
train
https://github.com/codeinthehole/django-cacheback/blob/0c79a524a28ca2fada98ed58c26c544f07a58e14/cacheback/base.py#L384-L391
codeinthehole/django-cacheback
cacheback/base.py
Job.key
def key(self, *args, **kwargs):
    """
    Return the cache key to use.

    If you're passing anything but primitive types to the ``get`` method,
    it's likely that you'll need to override this method.
    """
    if not args and not kwargs:
        return self.class_path
    try:
        if args and not kwargs:
            return "%s:%s" % (self.class_path, self.hash(args))
        # The line might break if your passed values are un-hashable. If
        # it does, you need to override this method and implement your own
        # key algorithm.
        return "%s:%s:%s:%s" % (self.class_path,
                                self.hash(args),
                                self.hash([k for k in sorted(kwargs)]),
                                self.hash([kwargs[k] for k in sorted(kwargs)]))
    except TypeError:
        raise RuntimeError(
            "Unable to generate cache key due to unhashable "
            "args or kwargs - you need to implement your own "
            "key generation method to avoid this problem")
python
Return the cache key to use. If you're passing anything but primitive types to the ``get`` method, it's likely that you'll need to override this method.
[ "Return", "the", "cache", "key", "to", "use", "." ]
train
https://github.com/codeinthehole/django-cacheback/blob/0c79a524a28ca2fada98ed58c26c544f07a58e14/cacheback/base.py#L393-L416
codeinthehole/django-cacheback
cacheback/base.py
Job.hash
def hash(self, value):
    """
    Generate a hash of the given iterable.

    This is for use in a cache key.
    """
    if is_iterable(value):
        value = tuple(to_bytestring(v) for v in value)
    return hashlib.md5(six.b(':').join(value)).hexdigest()
python
Generate a hash of the given iterable. This is for use in a cache key.
[ "Generate", "a", "hash", "of", "the", "given", "iterable", "." ]
train
https://github.com/codeinthehole/django-cacheback/blob/0c79a524a28ca2fada98ed58c26c544f07a58e14/cacheback/base.py#L418-L426
codeinthehole/django-cacheback
cacheback/base.py
Job.perform_async_refresh
def perform_async_refresh(cls, klass_str, obj_args, obj_kwargs, call_args, call_kwargs):
    """
    Re-populate cache using the given job class.

    The job class is instantiated with the passed constructor args and the
    refresh method is called with the passed call args. That is::

        data = klass(*obj_args, **obj_kwargs).refresh(
            *call_args, **call_kwargs)

    :klass_str: String repr of class (eg 'apps.twitter.jobs.FetchTweetsJob')
    :obj_args: Constructor args
    :obj_kwargs: Constructor kwargs
    :call_args: Refresh args
    :call_kwargs: Refresh kwargs
    """
    klass = get_job_class(klass_str)
    if klass is None:
        logger.error("Unable to construct %s with args %r and kwargs %r",
                     klass_str, obj_args, obj_kwargs)
        return
    logger.info("Using %s with constructor args %r and kwargs %r",
                klass_str, obj_args, obj_kwargs)
    logger.info("Calling refresh with args %r and kwargs %r",
                call_args, call_kwargs)
    start = time.time()
    try:
        klass(*obj_args, **obj_kwargs).refresh(
            *call_args, **call_kwargs)
    except Exception as e:
        logger.exception("Error running job: '%s'", e)
    else:
        duration = time.time() - start
        logger.info("Refreshed cache in %.6f seconds", duration)
python
Re-populate cache using the given job class.

The job class is instantiated with the passed constructor args and the refresh method is called with the passed call args. That is::

    data = klass(*obj_args, **obj_kwargs).refresh(*call_args, **call_kwargs)

:klass_str: String repr of class (eg 'apps.twitter.jobs.FetchTweetsJob')
:obj_args: Constructor args
:obj_kwargs: Constructor kwargs
:call_args: Refresh args
:call_kwargs: Refresh kwargs
[ "Re", "-", "populate", "cache", "using", "the", "given", "job", "class", "." ]
train
https://github.com/codeinthehole/django-cacheback/blob/0c79a524a28ca2fada98ed58c26c544f07a58e14/cacheback/base.py#L463-L497
codeinthehole/django-cacheback
cacheback/decorators.py
cacheback
def cacheback(lifetime=None, fetch_on_miss=None, cache_alias=None,
              job_class=None, task_options=None, **job_class_kwargs):
    """
    Decorate function to cache its return value.

    :lifetime: How long to cache items for
    :fetch_on_miss: Whether to perform a synchronous fetch when no cached
                    result is found
    :cache_alias: The Django cache alias to store the result into.
    :job_class: The class to use for running the cache refresh job. Defaults
                to FunctionJob.
    :job_class_kwargs: Any extra kwargs to pass to job_class constructor.
                       Useful with custom job_class implementations.
    """
    if job_class is None:
        job_class = FunctionJob
    job = job_class(lifetime=lifetime, fetch_on_miss=fetch_on_miss,
                    cache_alias=cache_alias, task_options=task_options,
                    **job_class_kwargs)

    def _wrapper(fn):
        # using available_attrs to work around http://bugs.python.org/issue3445
        @wraps(fn, assigned=available_attrs(fn))
        def __wrapper(*args, **kwargs):
            return job.get(fn, *args, **kwargs)
        # Assign reference to unwrapped function so that we can access it
        # later without descending into infinite regress.
        __wrapper.fn = fn
        # Assign reference to job so we can use the full Job API
        __wrapper.job = job
        return __wrapper

    return _wrapper
python
Decorate function to cache its return value.

:lifetime: How long to cache items for
:fetch_on_miss: Whether to perform a synchronous fetch when no cached result is found
:cache_alias: The Django cache alias to store the result into.
:job_class: The class to use for running the cache refresh job. Defaults to FunctionJob.
:job_class_kwargs: Any extra kwargs to pass to job_class constructor. Useful with custom job_class implementations.
[ "Decorate", "function", "to", "cache", "its", "return", "value", "." ]
train
https://github.com/codeinthehole/django-cacheback/blob/0c79a524a28ca2fada98ed58c26c544f07a58e14/cacheback/decorators.py#L8-L40
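Usage note: a minimal sketch of applying this decorator; the decorated function body is purely illustrative.

from cacheback.decorators import cacheback

@cacheback(lifetime=600, fetch_on_miss=True)
def expensive_lookup(term):
    # Stand-in for a slow query or remote API call.
    return [term.upper()] * 3

results = expensive_lookup('django')   # served via job.get(), as wrapped above
original = expensive_lookup.fn         # unwrapped function, per __wrapper.fn
job = expensive_lookup.job             # underlying job instance, per __wrapper.job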
bartromgens/geojsoncontour
geojsoncontour/utilities/multipoly.py
angle
def angle(v1, v2):
    """Return the angle in radians between vectors 'v1' and 'v2'."""
    v1_u = unit_vector(v1)
    v2_u = unit_vector(v2)
    return np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0))
python
Return the angle in radians between vectors 'v1' and 'v2'.
[ "Return", "the", "angle", "in", "radians", "between", "vectors", "v1", "and", "v2", "." ]
train
https://github.com/bartromgens/geojsoncontour/blob/79e30718fa0c1d96a2459eb1f45d06d699d240ed/geojsoncontour/utilities/multipoly.py#L39-L43
bartromgens/geojsoncontour
geojsoncontour/utilities/multipoly.py
keep_high_angle
def keep_high_angle(vertices, min_angle_deg):
    """Keep vertices with angles higher than the given minimum."""
    accepted = []
    v = vertices
    v1 = v[1] - v[0]
    accepted.append((v[0][0], v[0][1]))
    for i in range(1, len(v) - 2):
        v2 = v[i + 1] - v[i - 1]
        diff_angle = np.fabs(angle(v1, v2) * 180.0 / np.pi)
        if diff_angle > min_angle_deg:
            accepted.append((v[i][0], v[i][1]))
            v1 = v[i] - v[i - 1]
    accepted.append((v[-1][0], v[-1][1]))
    return np.array(accepted, dtype=vertices.dtype)
python
Keep vertices with angles higher than the given minimum.
[ "Keep", "vertices", "with", "angles", "higher", "then", "given", "minimum", "." ]
train
https://github.com/bartromgens/geojsoncontour/blob/79e30718fa0c1d96a2459eb1f45d06d699d240ed/geojsoncontour/utilities/multipoly.py#L46-L59
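Usage note: a small sketch of the simplification this helper performs; the import path is inferred from the file location above.

import numpy as np
from geojsoncontour.utilities.multipoly import keep_high_angle

# A polyline with a nearly collinear interior vertex at (0.5, 0).
vertices = np.array([[0.0, 0.0], [0.5, 0.0], [1.0, 0.0], [1.0, 1.0], [2.0, 1.0]])

# Interior vertices where the direction changes by 10 degrees or less are
# dropped; the first and last vertices are always kept.
simplified = keep_high_angle(vertices, min_angle_deg=10)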
bartromgens/geojsoncontour
geojsoncontour/utilities/multipoly.py
set_contourf_properties
def set_contourf_properties(stroke_width, fcolor, fill_opacity, contour_levels, contourf_idx, unit):
    """Set property values for Polygon."""
    return {
        "stroke": fcolor,
        "stroke-width": stroke_width,
        "stroke-opacity": 1,
        "fill": fcolor,
        "fill-opacity": fill_opacity,
        "title": "%.2f" % contour_levels[contourf_idx] + ' ' + unit
    }
python
Set property values for Polygon.
[ "Set", "property", "values", "for", "Polygon", "." ]
train
https://github.com/bartromgens/geojsoncontour/blob/79e30718fa0c1d96a2459eb1f45d06d699d240ed/geojsoncontour/utilities/multipoly.py#L62-L71
bartromgens/geojsoncontour
geojsoncontour/contour.py
contour_to_geojson
def contour_to_geojson(contour, geojson_filepath=None, min_angle_deg=None,
                       ndigits=5, unit='', stroke_width=1, geojson_properties=None,
                       strdump=False, serialize=True):
    """Transform matplotlib.contour to geojson."""
    collections = contour.collections
    contour_index = 0
    line_features = []
    for collection in collections:
        color = collection.get_edgecolor()
        for path in collection.get_paths():
            v = path.vertices
            if len(v) < 3:
                continue
            coordinates = keep_high_angle(v, min_angle_deg)
            if ndigits:
                coordinates = np.around(coordinates, ndigits)
            line = LineString(coordinates.tolist())
            properties = {
                "stroke-width": stroke_width,
                "stroke": rgb2hex(color[0]),
                "title": "%.2f" % contour.levels[contour_index] + ' ' + unit,
                "level-value": float("%.6f" % contour.levels[contour_index]),
                "level-index": contour_index
            }
            if geojson_properties:
                properties.update(geojson_properties)
            line_features.append(Feature(geometry=line, properties=properties))
        contour_index += 1
    feature_collection = FeatureCollection(line_features)
    return _render_feature_collection(feature_collection, geojson_filepath, strdump, serialize)
python
Transform matplotlib.contour to geojson.
[ "Transform", "matplotlib", ".", "contour", "to", "geojson", "." ]
train
https://github.com/bartromgens/geojsoncontour/blob/79e30718fa0c1d96a2459eb1f45d06d699d240ed/geojsoncontour/contour.py#L11-L40
bartromgens/geojsoncontour
geojsoncontour/contour.py
contourf_to_geojson_overlap
def contourf_to_geojson_overlap(contourf, geojson_filepath=None, min_angle_deg=None,
                                ndigits=5, unit='', stroke_width=1, fill_opacity=.9,
                                geojson_properties=None, strdump=False, serialize=True):
    """Transform matplotlib.contourf to geojson with overlapping filled contours."""
    polygon_features = []
    contourf_idx = 0
    for collection in contourf.collections:
        color = collection.get_facecolor()
        for path in collection.get_paths():
            for coord in path.to_polygons():
                if min_angle_deg:
                    coord = keep_high_angle(coord, min_angle_deg)
                coord = np.around(coord, ndigits) if ndigits else coord
                polygon = Polygon(coordinates=[coord.tolist()])
                fcolor = rgb2hex(color[0])
                properties = set_contourf_properties(stroke_width, fcolor, fill_opacity,
                                                     contourf.levels, contourf_idx, unit)
                if geojson_properties:
                    properties.update(geojson_properties)
                feature = Feature(geometry=polygon, properties=properties)
                polygon_features.append(feature)
        contourf_idx += 1
    feature_collection = FeatureCollection(polygon_features)
    return _render_feature_collection(feature_collection, geojson_filepath, strdump, serialize)
python
Transform matplotlib.contourf to geojson with overlapping filled contours.
[ "Transform", "matplotlib", ".", "contourf", "to", "geojson", "with", "overlapping", "filled", "contours", "." ]
train
https://github.com/bartromgens/geojsoncontour/blob/79e30718fa0c1d96a2459eb1f45d06d699d240ed/geojsoncontour/contour.py#L43-L65
bartromgens/geojsoncontour
geojsoncontour/contour.py
contourf_to_geojson
def contourf_to_geojson(contourf, geojson_filepath=None, min_angle_deg=None,
                        ndigits=5, unit='', stroke_width=1, fill_opacity=.9,
                        geojson_properties=None, strdump=False, serialize=True):
    """Transform matplotlib.contourf to geojson with MultiPolygons."""
    polygon_features = []
    mps = []
    contourf_idx = 0
    for coll in contourf.collections:
        color = coll.get_facecolor()
        for path in coll.get_paths():
            for coord in path.to_polygons():
                if min_angle_deg:
                    coord = keep_high_angle(coord, min_angle_deg)
                coord = np.around(coord, ndigits) if ndigits else coord
                op = MP(contourf.levels[contourf_idx], rgb2hex(color[0]))
                if op in mps:
                    for i, k in enumerate(mps):
                        if k == op:
                            mps[i].add_coords(coord.tolist())
                else:
                    op.add_coords(coord.tolist())
                    mps.append(op)
        contourf_idx += 1

    # starting here the multipolys will be extracted
    contourf_idx = 0
    for muli in mps:
        polygon = muli.mpoly()
        fcolor = muli.color
        properties = set_contourf_properties(stroke_width, fcolor, fill_opacity,
                                             contourf.levels, contourf_idx, unit)
        if geojson_properties:
            properties.update(geojson_properties)
        feature = Feature(geometry=polygon, properties=properties)
        polygon_features.append(feature)
        contourf_idx += 1
    feature_collection = FeatureCollection(polygon_features)
    return _render_feature_collection(feature_collection, geojson_filepath, strdump, serialize)
python
Transform matplotlib.contourf to geojson with MultiPolygons.
[ "Transform", "matplotlib", ".", "contourf", "to", "geojson", "with", "MultiPolygons", "." ]
train
https://github.com/bartromgens/geojsoncontour/blob/79e30718fa0c1d96a2459eb1f45d06d699d240ed/geojsoncontour/contour.py#L68-L103
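Usage note: a hedged end-to-end sketch of feeding a matplotlib contour plot through this function. It assumes the function is importable from geojsoncontour.contour as shown, and that the installed matplotlib version still exposes ContourSet.collections, which the code above relies on.

import numpy as np
import matplotlib.pyplot as plt
from geojsoncontour.contour import contourf_to_geojson

# Build a simple filled-contour plot of a 2-D Gaussian bump.
x = np.linspace(-1.0, 1.0, 50)
y = np.linspace(-1.0, 1.0, 50)
X, Y = np.meshgrid(x, y)
Z = np.exp(-(X ** 2 + Y ** 2))
contourf = plt.contourf(X, Y, Z, levels=8)

# Serialize the filled contours as a GeoJSON FeatureCollection string.
geojson_str = contourf_to_geojson(
    contourf=contourf,
    min_angle_deg=3.0,   # drop near-collinear vertices via keep_high_angle
    ndigits=3,           # round coordinates
    fill_opacity=0.5,
)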
mattupstate/flask-social
flask_social/utils.py
get_authorize_callback
def get_authorize_callback(endpoint, provider_id):
    """Get a qualified URL for the provider to return to upon authorization

    param: endpoint: Absolute path to append to the application's host
    """
    endpoint_prefix = config_value('BLUEPRINT_NAME')
    url = url_for(endpoint_prefix + '.' + endpoint, provider_id=provider_id)
    return request.url_root[:-1] + url
python
Get a qualified URL for the provider to return to upon authorization

param: endpoint: Absolute path to append to the application's host
[ "Get", "a", "qualified", "URL", "for", "the", "provider", "to", "return", "to", "upon", "authorization" ]
train
https://github.com/mattupstate/flask-social/blob/36f5790c8fb6d4d9a5120d099419ba30fd73e897/flask_social/utils.py#L30-L37
mattupstate/flask-social
flask_social/datastore.py
ConnectionDatastore.delete_connection
def delete_connection(self, **kwargs):
    """Remove a single connection to a provider for the specified user."""
    conn = self.find_connection(**kwargs)
    if not conn:
        return False
    self.delete(conn)
    return True
python
Remove a single connection to a provider for the specified user.
[ "Remove", "a", "single", "connection", "to", "a", "provider", "for", "the", "specified", "user", "." ]
train
https://github.com/mattupstate/flask-social/blob/36f5790c8fb6d4d9a5120d099419ba30fd73e897/flask_social/datastore.py#L35-L41
mattupstate/flask-social
flask_social/datastore.py
ConnectionDatastore.delete_connections
def delete_connections(self, **kwargs):
    """Remove all connections to a provider for the specified user."""
    rv = False
    for c in self.find_connections(**kwargs):
        self.delete(c)
        rv = True
    return rv
python
Remove all connections to a provider for the specified user.
[ "Remove", "a", "single", "connection", "to", "a", "provider", "for", "the", "specified", "user", "." ]
train
https://github.com/mattupstate/flask-social/blob/36f5790c8fb6d4d9a5120d099419ba30fd73e897/flask_social/datastore.py#L43-L49
mattupstate/flask-social
flask_social/views.py
login
def login(provider_id):
    """Starts the provider login OAuth flow"""
    provider = get_provider_or_404(provider_id)
    callback_url = get_authorize_callback('login', provider_id)
    post_login = request.form.get('next', get_post_login_redirect())
    session[config_value('POST_OAUTH_LOGIN_SESSION_KEY')] = post_login
    return provider.authorize(callback_url)
python
Starts the provider login OAuth flow
[ "Starts", "the", "provider", "login", "OAuth", "flow" ]
train
https://github.com/mattupstate/flask-social/blob/36f5790c8fb6d4d9a5120d099419ba30fd73e897/flask_social/views.py#L44-L50
mattupstate/flask-social
flask_social/views.py
connect
def connect(provider_id):
    """Starts the provider connection OAuth flow"""
    provider = get_provider_or_404(provider_id)
    callback_url = get_authorize_callback('connect', provider_id)
    allow_view = get_url(config_value('CONNECT_ALLOW_VIEW'))
    pc = request.form.get('next', allow_view)
    session[config_value('POST_OAUTH_CONNECT_SESSION_KEY')] = pc
    return provider.authorize(callback_url)
python
Starts the provider connection OAuth flow
[ "Starts", "the", "provider", "connection", "OAuth", "flow" ]
train
https://github.com/mattupstate/flask-social/blob/36f5790c8fb6d4d9a5120d099419ba30fd73e897/flask_social/views.py#L54-L61
mattupstate/flask-social
flask_social/views.py
remove_all_connections
def remove_all_connections(provider_id):
    """Remove all connections for the authenticated user to the
    specified provider
    """
    provider = get_provider_or_404(provider_id)
    ctx = dict(provider=provider.name, user=current_user)

    deleted = _datastore.delete_connections(user_id=current_user.get_id(),
                                            provider_id=provider_id)
    if deleted:
        after_this_request(_commit)
        msg = ('All connections to %s removed' % provider.name, 'info')
        connection_removed.send(current_app._get_current_object(),
                                user=current_user._get_current_object(),
                                provider_id=provider_id)
    else:
        msg = ('Unable to remove connection to %(provider)s' % ctx, 'error')

    do_flash(*msg)
    return redirect(request.referrer)
python
Remove all connections for the authenticated user to the specified provider
[ "Remove", "all", "connections", "for", "the", "authenticated", "user", "to", "the", "specified", "provider" ]
train
https://github.com/mattupstate/flask-social/blob/36f5790c8fb6d4d9a5120d099419ba30fd73e897/flask_social/views.py#L73-L93
mattupstate/flask-social
flask_social/views.py
remove_connection
def remove_connection(provider_id, provider_user_id):
    """Remove a specific connection for the authenticated user to the
    specified provider
    """
    provider = get_provider_or_404(provider_id)
    ctx = dict(provider=provider.name, user=current_user,
               provider_user_id=provider_user_id)

    deleted = _datastore.delete_connection(user_id=current_user.get_id(),
                                           provider_id=provider_id,
                                           provider_user_id=provider_user_id)
    if deleted:
        after_this_request(_commit)
        msg = ('Connection to %(provider)s removed' % ctx, 'info')
        connection_removed.send(current_app._get_current_object(),
                                user=current_user._get_current_object(),
                                provider_id=provider_id)
    else:
        msg = ('Unable to remove connection to %(provider)s' % ctx, 'error')

    do_flash(*msg)
    return redirect(request.referrer or get_post_login_redirect())
python
Remove a specific connection for the authenticated user to the specified provider
[ "Remove", "a", "specific", "connection", "for", "the", "authenticated", "user", "to", "the", "specified", "provider" ]
train
https://github.com/mattupstate/flask-social/blob/36f5790c8fb6d4d9a5120d099419ba30fd73e897/flask_social/views.py#L97-L120
mattupstate/flask-social
flask_social/views.py
connect_handler
def connect_handler(cv, provider):
    """Shared method to handle the connection process

    :param connection_values: A dictionary containing the connection values
    :param provider_id: The provider ID the connection should be made to
    """
    cv.setdefault('user_id', current_user.get_id())
    connection = _datastore.find_connection(
        provider_id=cv['provider_id'],
        provider_user_id=cv['provider_user_id'])

    if connection is None:
        after_this_request(_commit)
        connection = _datastore.create_connection(**cv)
        msg = ('Connection established to %s' % provider.name, 'success')
        connection_created.send(current_app._get_current_object(),
                                user=current_user._get_current_object(),
                                connection=connection)
    else:
        msg = ('A connection is already established with %s '
               'to your account' % provider.name, 'notice')
        connection_failed.send(current_app._get_current_object(),
                               user=current_user._get_current_object())

    redirect_url = session.pop(config_value('POST_OAUTH_CONNECT_SESSION_KEY'),
                               get_url(config_value('CONNECT_ALLOW_VIEW')))
    do_flash(*msg)
    return redirect(redirect_url)
python
Shared method to handle the connection process

:param connection_values: A dictionary containing the connection values
:param provider_id: The provider ID the connection should be made to
[ "Shared", "method", "to", "handle", "the", "connection", "process" ]
train
https://github.com/mattupstate/flask-social/blob/36f5790c8fb6d4d9a5120d099419ba30fd73e897/flask_social/views.py#L123-L150
mattupstate/flask-social
flask_social/views.py
login_handler
def login_handler(response, provider, query):
    """Shared method to handle the signin process"""
    connection = _datastore.find_connection(**query)

    if connection:
        after_this_request(_commit)
        token_pair = get_token_pair_from_oauth_response(provider, response)
        if (token_pair['access_token'] != connection.access_token
                or token_pair['secret'] != connection.secret):
            connection.access_token = token_pair['access_token']
            connection.secret = token_pair['secret']
            _datastore.put(connection)
        user = connection.user
        login_user(user)
        key = _social.post_oauth_login_session_key
        redirect_url = session.pop(key, get_post_login_redirect())
        login_completed.send(current_app._get_current_object(),
                             provider=provider, user=user)
        return redirect(redirect_url)

    login_failed.send(current_app._get_current_object(),
                      provider=provider, oauth_response=response)
    next = get_url(_security.login_manager.login_view)
    msg = '%s account not associated with an existing user' % provider.name
    do_flash(msg, 'error')
    return redirect(next)
python
Shared method to handle the signin process
[ "Shared", "method", "to", "handle", "the", "signin", "process" ]
train
https://github.com/mattupstate/flask-social/blob/36f5790c8fb6d4d9a5120d099419ba30fd73e897/flask_social/views.py#L169-L199
mattupstate/flask-social
flask_social/core.py
Social.init_app
def init_app(self, app, datastore=None): """Initialize the application with the Social extension :param app: The Flask application :param datastore: Connection datastore instance """ datastore = datastore or self.datastore for key, value in default_config.items(): app.config.setdefault(key, value) providers = dict() for key, config in app.config.items(): if not key.startswith('SOCIAL_') or config is None or key in default_config: continue suffix = key.lower().replace('social_', '') default_module_name = 'flask_social.providers.%s' % suffix module_name = config.get('module', default_module_name) module = import_module(module_name) config = update_recursive(module.config, config) providers[config['id']] = OAuthRemoteApp(**config) providers[config['id']].tokengetter(_get_token) state = _get_state(app, datastore, providers) app.register_blueprint(create_blueprint(state, __name__)) app.extensions['social'] = state return state
python
def init_app(self, app, datastore=None): """Initialize the application with the Social extension :param app: The Flask application :param datastore: Connection datastore instance """ datastore = datastore or self.datastore for key, value in default_config.items(): app.config.setdefault(key, value) providers = dict() for key, config in app.config.items(): if not key.startswith('SOCIAL_') or config is None or key in default_config: continue suffix = key.lower().replace('social_', '') default_module_name = 'flask_social.providers.%s' % suffix module_name = config.get('module', default_module_name) module = import_module(module_name) config = update_recursive(module.config, config) providers[config['id']] = OAuthRemoteApp(**config) providers[config['id']].tokengetter(_get_token) state = _get_state(app, datastore, providers) app.register_blueprint(create_blueprint(state, __name__)) app.extensions['social'] = state return state
[ "def", "init_app", "(", "self", ",", "app", ",", "datastore", "=", "None", ")", ":", "datastore", "=", "datastore", "or", "self", ".", "datastore", "for", "key", ",", "value", "in", "default_config", ".", "items", "(", ")", ":", "app", ".", "config", ".", "setdefault", "(", "key", ",", "value", ")", "providers", "=", "dict", "(", ")", "for", "key", ",", "config", "in", "app", ".", "config", ".", "items", "(", ")", ":", "if", "not", "key", ".", "startswith", "(", "'SOCIAL_'", ")", "or", "config", "is", "None", "or", "key", "in", "default_config", ":", "continue", "suffix", "=", "key", ".", "lower", "(", ")", ".", "replace", "(", "'social_'", ",", "''", ")", "default_module_name", "=", "'flask_social.providers.%s'", "%", "suffix", "module_name", "=", "config", ".", "get", "(", "'module'", ",", "default_module_name", ")", "module", "=", "import_module", "(", "module_name", ")", "config", "=", "update_recursive", "(", "module", ".", "config", ",", "config", ")", "providers", "[", "config", "[", "'id'", "]", "]", "=", "OAuthRemoteApp", "(", "*", "*", "config", ")", "providers", "[", "config", "[", "'id'", "]", "]", ".", "tokengetter", "(", "_get_token", ")", "state", "=", "_get_state", "(", "app", ",", "datastore", ",", "providers", ")", "app", ".", "register_blueprint", "(", "create_blueprint", "(", "state", ",", "__name__", ")", ")", "app", ".", "extensions", "[", "'social'", "]", "=", "state", "return", "state" ]
Initialize the application with the Social extension :param app: The Flask application :param datastore: Connection datastore instance
[ "Initialize", "the", "application", "with", "the", "Social", "extension" ]
train
https://github.com/mattupstate/flask-social/blob/36f5790c8fb6d4d9a5120d099419ba30fd73e897/flask_social/core.py#L108-L140
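The provider-discovery loop in init_app above is easy to trace in isolation. The sketch below replays just that scan over a plain dict; the default key, the Twitter entry and all values are illustrative and not taken from the record.

# Standalone replay of the SOCIAL_* scan done by Social.init_app: any
# SOCIAL_<NAME> key that is not a default and not None selects the provider
# module flask_social.providers.<name>. Keys and values are placeholders.
default_config = {'SOCIAL_URL_PREFIX': None}          # assumed default key
app_config = {
    'SOCIAL_URL_PREFIX': '/social',
    'SOCIAL_TWITTER': {'consumer_key': 'key', 'consumer_secret': 'secret'},
    'DEBUG': True,
}

for key, config in app_config.items():
    if not key.startswith('SOCIAL_') or config is None or key in default_config:
        continue
    suffix = key.lower().replace('social_', '')
    print(suffix, '->', config.get('module', 'flask_social.providers.%s' % suffix))
# twitter -> flask_social.providers.twitter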
eugene-eeo/mailthon
mailthon/helpers.py
guess
def guess(filename, fallback='application/octet-stream'): """ Using the mimetypes library, guess the mimetype and encoding for a given *filename*. If the mimetype cannot be guessed, *fallback* is assumed instead. :param filename: Filename- can be absolute path. :param fallback: A fallback mimetype. """ guessed, encoding = mimetypes.guess_type(filename, strict=False) if guessed is None: return fallback, encoding return guessed, encoding
python
def guess(filename, fallback='application/octet-stream'): """ Using the mimetypes library, guess the mimetype and encoding for a given *filename*. If the mimetype cannot be guessed, *fallback* is assumed instead. :param filename: Filename- can be absolute path. :param fallback: A fallback mimetype. """ guessed, encoding = mimetypes.guess_type(filename, strict=False) if guessed is None: return fallback, encoding return guessed, encoding
[ "def", "guess", "(", "filename", ",", "fallback", "=", "'application/octet-stream'", ")", ":", "guessed", ",", "encoding", "=", "mimetypes", ".", "guess_type", "(", "filename", ",", "strict", "=", "False", ")", "if", "guessed", "is", "None", ":", "return", "fallback", ",", "encoding", "return", "guessed", ",", "encoding" ]
Using the mimetypes library, guess the mimetype and encoding for a given *filename*. If the mimetype cannot be guessed, *fallback* is assumed instead. :param filename: Filename- can be absolute path. :param fallback: A fallback mimetype.
[ "Using", "the", "mimetypes", "library", "guess", "the", "mimetype", "and", "encoding", "for", "a", "given", "*", "filename", "*", ".", "If", "the", "mimetype", "cannot", "be", "guessed", "*", "fallback", "*", "is", "assumed", "instead", "." ]
train
https://github.com/eugene-eeo/mailthon/blob/e3d5aef62505acb4edbc33e3378a04951c3199cb/mailthon/helpers.py#L23-L35
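Since guess() depends only on the standard mimetypes module, a quick rerun with a few file names shows the fallback behaviour; the function body is copied from the record and the sample names are arbitrary.

import mimetypes

def guess(filename, fallback='application/octet-stream'):
    # copied from mailthon.helpers.guess above
    guessed, encoding = mimetypes.guess_type(filename, strict=False)
    if guessed is None:
        return fallback, encoding
    return guessed, encoding

print(guess('report.pdf'))       # ('application/pdf', None) on most platforms
print(guess('notes.txt.gz'))     # mimetype plus 'gzip' reported as the encoding
print(guess('mystery.blob'))     # falls back to ('application/octet-stream', None)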
eugene-eeo/mailthon
mailthon/helpers.py
format_addresses
def format_addresses(addrs): """ Given an iterable of addresses or name-address tuples *addrs*, return a header value that joins all of them together with a space and a comma. """ return ', '.join( formataddr(item) if isinstance(item, tuple) else item for item in addrs )
python
def format_addresses(addrs): """ Given an iterable of addresses or name-address tuples *addrs*, return a header value that joins all of them together with a space and a comma. """ return ', '.join( formataddr(item) if isinstance(item, tuple) else item for item in addrs )
[ "def", "format_addresses", "(", "addrs", ")", ":", "return", "', '", ".", "join", "(", "formataddr", "(", "item", ")", "if", "isinstance", "(", "item", ",", "tuple", ")", "else", "item", "for", "item", "in", "addrs", ")" ]
Given an iterable of addresses or name-address tuples *addrs*, return a header value that joins all of them together with a space and a comma.
[ "Given", "an", "iterable", "of", "addresses", "or", "name", "-", "address", "tuples", "*", "addrs", "*", "return", "a", "header", "value", "that", "joins", "all", "of", "them", "together", "with", "a", "space", "and", "a", "comma", "." ]
train
https://github.com/eugene-eeo/mailthon/blob/e3d5aef62505acb4edbc33e3378a04951c3199cb/mailthon/helpers.py#L38-L47
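A short rerun of format_addresses with mixed inputs makes the tuple handling concrete; the body is copied from the record and the addresses are placeholders.

from email.utils import formataddr

def format_addresses(addrs):
    # copied from mailthon.helpers.format_addresses above
    return ', '.join(
        formataddr(item) if isinstance(item, tuple) else item
        for item in addrs
    )

print(format_addresses([('Alice Example', 'alice@example.com'), 'bob@example.com']))
# Alice Example <alice@example.com>, bob@example.com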
eugene-eeo/mailthon
mailthon/helpers.py
stringify_address
def stringify_address(addr, encoding='utf-8'): """ Given an email address *addr*, try to encode it with ASCII. If it's not possible, encode the *local-part* with the *encoding* and the *domain* with IDNA. The result is a unicode string with the domain encoded as idna. """ if isinstance(addr, bytes_type): return addr try: addr = addr.encode('ascii') except UnicodeEncodeError: if '@' in addr: localpart, domain = addr.split('@', 1) addr = b'@'.join([ localpart.encode(encoding), domain.encode('idna'), ]) else: addr = addr.encode(encoding) return addr.decode('utf-8')
python
def stringify_address(addr, encoding='utf-8'): """ Given an email address *addr*, try to encode it with ASCII. If it's not possible, encode the *local-part* with the *encoding* and the *domain* with IDNA. The result is a unicode string with the domain encoded as idna. """ if isinstance(addr, bytes_type): return addr try: addr = addr.encode('ascii') except UnicodeEncodeError: if '@' in addr: localpart, domain = addr.split('@', 1) addr = b'@'.join([ localpart.encode(encoding), domain.encode('idna'), ]) else: addr = addr.encode(encoding) return addr.decode('utf-8')
[ "def", "stringify_address", "(", "addr", ",", "encoding", "=", "'utf-8'", ")", ":", "if", "isinstance", "(", "addr", ",", "bytes_type", ")", ":", "return", "addr", "try", ":", "addr", "=", "addr", ".", "encode", "(", "'ascii'", ")", "except", "UnicodeEncodeError", ":", "if", "'@'", "in", "addr", ":", "localpart", ",", "domain", "=", "addr", ".", "split", "(", "'@'", ",", "1", ")", "addr", "=", "b'@'", ".", "join", "(", "[", "localpart", ".", "encode", "(", "encoding", ")", ",", "domain", ".", "encode", "(", "'idna'", ")", ",", "]", ")", "else", ":", "addr", "=", "addr", ".", "encode", "(", "encoding", ")", "return", "addr", ".", "decode", "(", "'utf-8'", ")" ]
Given an email address *addr*, try to encode it with ASCII. If it's not possible, encode the *local-part* with the *encoding* and the *domain* with IDNA. The result is a unicode string with the domain encoded as idna.
[ "Given", "an", "email", "address", "*", "addr", "*", "try", "to", "encode", "it", "with", "ASCII", ".", "If", "it", "s", "not", "possible", "encode", "the", "*", "local", "-", "part", "*", "with", "the", "*", "encoding", "*", "and", "the", "*", "domain", "*", "with", "IDNA", "." ]
train
https://github.com/eugene-eeo/mailthon/blob/e3d5aef62505acb4edbc33e3378a04951c3199cb/mailthon/helpers.py#L50-L73
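The ASCII and IDNA branches of stringify_address are easiest to see with one non-ASCII address. The body below is copied from the record except for bytes_type, which the record does not define and is assumed here to alias bytes.

bytes_type = bytes   # assumption: the helper module's py2/3 alias for bytes

def stringify_address(addr, encoding='utf-8'):
    # copied from mailthon.helpers.stringify_address above
    if isinstance(addr, bytes_type):
        return addr
    try:
        addr = addr.encode('ascii')
    except UnicodeEncodeError:
        if '@' in addr:
            localpart, domain = addr.split('@', 1)
            addr = b'@'.join([
                localpart.encode(encoding),
                domain.encode('idna'),
            ])
        else:
            addr = addr.encode(encoding)
    return addr.decode('utf-8')

print(stringify_address('plain@example.com'))    # pure ASCII passes through unchanged
print(stringify_address('müller@exämple.com'))   # local part kept as UTF-8, domain
                                                 # converted to its xn--... IDNA form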
eugene-eeo/mailthon
mailthon/api.py
email
def email(sender=None, receivers=(), cc=(), bcc=(), subject=None, content=None, encoding='utf8', attachments=()): """ Creates a Collection object with a HTML *content*, and *attachments*. :param content: HTML content. :param encoding: Encoding of the email. :param attachments: List of filenames to attach to the email. """ enclosure = [HTML(content, encoding)] enclosure.extend(Attachment(k) for k in attachments) return Collection( *enclosure, headers=[ headers.subject(subject), headers.sender(sender), headers.to(*receivers), headers.cc(*cc), headers.bcc(*bcc), headers.date(), headers.message_id(), ] )
python
def email(sender=None, receivers=(), cc=(), bcc=(), subject=None, content=None, encoding='utf8', attachments=()): """ Creates a Collection object with a HTML *content*, and *attachments*. :param content: HTML content. :param encoding: Encoding of the email. :param attachments: List of filenames to attach to the email. """ enclosure = [HTML(content, encoding)] enclosure.extend(Attachment(k) for k in attachments) return Collection( *enclosure, headers=[ headers.subject(subject), headers.sender(sender), headers.to(*receivers), headers.cc(*cc), headers.bcc(*bcc), headers.date(), headers.message_id(), ] )
[ "def", "email", "(", "sender", "=", "None", ",", "receivers", "=", "(", ")", ",", "cc", "=", "(", ")", ",", "bcc", "=", "(", ")", ",", "subject", "=", "None", ",", "content", "=", "None", ",", "encoding", "=", "'utf8'", ",", "attachments", "=", "(", ")", ")", ":", "enclosure", "=", "[", "HTML", "(", "content", ",", "encoding", ")", "]", "enclosure", ".", "extend", "(", "Attachment", "(", "k", ")", "for", "k", "in", "attachments", ")", "return", "Collection", "(", "*", "enclosure", ",", "headers", "=", "[", "headers", ".", "subject", "(", "subject", ")", ",", "headers", ".", "sender", "(", "sender", ")", ",", "headers", ".", "to", "(", "*", "receivers", ")", ",", "headers", ".", "cc", "(", "*", "cc", ")", ",", "headers", ".", "bcc", "(", "*", "bcc", ")", ",", "headers", ".", "date", "(", ")", ",", "headers", ".", "message_id", "(", ")", ",", "]", ")" ]
Creates a Collection object with a HTML *content*, and *attachments*. :param content: HTML content. :param encoding: Encoding of the email. :param attachments: List of filenames to attach to the email.
[ "Creates", "a", "Collection", "object", "with", "a", "HTML", "*", "content", "*", "and", "*", "attachments", "*", "." ]
train
https://github.com/eugene-eeo/mailthon/blob/e3d5aef62505acb4edbc33e3378a04951c3199cb/mailthon/api.py#L18-L43
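An illustrative call of the email() helper above, assuming the mailthon package from this repository is installed; the addresses, subject and attachment path are placeholders, and the attachment would need to point at a real file for delivery.

from mailthon.api import email

envelope = email(
    sender='Sender <sender@example.com>',
    receivers=['receiver@example.com'],
    cc=['watcher@example.com'],
    subject='Monthly report',
    content='<p>Hi, the report is attached.</p>',
    attachments=['report.pdf'],     # placeholder path to a file on disk
)
# The resulting Collection can then be handed to a Postman/postman() for delivery.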
eugene-eeo/mailthon
mailthon/api.py
postman
def postman(host, port=587, auth=(None, None), force_tls=False, options=None): """ Creates a Postman object with TLS and Auth middleware. TLS is placed before authentication because usually authentication happens and is accepted only after TLS is enabled. :param auth: Tuple of (username, password) to be used to ``login`` to the server. :param force_tls: Whether TLS should be forced. :param options: Dictionary of keyword arguments to be used when the SMTP class is called. """ return Postman( host=host, port=port, middlewares=[ middleware.tls(force=force_tls), middleware.auth(*auth), ], **options )
python
def postman(host, port=587, auth=(None, None), force_tls=False, options=None): """ Creates a Postman object with TLS and Auth middleware. TLS is placed before authentication because usually authentication happens and is accepted only after TLS is enabled. :param auth: Tuple of (username, password) to be used to ``login`` to the server. :param force_tls: Whether TLS should be forced. :param options: Dictionary of keyword arguments to be used when the SMTP class is called. """ return Postman( host=host, port=port, middlewares=[ middleware.tls(force=force_tls), middleware.auth(*auth), ], **options )
[ "def", "postman", "(", "host", ",", "port", "=", "587", ",", "auth", "=", "(", "None", ",", "None", ")", ",", "force_tls", "=", "False", ",", "options", "=", "None", ")", ":", "return", "Postman", "(", "host", "=", "host", ",", "port", "=", "port", ",", "middlewares", "=", "[", "middleware", ".", "tls", "(", "force", "=", "force_tls", ")", ",", "middleware", ".", "auth", "(", "*", "auth", ")", ",", "]", ",", "*", "*", "options", ")" ]
Creates a Postman object with TLS and Auth middleware. TLS is placed before authentication because usually authentication happens and is accepted only after TLS is enabled. :param auth: Tuple of (username, password) to be used to ``login`` to the server. :param force_tls: Whether TLS should be forced. :param options: Dictionary of keyword arguments to be used when the SMTP class is called.
[ "Creates", "a", "Postman", "object", "with", "TLS", "and", "Auth", "middleware", ".", "TLS", "is", "placed", "before", "authentication", "because", "usually", "authentication", "happens", "and", "is", "accepted", "only", "after", "TLS", "is", "enabled", "." ]
train
https://github.com/eugene-eeo/mailthon/blob/e3d5aef62505acb4edbc33e3378a04951c3199cb/mailthon/api.py#L46-L68
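Putting postman() and email() together the way the two helpers above suggest; the host, credentials and addresses are placeholders, and options is given explicitly as an empty dict because the record shows it being unpacked with **options.

from mailthon.api import postman, email

p = postman(
    host='smtp.example.com',
    auth=('username', 'password'),
    force_tls=True,
    options={},        # extra keyword arguments for the SMTP session, if any
)
response = p.send(email(
    sender='Sender <sender@example.com>',
    receivers=['receiver@example.com'],
    subject='Hello',
    content='<p>Hello from mailthon.</p>',
))
# response.ok should be True if the server accepted the message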
eugene-eeo/mailthon
mailthon/enclosure.py
Enclosure.mime
def mime(self): """ Returns the finalised mime object, after applying the internal headers. Usually this is not to be overriden. """ mime = self.mime_object() self.headers.prepare(mime) return mime
python
def mime(self): """ Returns the finalised mime object, after applying the internal headers. Usually this is not to be overriden. """ mime = self.mime_object() self.headers.prepare(mime) return mime
[ "def", "mime", "(", "self", ")", ":", "mime", "=", "self", ".", "mime_object", "(", ")", "self", ".", "headers", ".", "prepare", "(", "mime", ")", "return", "mime" ]
Returns the finalised mime object, after applying the internal headers. Usually this is not to be overridden.
[ "Returns", "the", "finalised", "mime", "object", "after", "applying", "the", "internal", "headers", ".", "Usually", "this", "is", "not", "to", "be", "overriden", "." ]
train
https://github.com/eugene-eeo/mailthon/blob/e3d5aef62505acb4edbc33e3378a04951c3199cb/mailthon/enclosure.py#L56-L64
eugene-eeo/mailthon
mailthon/postman.py
Session.send
def send(self, envelope): """ Send an *envelope* which may be an envelope or an enclosure-like object, see :class:`~mailthon.enclosure.Enclosure` and :class:`~mailthon.envelope.Envelope`, and returns a :class:`~mailthon.response.SendmailResponse` object. """ rejected = self.conn.sendmail( stringify_address(envelope.sender), [stringify_address(k) for k in envelope.receivers], envelope.string(), ) status_code, reason = self.conn.noop() return SendmailResponse( status_code, reason, rejected, )
python
def send(self, envelope): """ Send an *envelope* which may be an envelope or an enclosure-like object, see :class:`~mailthon.enclosure.Enclosure` and :class:`~mailthon.envelope.Envelope`, and returns a :class:`~mailthon.response.SendmailResponse` object. """ rejected = self.conn.sendmail( stringify_address(envelope.sender), [stringify_address(k) for k in envelope.receivers], envelope.string(), ) status_code, reason = self.conn.noop() return SendmailResponse( status_code, reason, rejected, )
[ "def", "send", "(", "self", ",", "envelope", ")", ":", "rejected", "=", "self", ".", "conn", ".", "sendmail", "(", "stringify_address", "(", "envelope", ".", "sender", ")", ",", "[", "stringify_address", "(", "k", ")", "for", "k", "in", "envelope", ".", "receivers", "]", ",", "envelope", ".", "string", "(", ")", ",", ")", "status_code", ",", "reason", "=", "self", ".", "conn", ".", "noop", "(", ")", "return", "SendmailResponse", "(", "status_code", ",", "reason", ",", "rejected", ",", ")" ]
Send an *envelope* which may be an envelope or an enclosure-like object, see :class:`~mailthon.enclosure.Enclosure` and :class:`~mailthon.envelope.Envelope`, and returns a :class:`~mailthon.response.SendmailResponse` object.
[ "Send", "an", "*", "envelope", "*", "which", "may", "be", "an", "envelope", "or", "an", "enclosure", "-", "like", "object", "see", ":", "class", ":", "~mailthon", ".", "enclosure", ".", "Enclosure", "and", ":", "class", ":", "~mailthon", ".", "envelope", ".", "Envelope", "and", "returns", "a", ":", "class", ":", "~mailthon", ".", "response", ".", "SendmailResponse", "object", "." ]
train
https://github.com/eugene-eeo/mailthon/blob/e3d5aef62505acb4edbc33e3378a04951c3199cb/mailthon/postman.py#L37-L56
eugene-eeo/mailthon
mailthon/postman.py
Postman.connection
def connection(self): """ A context manager that returns a connection to the server using some *session*. """ conn = self.session(**self.options) try: for item in self.middlewares: item(conn) yield conn finally: conn.teardown()
python
def connection(self): """ A context manager that returns a connection to the server using some *session*. """ conn = self.session(**self.options) try: for item in self.middlewares: item(conn) yield conn finally: conn.teardown()
[ "def", "connection", "(", "self", ")", ":", "conn", "=", "self", ".", "session", "(", "*", "*", "self", ".", "options", ")", "try", ":", "for", "item", "in", "self", ".", "middlewares", ":", "item", "(", "conn", ")", "yield", "conn", "finally", ":", "conn", ".", "teardown", "(", ")" ]
A context manager that returns a connection to the server using some *session*.
[ "A", "context", "manager", "that", "returns", "a", "connection", "to", "the", "server", "using", "some", "*", "session", "*", "." ]
train
https://github.com/eugene-eeo/mailthon/blob/e3d5aef62505acb4edbc33e3378a04951c3199cb/mailthon/postman.py#L87-L98
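A sketch of how the connection() context manager above is typically consumed together with Session.send from the earlier record; the send_one wrapper is illustrative, not copied from the repository.

def send_one(postman, envelope):
    # open a middleware-prepared session, deliver, and let teardown() run
    # even if sendmail raises
    with postman.connection() as conn:
        return conn.send(envelope)

This mirrors the intended pattern: every delivery gets a fresh, middleware-prepared session and a guaranteed teardown.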
eugene-eeo/mailthon
mailthon/headers.py
Headers.sender
def sender(self): """ Returns the sender, respecting the Resent-* headers. In any case, prefer Sender over From, meaning that if Sender is present then From is ignored, as per the RFC. """ to_fetch = ( ['Resent-Sender', 'Resent-From'] if self.resent else ['Sender', 'From'] ) for item in to_fetch: if item in self: _, addr = getaddresses([self[item]])[0] return addr
python
def sender(self): """ Returns the sender, respecting the Resent-* headers. In any case, prefer Sender over From, meaning that if Sender is present then From is ignored, as per the RFC. """ to_fetch = ( ['Resent-Sender', 'Resent-From'] if self.resent else ['Sender', 'From'] ) for item in to_fetch: if item in self: _, addr = getaddresses([self[item]])[0] return addr
[ "def", "sender", "(", "self", ")", ":", "to_fetch", "=", "(", "[", "'Resent-Sender'", ",", "'Resent-From'", "]", "if", "self", ".", "resent", "else", "[", "'Sender'", ",", "'From'", "]", ")", "for", "item", "in", "to_fetch", ":", "if", "item", "in", "self", ":", "_", ",", "addr", "=", "getaddresses", "(", "[", "self", "[", "item", "]", "]", ")", "[", "0", "]", "return", "addr" ]
Returns the sender, respecting the Resent-* headers. In any case, prefer Sender over From, meaning that if Sender is present then From is ignored, as per the RFC.
[ "Returns", "the", "sender", "respecting", "the", "Resent", "-", "*", "headers", ".", "In", "any", "case", "prefer", "Sender", "over", "From", "meaning", "that", "if", "Sender", "is", "present", "then", "From", "is", "ignored", "as", "per", "the", "RFC", "." ]
train
https://github.com/eugene-eeo/mailthon/blob/e3d5aef62505acb4edbc33e3378a04951c3199cb/mailthon/headers.py#L42-L56
eugene-eeo/mailthon
mailthon/headers.py
Headers.receivers
def receivers(self): """ Returns a list of receivers, obtained from the To, Cc, and Bcc headers, respecting the Resent-* headers if the email was resent. """ attrs = ( ['Resent-To', 'Resent-Cc', 'Resent-Bcc'] if self.resent else ['To', 'Cc', 'Bcc'] ) addrs = (v for v in (self.get(k) for k in attrs) if v) return [addr for _, addr in getaddresses(addrs)]
python
def receivers(self): """ Returns a list of receivers, obtained from the To, Cc, and Bcc headers, respecting the Resent-* headers if the email was resent. """ attrs = ( ['Resent-To', 'Resent-Cc', 'Resent-Bcc'] if self.resent else ['To', 'Cc', 'Bcc'] ) addrs = (v for v in (self.get(k) for k in attrs) if v) return [addr for _, addr in getaddresses(addrs)]
[ "def", "receivers", "(", "self", ")", ":", "attrs", "=", "(", "[", "'Resent-To'", ",", "'Resent-Cc'", ",", "'Resent-Bcc'", "]", "if", "self", ".", "resent", "else", "[", "'To'", ",", "'Cc'", ",", "'Bcc'", "]", ")", "addrs", "=", "(", "v", "for", "v", "in", "(", "self", ".", "get", "(", "k", ")", "for", "k", "in", "attrs", ")", "if", "v", ")", "return", "[", "addr", "for", "_", ",", "addr", "in", "getaddresses", "(", "addrs", ")", "]" ]
Returns a list of receivers, obtained from the To, Cc, and Bcc headers, respecting the Resent-* headers if the email was resent.
[ "Returns", "a", "list", "of", "receivers", "obtained", "from", "the", "To", "Cc", "and", "Bcc", "headers", "respecting", "the", "Resent", "-", "*", "headers", "if", "the", "email", "was", "resent", "." ]
train
https://github.com/eugene-eeo/mailthon/blob/e3d5aef62505acb4edbc33e3378a04951c3199cb/mailthon/headers.py#L59-L70
eugene-eeo/mailthon
mailthon/headers.py
Headers.prepare
def prepare(self, mime): """ Prepares a MIME object by applying the headers to the *mime* object. Ignores any Bcc or Resent-Bcc headers. """ for key in self: if key == 'Bcc' or key == 'Resent-Bcc': continue del mime[key] # Python 3.* email's compatibility layer will handle # unicode field values in proper way but Python 2 # won't (it will encode not only additional field # values but also all header values) parsed_header, additional_fields = parse_header( self[key] if IS_PY3 else self[key].encode("utf-8") ) mime.add_header(key, parsed_header, **additional_fields)
python
def prepare(self, mime): """ Prepares a MIME object by applying the headers to the *mime* object. Ignores any Bcc or Resent-Bcc headers. """ for key in self: if key == 'Bcc' or key == 'Resent-Bcc': continue del mime[key] # Python 3.* email's compatibility layer will handle # unicode field values in proper way but Python 2 # won't (it will encode not only additional field # values but also all header values) parsed_header, additional_fields = parse_header( self[key] if IS_PY3 else self[key].encode("utf-8") ) mime.add_header(key, parsed_header, **additional_fields)
[ "def", "prepare", "(", "self", ",", "mime", ")", ":", "for", "key", "in", "self", ":", "if", "key", "==", "'Bcc'", "or", "key", "==", "'Resent-Bcc'", ":", "continue", "del", "mime", "[", "key", "]", "# Python 3.* email's compatibility layer will handle", "# unicode field values in proper way but Python 2", "# won't (it will encode not only additional field", "# values but also all header values)", "parsed_header", ",", "additional_fields", "=", "parse_header", "(", "self", "[", "key", "]", "if", "IS_PY3", "else", "self", "[", "key", "]", ".", "encode", "(", "\"utf-8\"", ")", ")", "mime", ".", "add_header", "(", "key", ",", "parsed_header", ",", "*", "*", "additional_fields", ")" ]
Prepares a MIME object by applying the headers to the *mime* object. Ignores any Bcc or Resent-Bcc headers.
[ "Prepares", "a", "MIME", "object", "by", "applying", "the", "headers", "to", "the", "*", "mime", "*", "object", ".", "Ignores", "any", "Bcc", "or", "Resent", "-", "Bcc", "headers", "." ]
train
https://github.com/eugene-eeo/mailthon/blob/e3d5aef62505acb4edbc33e3378a04951c3199cb/mailthon/headers.py#L72-L90
eugene-eeo/mailthon
mailthon/middleware.py
tls
def tls(force=False): """ Middleware implementing TLS for SMTP connections. By default this is not forced- TLS is only used if STARTTLS is available. If the *force* parameter is set to True, it will not query the server for TLS features before upgrading to TLS. """ def middleware(conn): if force or conn.has_extn('STARTTLS'): conn.starttls() conn.ehlo() return middleware
python
def tls(force=False): """ Middleware implementing TLS for SMTP connections. By default this is not forced- TLS is only used if STARTTLS is available. If the *force* parameter is set to True, it will not query the server for TLS features before upgrading to TLS. """ def middleware(conn): if force or conn.has_extn('STARTTLS'): conn.starttls() conn.ehlo() return middleware
[ "def", "tls", "(", "force", "=", "False", ")", ":", "def", "middleware", "(", "conn", ")", ":", "if", "force", "or", "conn", ".", "has_extn", "(", "'STARTTLS'", ")", ":", "conn", ".", "starttls", "(", ")", "conn", ".", "ehlo", "(", ")", "return", "middleware" ]
Middleware implementing TLS for SMTP connections. By default this is not forced- TLS is only used if STARTTLS is available. If the *force* parameter is set to True, it will not query the server for TLS features before upgrading to TLS.
[ "Middleware", "implementing", "TLS", "for", "SMTP", "connections", ".", "By", "default", "this", "is", "not", "forced", "-", "TLS", "is", "only", "used", "if", "STARTTLS", "is", "available", ".", "If", "the", "*", "force", "*", "parameter", "is", "set", "to", "True", "it", "will", "not", "query", "the", "server", "for", "TLS", "features", "before", "upgrading", "to", "TLS", "." ]
train
https://github.com/eugene-eeo/mailthon/blob/e3d5aef62505acb4edbc33e3378a04951c3199cb/mailthon/middleware.py#L14-L26
eugene-eeo/mailthon
mailthon/middleware.py
auth
def auth(username, password): """ Middleware implementing authentication via LOGIN. Most of the time this middleware needs to be placed *after* TLS. :param username: Username to login with. :param password: Password of the user. """ def middleware(conn): conn.login(username, password) return middleware
python
def auth(username, password): """ Middleware implementing authentication via LOGIN. Most of the time this middleware needs to be placed *after* TLS. :param username: Username to login with. :param password: Password of the user. """ def middleware(conn): conn.login(username, password) return middleware
[ "def", "auth", "(", "username", ",", "password", ")", ":", "def", "middleware", "(", "conn", ")", ":", "conn", ".", "login", "(", "username", ",", "password", ")", "return", "middleware" ]
Middleware implementing authentication via LOGIN. Most of the time this middleware needs to be placed *after* TLS. :param username: Username to login with. :param password: Password of the user.
[ "Middleware", "implementing", "authentication", "via", "LOGIN", ".", "Most", "of", "the", "time", "this", "middleware", "needs", "to", "be", "placed", "*", "after", "*", "TLS", "." ]
train
https://github.com/eugene-eeo/mailthon/blob/e3d5aef62505acb4edbc33e3378a04951c3199cb/mailthon/middleware.py#L29-L40
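Building a Postman directly from the tls() and auth() factories above mirrors what the postman() convenience function does; the host and credentials are placeholders, and the imports assume the package layout shown in the record paths (mailthon/postman.py, mailthon/middleware.py).

from mailthon.postman import Postman
from mailthon import middleware

p = Postman(
    host='smtp.example.com',
    port=587,
    middlewares=[
        middleware.tls(force=True),               # upgrade the session first
        middleware.auth('username', 'password'),  # then authenticate over TLS
    ],
)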
ramses-tech/ramses
ramses/models.py
get_existing_model
def get_existing_model(model_name): """ Try to find existing model class named `model_name`. :param model_name: String name of the model class. """ try: model_cls = engine.get_document_cls(model_name) log.debug('Model `{}` already exists. Using existing one'.format( model_name)) return model_cls except ValueError: log.debug('Model `{}` does not exist'.format(model_name))
python
def get_existing_model(model_name): """ Try to find existing model class named `model_name`. :param model_name: String name of the model class. """ try: model_cls = engine.get_document_cls(model_name) log.debug('Model `{}` already exists. Using existing one'.format( model_name)) return model_cls except ValueError: log.debug('Model `{}` does not exist'.format(model_name))
[ "def", "get_existing_model", "(", "model_name", ")", ":", "try", ":", "model_cls", "=", "engine", ".", "get_document_cls", "(", "model_name", ")", "log", ".", "debug", "(", "'Model `{}` already exists. Using existing one'", ".", "format", "(", "model_name", ")", ")", "return", "model_cls", "except", "ValueError", ":", "log", ".", "debug", "(", "'Model `{}` does not exist'", ".", "format", "(", "model_name", ")", ")" ]
Try to find existing model class named `model_name`. :param model_name: String name of the model class.
[ "Try", "to", "find", "existing", "model", "class", "named", "model_name", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/models.py#L45-L56
ramses-tech/ramses
ramses/models.py
prepare_relationship
def prepare_relationship(config, model_name, raml_resource): """ Create referenced model if it doesn't exist. When preparing a relationship, we check to see if the model that will be referenced already exists. If not, it is created so that it will be possible to use it in a relationship. Thus the first usage of this model in RAML file must provide its schema in POST method resource body schema. :param model_name: Name of model which should be generated. :param raml_resource: Instance of ramlfications.raml.ResourceNode for which :model_name: will be defined. """ if get_existing_model(model_name) is None: plural_route = '/' + pluralize(model_name.lower()) route = '/' + model_name.lower() for res in raml_resource.root.resources: if res.method.upper() != 'POST': continue if res.path.endswith(plural_route) or res.path.endswith(route): break else: raise ValueError('Model `{}` used in relationship is not ' 'defined'.format(model_name)) setup_data_model(config, res, model_name)
python
def prepare_relationship(config, model_name, raml_resource): """ Create referenced model if it doesn't exist. When preparing a relationship, we check to see if the model that will be referenced already exists. If not, it is created so that it will be possible to use it in a relationship. Thus the first usage of this model in RAML file must provide its schema in POST method resource body schema. :param model_name: Name of model which should be generated. :param raml_resource: Instance of ramlfications.raml.ResourceNode for which :model_name: will be defined. """ if get_existing_model(model_name) is None: plural_route = '/' + pluralize(model_name.lower()) route = '/' + model_name.lower() for res in raml_resource.root.resources: if res.method.upper() != 'POST': continue if res.path.endswith(plural_route) or res.path.endswith(route): break else: raise ValueError('Model `{}` used in relationship is not ' 'defined'.format(model_name)) setup_data_model(config, res, model_name)
[ "def", "prepare_relationship", "(", "config", ",", "model_name", ",", "raml_resource", ")", ":", "if", "get_existing_model", "(", "model_name", ")", "is", "None", ":", "plural_route", "=", "'/'", "+", "pluralize", "(", "model_name", ".", "lower", "(", ")", ")", "route", "=", "'/'", "+", "model_name", ".", "lower", "(", ")", "for", "res", "in", "raml_resource", ".", "root", ".", "resources", ":", "if", "res", ".", "method", ".", "upper", "(", ")", "!=", "'POST'", ":", "continue", "if", "res", ".", "path", ".", "endswith", "(", "plural_route", ")", "or", "res", ".", "path", ".", "endswith", "(", "route", ")", ":", "break", "else", ":", "raise", "ValueError", "(", "'Model `{}` used in relationship is not '", "'defined'", ".", "format", "(", "model_name", ")", ")", "setup_data_model", "(", "config", ",", "res", ",", "model_name", ")" ]
Create referenced model if it doesn't exist. When preparing a relationship, we check to see if the model that will be referenced already exists. If not, it is created so that it will be possible to use it in a relationship. Thus the first usage of this model in the RAML file must provide its schema in the POST method's resource body schema. :param model_name: Name of model which should be generated. :param raml_resource: Instance of ramlfications.raml.ResourceNode for which :model_name: will be defined.

[ "Create", "referenced", "model", "if", "it", "doesn", "t", "exist", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/models.py#L59-L82
ramses-tech/ramses
ramses/models.py
generate_model_cls
def generate_model_cls(config, schema, model_name, raml_resource, es_based=True): """ Generate model class. Engine DB field types are determined using `type_fields` and only those types may be used. :param schema: Model schema dict parsed from RAML. :param model_name: String that is used as new model's name. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param es_based: Boolean indicating if generated model should be a subclass of Elasticsearch-based document class or not. It True, ESBaseDocument is used; BaseDocument is used otherwise. Defaults to True. """ from nefertari.authentication.models import AuthModelMethodsMixin base_cls = engine.ESBaseDocument if es_based else engine.BaseDocument model_name = str(model_name) metaclass = type(base_cls) auth_model = schema.get('_auth_model', False) bases = [] if config.registry.database_acls: from nefertari_guards import engine as guards_engine bases.append(guards_engine.DocumentACLMixin) if auth_model: bases.append(AuthModelMethodsMixin) bases.append(base_cls) attrs = { '__tablename__': model_name.lower(), '_public_fields': schema.get('_public_fields') or [], '_auth_fields': schema.get('_auth_fields') or [], '_hidden_fields': schema.get('_hidden_fields') or [], '_nested_relationships': schema.get('_nested_relationships') or [], } if '_nesting_depth' in schema: attrs['_nesting_depth'] = schema.get('_nesting_depth') # Generate fields from properties properties = schema.get('properties', {}) for field_name, props in properties.items(): if field_name in attrs: continue db_settings = props.get('_db_settings') if db_settings is None: continue field_kwargs = db_settings.copy() field_kwargs['required'] = bool(field_kwargs.get('required')) for default_attr_key in ('default', 'onupdate'): value = field_kwargs.get(default_attr_key) if is_callable_tag(value): field_kwargs[default_attr_key] = resolve_to_callable(value) type_name = ( field_kwargs.pop('type', 'string') or 'string').lower() if type_name not in type_fields: raise ValueError('Unknown type: {}'.format(type_name)) field_cls = type_fields[type_name] if field_cls is engine.Relationship: prepare_relationship( config, field_kwargs['document'], raml_resource) if field_cls is engine.ForeignKeyField: key = 'ref_column_type' field_kwargs[key] = type_fields[field_kwargs[key]] if field_cls is engine.ListField: key = 'item_type' field_kwargs[key] = type_fields[field_kwargs[key]] attrs[field_name] = field_cls(**field_kwargs) # Update model definition with methods and variables defined in registry attrs.update(registry.mget(model_name)) # Generate new model class model_cls = metaclass(model_name, tuple(bases), attrs) setup_model_event_subscribers(config, model_cls, schema) setup_fields_processors(config, model_cls, schema) return model_cls, auth_model
python
def generate_model_cls(config, schema, model_name, raml_resource, es_based=True): """ Generate model class. Engine DB field types are determined using `type_fields` and only those types may be used. :param schema: Model schema dict parsed from RAML. :param model_name: String that is used as new model's name. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param es_based: Boolean indicating if generated model should be a subclass of Elasticsearch-based document class or not. It True, ESBaseDocument is used; BaseDocument is used otherwise. Defaults to True. """ from nefertari.authentication.models import AuthModelMethodsMixin base_cls = engine.ESBaseDocument if es_based else engine.BaseDocument model_name = str(model_name) metaclass = type(base_cls) auth_model = schema.get('_auth_model', False) bases = [] if config.registry.database_acls: from nefertari_guards import engine as guards_engine bases.append(guards_engine.DocumentACLMixin) if auth_model: bases.append(AuthModelMethodsMixin) bases.append(base_cls) attrs = { '__tablename__': model_name.lower(), '_public_fields': schema.get('_public_fields') or [], '_auth_fields': schema.get('_auth_fields') or [], '_hidden_fields': schema.get('_hidden_fields') or [], '_nested_relationships': schema.get('_nested_relationships') or [], } if '_nesting_depth' in schema: attrs['_nesting_depth'] = schema.get('_nesting_depth') # Generate fields from properties properties = schema.get('properties', {}) for field_name, props in properties.items(): if field_name in attrs: continue db_settings = props.get('_db_settings') if db_settings is None: continue field_kwargs = db_settings.copy() field_kwargs['required'] = bool(field_kwargs.get('required')) for default_attr_key in ('default', 'onupdate'): value = field_kwargs.get(default_attr_key) if is_callable_tag(value): field_kwargs[default_attr_key] = resolve_to_callable(value) type_name = ( field_kwargs.pop('type', 'string') or 'string').lower() if type_name not in type_fields: raise ValueError('Unknown type: {}'.format(type_name)) field_cls = type_fields[type_name] if field_cls is engine.Relationship: prepare_relationship( config, field_kwargs['document'], raml_resource) if field_cls is engine.ForeignKeyField: key = 'ref_column_type' field_kwargs[key] = type_fields[field_kwargs[key]] if field_cls is engine.ListField: key = 'item_type' field_kwargs[key] = type_fields[field_kwargs[key]] attrs[field_name] = field_cls(**field_kwargs) # Update model definition with methods and variables defined in registry attrs.update(registry.mget(model_name)) # Generate new model class model_cls = metaclass(model_name, tuple(bases), attrs) setup_model_event_subscribers(config, model_cls, schema) setup_fields_processors(config, model_cls, schema) return model_cls, auth_model
[ "def", "generate_model_cls", "(", "config", ",", "schema", ",", "model_name", ",", "raml_resource", ",", "es_based", "=", "True", ")", ":", "from", "nefertari", ".", "authentication", ".", "models", "import", "AuthModelMethodsMixin", "base_cls", "=", "engine", ".", "ESBaseDocument", "if", "es_based", "else", "engine", ".", "BaseDocument", "model_name", "=", "str", "(", "model_name", ")", "metaclass", "=", "type", "(", "base_cls", ")", "auth_model", "=", "schema", ".", "get", "(", "'_auth_model'", ",", "False", ")", "bases", "=", "[", "]", "if", "config", ".", "registry", ".", "database_acls", ":", "from", "nefertari_guards", "import", "engine", "as", "guards_engine", "bases", ".", "append", "(", "guards_engine", ".", "DocumentACLMixin", ")", "if", "auth_model", ":", "bases", ".", "append", "(", "AuthModelMethodsMixin", ")", "bases", ".", "append", "(", "base_cls", ")", "attrs", "=", "{", "'__tablename__'", ":", "model_name", ".", "lower", "(", ")", ",", "'_public_fields'", ":", "schema", ".", "get", "(", "'_public_fields'", ")", "or", "[", "]", ",", "'_auth_fields'", ":", "schema", ".", "get", "(", "'_auth_fields'", ")", "or", "[", "]", ",", "'_hidden_fields'", ":", "schema", ".", "get", "(", "'_hidden_fields'", ")", "or", "[", "]", ",", "'_nested_relationships'", ":", "schema", ".", "get", "(", "'_nested_relationships'", ")", "or", "[", "]", ",", "}", "if", "'_nesting_depth'", "in", "schema", ":", "attrs", "[", "'_nesting_depth'", "]", "=", "schema", ".", "get", "(", "'_nesting_depth'", ")", "# Generate fields from properties", "properties", "=", "schema", ".", "get", "(", "'properties'", ",", "{", "}", ")", "for", "field_name", ",", "props", "in", "properties", ".", "items", "(", ")", ":", "if", "field_name", "in", "attrs", ":", "continue", "db_settings", "=", "props", ".", "get", "(", "'_db_settings'", ")", "if", "db_settings", "is", "None", ":", "continue", "field_kwargs", "=", "db_settings", ".", "copy", "(", ")", "field_kwargs", "[", "'required'", "]", "=", "bool", "(", "field_kwargs", ".", "get", "(", "'required'", ")", ")", "for", "default_attr_key", "in", "(", "'default'", ",", "'onupdate'", ")", ":", "value", "=", "field_kwargs", ".", "get", "(", "default_attr_key", ")", "if", "is_callable_tag", "(", "value", ")", ":", "field_kwargs", "[", "default_attr_key", "]", "=", "resolve_to_callable", "(", "value", ")", "type_name", "=", "(", "field_kwargs", ".", "pop", "(", "'type'", ",", "'string'", ")", "or", "'string'", ")", ".", "lower", "(", ")", "if", "type_name", "not", "in", "type_fields", ":", "raise", "ValueError", "(", "'Unknown type: {}'", ".", "format", "(", "type_name", ")", ")", "field_cls", "=", "type_fields", "[", "type_name", "]", "if", "field_cls", "is", "engine", ".", "Relationship", ":", "prepare_relationship", "(", "config", ",", "field_kwargs", "[", "'document'", "]", ",", "raml_resource", ")", "if", "field_cls", "is", "engine", ".", "ForeignKeyField", ":", "key", "=", "'ref_column_type'", "field_kwargs", "[", "key", "]", "=", "type_fields", "[", "field_kwargs", "[", "key", "]", "]", "if", "field_cls", "is", "engine", ".", "ListField", ":", "key", "=", "'item_type'", "field_kwargs", "[", "key", "]", "=", "type_fields", "[", "field_kwargs", "[", "key", "]", "]", "attrs", "[", "field_name", "]", "=", "field_cls", "(", "*", "*", "field_kwargs", ")", "# Update model definition with methods and variables defined in registry", "attrs", ".", "update", "(", "registry", ".", "mget", "(", "model_name", ")", ")", "# Generate new model class", "model_cls", "=", "metaclass", 
"(", "model_name", ",", "tuple", "(", "bases", ")", ",", "attrs", ")", "setup_model_event_subscribers", "(", "config", ",", "model_cls", ",", "schema", ")", "setup_fields_processors", "(", "config", ",", "model_cls", ",", "schema", ")", "return", "model_cls", ",", "auth_model" ]
Generate model class. Engine DB field types are determined using `type_fields` and only those types may be used. :param schema: Model schema dict parsed from RAML. :param model_name: String that is used as new model's name. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param es_based: Boolean indicating if generated model should be a subclass of Elasticsearch-based document class or not. If True, ESBaseDocument is used; BaseDocument is used otherwise. Defaults to True.
[ "Generate", "model", "class", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/models.py#L85-L169
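The schema dict that generate_model_cls() consumes is easiest to read as a concrete example. The keys below mirror what the function reads (_auth_model, the *_fields lists, and per-property _db_settings), but the model name, field names and the assumption that 'id_field', 'string' and 'text' are registered in type_fields are illustrative, not taken from the record.

story_schema = {
    '_auth_model': False,
    '_public_fields': ['title'],
    '_auth_fields': ['id', 'title', 'description'],
    '_hidden_fields': [],
    'properties': {
        'id': {'_db_settings': {'type': 'id_field'}},
        'title': {'_db_settings': {'type': 'string', 'required': True}},
        'description': {'_db_settings': {'type': 'text'}},
    },
}
# generate_model_cls(config, schema=story_schema, model_name='Story',
#                    raml_resource=raml_resource) would then emit a `Story`
# document class with those three fields.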
ramses-tech/ramses
ramses/models.py
setup_data_model
def setup_data_model(config, raml_resource, model_name): """ Setup storage/data model and return generated model class. Process follows these steps: * Resource schema is found and restructured by `resource_schema`. * Model class is generated from properties dict using util function `generate_model_cls`. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param model_name: String representing model name. """ model_cls = get_existing_model(model_name) schema = resource_schema(raml_resource) if not schema: raise Exception('Missing schema for model `{}`'.format(model_name)) if model_cls is not None: return model_cls, schema.get('_auth_model', False) log.info('Generating model class `{}`'.format(model_name)) return generate_model_cls( config, schema=schema, model_name=model_name, raml_resource=raml_resource, )
python
def setup_data_model(config, raml_resource, model_name): """ Setup storage/data model and return generated model class. Process follows these steps: * Resource schema is found and restructured by `resource_schema`. * Model class is generated from properties dict using util function `generate_model_cls`. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param model_name: String representing model name. """ model_cls = get_existing_model(model_name) schema = resource_schema(raml_resource) if not schema: raise Exception('Missing schema for model `{}`'.format(model_name)) if model_cls is not None: return model_cls, schema.get('_auth_model', False) log.info('Generating model class `{}`'.format(model_name)) return generate_model_cls( config, schema=schema, model_name=model_name, raml_resource=raml_resource, )
[ "def", "setup_data_model", "(", "config", ",", "raml_resource", ",", "model_name", ")", ":", "model_cls", "=", "get_existing_model", "(", "model_name", ")", "schema", "=", "resource_schema", "(", "raml_resource", ")", "if", "not", "schema", ":", "raise", "Exception", "(", "'Missing schema for model `{}`'", ".", "format", "(", "model_name", ")", ")", "if", "model_cls", "is", "not", "None", ":", "return", "model_cls", ",", "schema", ".", "get", "(", "'_auth_model'", ",", "False", ")", "log", ".", "info", "(", "'Generating model class `{}`'", ".", "format", "(", "model_name", ")", ")", "return", "generate_model_cls", "(", "config", ",", "schema", "=", "schema", ",", "model_name", "=", "model_name", ",", "raml_resource", "=", "raml_resource", ",", ")" ]
Setup storage/data model and return generated model class. Process follows these steps: * Resource schema is found and restructured by `resource_schema`. * Model class is generated from properties dict using util function `generate_model_cls`. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param model_name: String representing model name.
[ "Setup", "storage", "/", "data", "model", "and", "return", "generated", "model", "class", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/models.py#L172-L198
ramses-tech/ramses
ramses/models.py
handle_model_generation
def handle_model_generation(config, raml_resource): """ Generates model name and runs `setup_data_model` to get or generate actual model class. :param raml_resource: Instance of ramlfications.raml.ResourceNode. """ model_name = generate_model_name(raml_resource) try: return setup_data_model(config, raml_resource, model_name) except ValueError as ex: raise ValueError('{}: {}'.format(model_name, str(ex)))
python
def handle_model_generation(config, raml_resource): """ Generates model name and runs `setup_data_model` to get or generate actual model class. :param raml_resource: Instance of ramlfications.raml.ResourceNode. """ model_name = generate_model_name(raml_resource) try: return setup_data_model(config, raml_resource, model_name) except ValueError as ex: raise ValueError('{}: {}'.format(model_name, str(ex)))
[ "def", "handle_model_generation", "(", "config", ",", "raml_resource", ")", ":", "model_name", "=", "generate_model_name", "(", "raml_resource", ")", "try", ":", "return", "setup_data_model", "(", "config", ",", "raml_resource", ",", "model_name", ")", "except", "ValueError", "as", "ex", ":", "raise", "ValueError", "(", "'{}: {}'", ".", "format", "(", "model_name", ",", "str", "(", "ex", ")", ")", ")" ]
Generates model name and runs `setup_data_model` to get or generate actual model class. :param raml_resource: Instance of ramlfications.raml.ResourceNode.
[ "Generates", "model", "name", "and", "runs", "setup_data_model", "to", "get", "or", "generate", "actual", "model", "class", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/models.py#L201-L211
ramses-tech/ramses
ramses/models.py
setup_model_event_subscribers
def setup_model_event_subscribers(config, model_cls, schema): """ Set up model event subscribers. :param config: Pyramid Configurator instance. :param model_cls: Model class for which handlers should be connected. :param schema: Dict of model JSON schema. """ events_map = get_events_map() model_events = schema.get('_event_handlers', {}) event_kwargs = {'model': model_cls} for event_tag, subscribers in model_events.items(): type_, action = event_tag.split('_') event_objects = events_map[type_][action] if not isinstance(event_objects, list): event_objects = [event_objects] for sub_name in subscribers: sub_func = resolve_to_callable(sub_name) config.subscribe_to_events( sub_func, event_objects, **event_kwargs)
python
def setup_model_event_subscribers(config, model_cls, schema): """ Set up model event subscribers. :param config: Pyramid Configurator instance. :param model_cls: Model class for which handlers should be connected. :param schema: Dict of model JSON schema. """ events_map = get_events_map() model_events = schema.get('_event_handlers', {}) event_kwargs = {'model': model_cls} for event_tag, subscribers in model_events.items(): type_, action = event_tag.split('_') event_objects = events_map[type_][action] if not isinstance(event_objects, list): event_objects = [event_objects] for sub_name in subscribers: sub_func = resolve_to_callable(sub_name) config.subscribe_to_events( sub_func, event_objects, **event_kwargs)
[ "def", "setup_model_event_subscribers", "(", "config", ",", "model_cls", ",", "schema", ")", ":", "events_map", "=", "get_events_map", "(", ")", "model_events", "=", "schema", ".", "get", "(", "'_event_handlers'", ",", "{", "}", ")", "event_kwargs", "=", "{", "'model'", ":", "model_cls", "}", "for", "event_tag", ",", "subscribers", "in", "model_events", ".", "items", "(", ")", ":", "type_", ",", "action", "=", "event_tag", ".", "split", "(", "'_'", ")", "event_objects", "=", "events_map", "[", "type_", "]", "[", "action", "]", "if", "not", "isinstance", "(", "event_objects", ",", "list", ")", ":", "event_objects", "=", "[", "event_objects", "]", "for", "sub_name", "in", "subscribers", ":", "sub_func", "=", "resolve_to_callable", "(", "sub_name", ")", "config", ".", "subscribe_to_events", "(", "sub_func", ",", "event_objects", ",", "*", "*", "event_kwargs", ")" ]
Set up model event subscribers. :param config: Pyramid Configurator instance. :param model_cls: Model class for which handlers should be connected. :param schema: Dict of model JSON schema.
[ "Set", "up", "model", "event", "subscribers", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/models.py#L214-L235
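The _event_handlers block consumed by setup_model_event_subscribers() is a mapping from '<type>_<action>' tags to lists of dotted paths resolved by resolve_to_callable(); the tag names and handler paths below are illustrative only.

model_events = {
    'after_create': ['myapp.subscribers.send_welcome_email'],
    'before_update': ['myapp.subscribers.touch_timestamp'],
}

for event_tag, subscribers in model_events.items():
    type_, action = event_tag.split('_')      # e.g. ('after', 'create')
    print(type_, action, subscribers)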
ramses-tech/ramses
ramses/models.py
setup_fields_processors
def setup_fields_processors(config, model_cls, schema): """ Set up model fields' processors. :param config: Pyramid Configurator instance. :param model_cls: Model class for field of which processors should be set up. :param schema: Dict of model JSON schema. """ properties = schema.get('properties', {}) for field_name, props in properties.items(): if not props: continue processors = props.get('_processors') backref_processors = props.get('_backref_processors') if processors: processors = [resolve_to_callable(val) for val in processors] setup_kwargs = {'model': model_cls, 'field': field_name} config.add_field_processors(processors, **setup_kwargs) if backref_processors: db_settings = props.get('_db_settings', {}) is_relationship = db_settings.get('type') == 'relationship' document = db_settings.get('document') backref_name = db_settings.get('backref_name') if not (is_relationship and document and backref_name): continue backref_processors = [ resolve_to_callable(val) for val in backref_processors] setup_kwargs = { 'model': engine.get_document_cls(document), 'field': backref_name } config.add_field_processors( backref_processors, **setup_kwargs)
python
def setup_fields_processors(config, model_cls, schema): """ Set up model fields' processors. :param config: Pyramid Configurator instance. :param model_cls: Model class for field of which processors should be set up. :param schema: Dict of model JSON schema. """ properties = schema.get('properties', {}) for field_name, props in properties.items(): if not props: continue processors = props.get('_processors') backref_processors = props.get('_backref_processors') if processors: processors = [resolve_to_callable(val) for val in processors] setup_kwargs = {'model': model_cls, 'field': field_name} config.add_field_processors(processors, **setup_kwargs) if backref_processors: db_settings = props.get('_db_settings', {}) is_relationship = db_settings.get('type') == 'relationship' document = db_settings.get('document') backref_name = db_settings.get('backref_name') if not (is_relationship and document and backref_name): continue backref_processors = [ resolve_to_callable(val) for val in backref_processors] setup_kwargs = { 'model': engine.get_document_cls(document), 'field': backref_name } config.add_field_processors( backref_processors, **setup_kwargs)
[ "def", "setup_fields_processors", "(", "config", ",", "model_cls", ",", "schema", ")", ":", "properties", "=", "schema", ".", "get", "(", "'properties'", ",", "{", "}", ")", "for", "field_name", ",", "props", "in", "properties", ".", "items", "(", ")", ":", "if", "not", "props", ":", "continue", "processors", "=", "props", ".", "get", "(", "'_processors'", ")", "backref_processors", "=", "props", ".", "get", "(", "'_backref_processors'", ")", "if", "processors", ":", "processors", "=", "[", "resolve_to_callable", "(", "val", ")", "for", "val", "in", "processors", "]", "setup_kwargs", "=", "{", "'model'", ":", "model_cls", ",", "'field'", ":", "field_name", "}", "config", ".", "add_field_processors", "(", "processors", ",", "*", "*", "setup_kwargs", ")", "if", "backref_processors", ":", "db_settings", "=", "props", ".", "get", "(", "'_db_settings'", ",", "{", "}", ")", "is_relationship", "=", "db_settings", ".", "get", "(", "'type'", ")", "==", "'relationship'", "document", "=", "db_settings", ".", "get", "(", "'document'", ")", "backref_name", "=", "db_settings", ".", "get", "(", "'backref_name'", ")", "if", "not", "(", "is_relationship", "and", "document", "and", "backref_name", ")", ":", "continue", "backref_processors", "=", "[", "resolve_to_callable", "(", "val", ")", "for", "val", "in", "backref_processors", "]", "setup_kwargs", "=", "{", "'model'", ":", "engine", ".", "get_document_cls", "(", "document", ")", ",", "'field'", ":", "backref_name", "}", "config", ".", "add_field_processors", "(", "backref_processors", ",", "*", "*", "setup_kwargs", ")" ]
Set up model fields' processors. :param config: Pyramid Configurator instance. :param model_cls: Model class for field of which processors should be set up. :param schema: Dict of model JSON schema.
[ "Set", "up", "model", "fields", "processors", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/models.py#L238-L274
ramses-tech/ramses
ramses/auth.py
_setup_ticket_policy
def _setup_ticket_policy(config, params): """ Setup Pyramid AuthTktAuthenticationPolicy. Notes: * Initial `secret` params value is considered to be a name of config param that represents a cookie name. * `auth_model.get_groups_by_userid` is used as a `callback`. * Also connects basic routes to perform authentication actions. :param config: Pyramid Configurator instance. :param params: Nefertari dictset which contains security scheme `settings`. """ from nefertari.authentication.views import ( TicketAuthRegisterView, TicketAuthLoginView, TicketAuthLogoutView) log.info('Configuring Pyramid Ticket Authn policy') if 'secret' not in params: raise ValueError( 'Missing required security scheme settings: secret') params['secret'] = config.registry.settings[params['secret']] auth_model = config.registry.auth_model params['callback'] = auth_model.get_groups_by_userid config.add_request_method( auth_model.get_authuser_by_userid, 'user', reify=True) policy = AuthTktAuthenticationPolicy(**params) RegisterViewBase = TicketAuthRegisterView if config.registry.database_acls: class RegisterViewBase(ACLAssignRegisterMixin, TicketAuthRegisterView): pass class RamsesTicketAuthRegisterView(RegisterViewBase): Model = config.registry.auth_model class RamsesTicketAuthLoginView(TicketAuthLoginView): Model = config.registry.auth_model class RamsesTicketAuthLogoutView(TicketAuthLogoutView): Model = config.registry.auth_model common_kw = { 'prefix': 'auth', 'factory': 'nefertari.acl.AuthenticationACL', } root = config.get_root_resource() root.add('register', view=RamsesTicketAuthRegisterView, **common_kw) root.add('login', view=RamsesTicketAuthLoginView, **common_kw) root.add('logout', view=RamsesTicketAuthLogoutView, **common_kw) return policy
python
def _setup_ticket_policy(config, params): """ Setup Pyramid AuthTktAuthenticationPolicy. Notes: * Initial `secret` params value is considered to be a name of config param that represents a cookie name. * `auth_model.get_groups_by_userid` is used as a `callback`. * Also connects basic routes to perform authentication actions. :param config: Pyramid Configurator instance. :param params: Nefertari dictset which contains security scheme `settings`. """ from nefertari.authentication.views import ( TicketAuthRegisterView, TicketAuthLoginView, TicketAuthLogoutView) log.info('Configuring Pyramid Ticket Authn policy') if 'secret' not in params: raise ValueError( 'Missing required security scheme settings: secret') params['secret'] = config.registry.settings[params['secret']] auth_model = config.registry.auth_model params['callback'] = auth_model.get_groups_by_userid config.add_request_method( auth_model.get_authuser_by_userid, 'user', reify=True) policy = AuthTktAuthenticationPolicy(**params) RegisterViewBase = TicketAuthRegisterView if config.registry.database_acls: class RegisterViewBase(ACLAssignRegisterMixin, TicketAuthRegisterView): pass class RamsesTicketAuthRegisterView(RegisterViewBase): Model = config.registry.auth_model class RamsesTicketAuthLoginView(TicketAuthLoginView): Model = config.registry.auth_model class RamsesTicketAuthLogoutView(TicketAuthLogoutView): Model = config.registry.auth_model common_kw = { 'prefix': 'auth', 'factory': 'nefertari.acl.AuthenticationACL', } root = config.get_root_resource() root.add('register', view=RamsesTicketAuthRegisterView, **common_kw) root.add('login', view=RamsesTicketAuthLoginView, **common_kw) root.add('logout', view=RamsesTicketAuthLogoutView, **common_kw) return policy
[ "def", "_setup_ticket_policy", "(", "config", ",", "params", ")", ":", "from", "nefertari", ".", "authentication", ".", "views", "import", "(", "TicketAuthRegisterView", ",", "TicketAuthLoginView", ",", "TicketAuthLogoutView", ")", "log", ".", "info", "(", "'Configuring Pyramid Ticket Authn policy'", ")", "if", "'secret'", "not", "in", "params", ":", "raise", "ValueError", "(", "'Missing required security scheme settings: secret'", ")", "params", "[", "'secret'", "]", "=", "config", ".", "registry", ".", "settings", "[", "params", "[", "'secret'", "]", "]", "auth_model", "=", "config", ".", "registry", ".", "auth_model", "params", "[", "'callback'", "]", "=", "auth_model", ".", "get_groups_by_userid", "config", ".", "add_request_method", "(", "auth_model", ".", "get_authuser_by_userid", ",", "'user'", ",", "reify", "=", "True", ")", "policy", "=", "AuthTktAuthenticationPolicy", "(", "*", "*", "params", ")", "RegisterViewBase", "=", "TicketAuthRegisterView", "if", "config", ".", "registry", ".", "database_acls", ":", "class", "RegisterViewBase", "(", "ACLAssignRegisterMixin", ",", "TicketAuthRegisterView", ")", ":", "pass", "class", "RamsesTicketAuthRegisterView", "(", "RegisterViewBase", ")", ":", "Model", "=", "config", ".", "registry", ".", "auth_model", "class", "RamsesTicketAuthLoginView", "(", "TicketAuthLoginView", ")", ":", "Model", "=", "config", ".", "registry", ".", "auth_model", "class", "RamsesTicketAuthLogoutView", "(", "TicketAuthLogoutView", ")", ":", "Model", "=", "config", ".", "registry", ".", "auth_model", "common_kw", "=", "{", "'prefix'", ":", "'auth'", ",", "'factory'", ":", "'nefertari.acl.AuthenticationACL'", ",", "}", "root", "=", "config", ".", "get_root_resource", "(", ")", "root", ".", "add", "(", "'register'", ",", "view", "=", "RamsesTicketAuthRegisterView", ",", "*", "*", "common_kw", ")", "root", ".", "add", "(", "'login'", ",", "view", "=", "RamsesTicketAuthLoginView", ",", "*", "*", "common_kw", ")", "root", ".", "add", "(", "'logout'", ",", "view", "=", "RamsesTicketAuthLogoutView", ",", "*", "*", "common_kw", ")", "return", "policy" ]
Setup Pyramid AuthTktAuthenticationPolicy. Notes: * Initial `secret` params value is considered to be a name of config param that represents a cookie name. * `auth_model.get_groups_by_userid` is used as a `callback`. * Also connects basic routes to perform authentication actions. :param config: Pyramid Configurator instance. :param params: Nefertari dictset which contains security scheme `settings`.
[ "Setup", "Pyramid", "AuthTktAuthenticationPolicy", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/auth.py#L46-L102
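The function above resolves the `secret` setting indirectly: the RAML scheme supplies the name of a Pyramid registry setting, and the real secret is read from `config.registry.settings`. A minimal, hedged sketch of the `params` mapping it expects -- the setting name 'auth_tkt.secret' and the 'hashalg' key are illustrative assumptions, not ramses defaults; any extra keys are forwarded to AuthTktAuthenticationPolicy unchanged.
# Hedged sketch of the scheme settings consumed by _setup_ticket_policy.
params = {
    'secret': 'auth_tkt.secret',  # name of the registry setting that holds the real secret
    'hashalg': 'sha512',          # passed straight through to AuthTktAuthenticationPolicy
}
# Inside the function this roughly becomes:
#   params['secret'] = config.registry.settings['auth_tkt.secret']
#   params['callback'] = auth_model.get_groups_by_userid
#   policy = AuthTktAuthenticationPolicy(**params)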
ramses-tech/ramses
ramses/auth.py
_setup_apikey_policy
def _setup_apikey_policy(config, params): """ Setup `nefertari.ApiKeyAuthenticationPolicy`. Notes: * User may provide model name in :params['user_model']: to define the name of the user model. * `auth_model.get_groups_by_token` is used to perform username and token check * `auth_model.get_token_credentials` is used to get username and token from userid * Also connects basic routes to perform authentication actions. Arguments: :config: Pyramid Configurator instance. :params: Nefertari dictset which contains security scheme `settings`. """ from nefertari.authentication.views import ( TokenAuthRegisterView, TokenAuthClaimView, TokenAuthResetView) log.info('Configuring ApiKey Authn policy') auth_model = config.registry.auth_model params['check'] = auth_model.get_groups_by_token params['credentials_callback'] = auth_model.get_token_credentials params['user_model'] = auth_model config.add_request_method( auth_model.get_authuser_by_name, 'user', reify=True) policy = ApiKeyAuthenticationPolicy(**params) RegisterViewBase = TokenAuthRegisterView if config.registry.database_acls: class RegisterViewBase(ACLAssignRegisterMixin, TokenAuthRegisterView): pass class RamsesTokenAuthRegisterView(RegisterViewBase): Model = auth_model class RamsesTokenAuthClaimView(TokenAuthClaimView): Model = auth_model class RamsesTokenAuthResetView(TokenAuthResetView): Model = auth_model common_kw = { 'prefix': 'auth', 'factory': 'nefertari.acl.AuthenticationACL', } root = config.get_root_resource() root.add('register', view=RamsesTokenAuthRegisterView, **common_kw) root.add('token', view=RamsesTokenAuthClaimView, **common_kw) root.add('reset_token', view=RamsesTokenAuthResetView, **common_kw) return policy
python
def _setup_apikey_policy(config, params): """ Setup `nefertari.ApiKeyAuthenticationPolicy`. Notes: * User may provide model name in :params['user_model']: to define the name of the user model. * `auth_model.get_groups_by_token` is used to perform username and token check * `auth_model.get_token_credentials` is used to get username and token from userid * Also connects basic routes to perform authentication actions. Arguments: :config: Pyramid Configurator instance. :params: Nefertari dictset which contains security scheme `settings`. """ from nefertari.authentication.views import ( TokenAuthRegisterView, TokenAuthClaimView, TokenAuthResetView) log.info('Configuring ApiKey Authn policy') auth_model = config.registry.auth_model params['check'] = auth_model.get_groups_by_token params['credentials_callback'] = auth_model.get_token_credentials params['user_model'] = auth_model config.add_request_method( auth_model.get_authuser_by_name, 'user', reify=True) policy = ApiKeyAuthenticationPolicy(**params) RegisterViewBase = TokenAuthRegisterView if config.registry.database_acls: class RegisterViewBase(ACLAssignRegisterMixin, TokenAuthRegisterView): pass class RamsesTokenAuthRegisterView(RegisterViewBase): Model = auth_model class RamsesTokenAuthClaimView(TokenAuthClaimView): Model = auth_model class RamsesTokenAuthResetView(TokenAuthResetView): Model = auth_model common_kw = { 'prefix': 'auth', 'factory': 'nefertari.acl.AuthenticationACL', } root = config.get_root_resource() root.add('register', view=RamsesTokenAuthRegisterView, **common_kw) root.add('token', view=RamsesTokenAuthClaimView, **common_kw) root.add('reset_token', view=RamsesTokenAuthResetView, **common_kw) return policy
[ "def", "_setup_apikey_policy", "(", "config", ",", "params", ")", ":", "from", "nefertari", ".", "authentication", ".", "views", "import", "(", "TokenAuthRegisterView", ",", "TokenAuthClaimView", ",", "TokenAuthResetView", ")", "log", ".", "info", "(", "'Configuring ApiKey Authn policy'", ")", "auth_model", "=", "config", ".", "registry", ".", "auth_model", "params", "[", "'check'", "]", "=", "auth_model", ".", "get_groups_by_token", "params", "[", "'credentials_callback'", "]", "=", "auth_model", ".", "get_token_credentials", "params", "[", "'user_model'", "]", "=", "auth_model", "config", ".", "add_request_method", "(", "auth_model", ".", "get_authuser_by_name", ",", "'user'", ",", "reify", "=", "True", ")", "policy", "=", "ApiKeyAuthenticationPolicy", "(", "*", "*", "params", ")", "RegisterViewBase", "=", "TokenAuthRegisterView", "if", "config", ".", "registry", ".", "database_acls", ":", "class", "RegisterViewBase", "(", "ACLAssignRegisterMixin", ",", "TokenAuthRegisterView", ")", ":", "pass", "class", "RamsesTokenAuthRegisterView", "(", "RegisterViewBase", ")", ":", "Model", "=", "auth_model", "class", "RamsesTokenAuthClaimView", "(", "TokenAuthClaimView", ")", ":", "Model", "=", "auth_model", "class", "RamsesTokenAuthResetView", "(", "TokenAuthResetView", ")", ":", "Model", "=", "auth_model", "common_kw", "=", "{", "'prefix'", ":", "'auth'", ",", "'factory'", ":", "'nefertari.acl.AuthenticationACL'", ",", "}", "root", "=", "config", ".", "get_root_resource", "(", ")", "root", ".", "add", "(", "'register'", ",", "view", "=", "RamsesTokenAuthRegisterView", ",", "*", "*", "common_kw", ")", "root", ".", "add", "(", "'token'", ",", "view", "=", "RamsesTokenAuthClaimView", ",", "*", "*", "common_kw", ")", "root", ".", "add", "(", "'reset_token'", ",", "view", "=", "RamsesTokenAuthResetView", ",", "*", "*", "common_kw", ")", "return", "policy" ]
Setup `nefertari.ApiKeyAuthenticationPolicy`. Notes: * User may provide model name in :params['user_model']: to define the name of the user model. * `auth_model.get_groups_by_token` is used to perform username and token check * `auth_model.get_token_credentials` is used to get username and token from userid * Also connects basic routes to perform authentication actions. Arguments: :config: Pyramid Configurator instance. :params: Nefertari dictset which contains security scheme `settings`.
[ "Setup", "nefertari", ".", "ApiKeyAuthenticationPolicy", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/auth.py#L105-L160
ramses-tech/ramses
ramses/auth.py
setup_auth_policies
def setup_auth_policies(config, raml_root): """ Setup authentication, authorization policies. Performs basic validation to check all the required values are present and performs authentication, authorization policies generation using generator functions from `AUTHENTICATION_POLICIES`. :param config: Pyramid Configurator instance. :param raml_root: Instance of ramlfications.raml.RootNode. """ log.info('Configuring auth policies') secured_by_all = raml_root.secured_by or [] secured_by = [item for item in secured_by_all if item] if not secured_by: log.info('API is not secured. `secured_by` attribute ' 'value missing.') return secured_by = secured_by[0] schemes = {scheme.name: scheme for scheme in raml_root.security_schemes} if secured_by not in schemes: raise ValueError( 'Undefined security scheme used in `secured_by`: {}'.format( secured_by)) scheme = schemes[secured_by] if scheme.type not in AUTHENTICATION_POLICIES: raise ValueError('Unsupported security scheme type: {}'.format( scheme.type)) # Setup Authentication policy policy_generator = AUTHENTICATION_POLICIES[scheme.type] params = dictset(scheme.settings or {}) authn_policy = policy_generator(config, params) config.set_authentication_policy(authn_policy) # Setup Authorization policy authz_policy = ACLAuthorizationPolicy() config.set_authorization_policy(authz_policy)
python
def setup_auth_policies(config, raml_root): """ Setup authentication, authorization policies. Performs basic validation to check all the required values are present and performs authentication, authorization policies generation using generator functions from `AUTHENTICATION_POLICIES`. :param config: Pyramid Configurator instance. :param raml_root: Instance of ramlfications.raml.RootNode. """ log.info('Configuring auth policies') secured_by_all = raml_root.secured_by or [] secured_by = [item for item in secured_by_all if item] if not secured_by: log.info('API is not secured. `secured_by` attribute ' 'value missing.') return secured_by = secured_by[0] schemes = {scheme.name: scheme for scheme in raml_root.security_schemes} if secured_by not in schemes: raise ValueError( 'Undefined security scheme used in `secured_by`: {}'.format( secured_by)) scheme = schemes[secured_by] if scheme.type not in AUTHENTICATION_POLICIES: raise ValueError('Unsupported security scheme type: {}'.format( scheme.type)) # Setup Authentication policy policy_generator = AUTHENTICATION_POLICIES[scheme.type] params = dictset(scheme.settings or {}) authn_policy = policy_generator(config, params) config.set_authentication_policy(authn_policy) # Setup Authorization policy authz_policy = ACLAuthorizationPolicy() config.set_authorization_policy(authz_policy)
[ "def", "setup_auth_policies", "(", "config", ",", "raml_root", ")", ":", "log", ".", "info", "(", "'Configuring auth policies'", ")", "secured_by_all", "=", "raml_root", ".", "secured_by", "or", "[", "]", "secured_by", "=", "[", "item", "for", "item", "in", "secured_by_all", "if", "item", "]", "if", "not", "secured_by", ":", "log", ".", "info", "(", "'API is not secured. `secured_by` attribute '", "'value missing.'", ")", "return", "secured_by", "=", "secured_by", "[", "0", "]", "schemes", "=", "{", "scheme", ".", "name", ":", "scheme", "for", "scheme", "in", "raml_root", ".", "security_schemes", "}", "if", "secured_by", "not", "in", "schemes", ":", "raise", "ValueError", "(", "'Undefined security scheme used in `secured_by`: {}'", ".", "format", "(", "secured_by", ")", ")", "scheme", "=", "schemes", "[", "secured_by", "]", "if", "scheme", ".", "type", "not", "in", "AUTHENTICATION_POLICIES", ":", "raise", "ValueError", "(", "'Unsupported security scheme type: {}'", ".", "format", "(", "scheme", ".", "type", ")", ")", "# Setup Authentication policy", "policy_generator", "=", "AUTHENTICATION_POLICIES", "[", "scheme", ".", "type", "]", "params", "=", "dictset", "(", "scheme", ".", "settings", "or", "{", "}", ")", "authn_policy", "=", "policy_generator", "(", "config", ",", "params", ")", "config", ".", "set_authentication_policy", "(", "authn_policy", ")", "# Setup Authorization policy", "authz_policy", "=", "ACLAuthorizationPolicy", "(", ")", "config", ".", "set_authorization_policy", "(", "authz_policy", ")" ]
Setup authentication, authorization policies. Performs basic validation to check all the required values are present and performs authentication, authorization policies generation using generator functions from `AUTHENTICATION_POLICIES`. :param config: Pyramid Configurator instance. :param raml_root: Instance of ramlfications.raml.RootNode.
[ "Setup", "authentication", "authorization", "policies", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/auth.py#L178-L217
ramses-tech/ramses
ramses/auth.py
get_authuser_model
def get_authuser_model(): """ Define and return AuthUser model using nefertari base classes """ from nefertari.authentication.models import AuthUserMixin from nefertari import engine class AuthUser(AuthUserMixin, engine.BaseDocument): __tablename__ = 'ramses_authuser' return AuthUser
python
def get_authuser_model(): """ Define and return AuthUser model using nefertari base classes """ from nefertari.authentication.models import AuthUserMixin from nefertari import engine class AuthUser(AuthUserMixin, engine.BaseDocument): __tablename__ = 'ramses_authuser' return AuthUser
[ "def", "get_authuser_model", "(", ")", ":", "from", "nefertari", ".", "authentication", ".", "models", "import", "AuthUserMixin", "from", "nefertari", "import", "engine", "class", "AuthUser", "(", "AuthUserMixin", ",", "engine", ".", "BaseDocument", ")", ":", "__tablename__", "=", "'ramses_authuser'", "return", "AuthUser" ]
Define and return AuthUser model using nefertari base classes
[ "Define", "and", "return", "AuthUser", "model", "using", "nefertari", "base", "classes" ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/auth.py#L245-L253
ramses-tech/ramses
ramses/acl.py
validate_permissions
def validate_permissions(perms): """ Validate :perms: contains valid permissions. :param perms: List of permission names or ALL_PERMISSIONS. """ if not isinstance(perms, (list, tuple)): perms = [perms] valid_perms = set(PERMISSIONS.values()) if ALL_PERMISSIONS in perms: return perms if set(perms) - valid_perms: raise ValueError( 'Invalid ACL permission names. Valid permissions ' 'are: {}'.format(', '.join(valid_perms))) return perms
python
def validate_permissions(perms): """ Validate :perms: contains valid permissions. :param perms: List of permission names or ALL_PERMISSIONS. """ if not isinstance(perms, (list, tuple)): perms = [perms] valid_perms = set(PERMISSIONS.values()) if ALL_PERMISSIONS in perms: return perms if set(perms) - valid_perms: raise ValueError( 'Invalid ACL permission names. Valid permissions ' 'are: {}'.format(', '.join(valid_perms))) return perms
[ "def", "validate_permissions", "(", "perms", ")", ":", "if", "not", "isinstance", "(", "perms", ",", "(", "list", ",", "tuple", ")", ")", ":", "perms", "=", "[", "perms", "]", "valid_perms", "=", "set", "(", "PERMISSIONS", ".", "values", "(", ")", ")", "if", "ALL_PERMISSIONS", "in", "perms", ":", "return", "perms", "if", "set", "(", "perms", ")", "-", "valid_perms", ":", "raise", "ValueError", "(", "'Invalid ACL permission names. Valid permissions '", "'are: {}'", ".", "format", "(", "', '", ".", "join", "(", "valid_perms", ")", ")", ")", "return", "perms" ]
Validate :perms: contains valid permissions. :param perms: List of permission names or ALL_PERMISSIONS.
[ "Validate", ":", "perms", ":", "contains", "valid", "permissions", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/acl.py#L29-L43
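A short usage sketch of the validator above, assuming ramses and nefertari are installed; the permission names come from the ACL examples elsewhere in this file ('view', 'create', 'update', 'delete'), so the exact valid set depends on the installed nefertari version.
from ramses.acl import validate_permissions

validate_permissions(['view', 'create'])  # -> ['view', 'create'], assuming both names are registered in nefertari
validate_permissions('delete')            # a single name is wrapped into a list -> ['delete']
validate_permissions(['teleport'])        # raises ValueError listing the valid permission names (assuming 'teleport' is not one)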
ramses-tech/ramses
ramses/acl.py
parse_permissions
def parse_permissions(perms): """ Parse permissions ("perms") which are either exact permission names or the keyword 'all'. :param perms: List or comma-separated string of nefertari permission names, or 'all' """ if isinstance(perms, six.string_types): perms = perms.split(',') perms = [perm.strip().lower() for perm in perms] if 'all' in perms: return ALL_PERMISSIONS return validate_permissions(perms)
python
def parse_permissions(perms): """ Parse permissions ("perms") which are either exact permission names or the keyword 'all'. :param perms: List or comma-separated string of nefertari permission names, or 'all' """ if isinstance(perms, six.string_types): perms = perms.split(',') perms = [perm.strip().lower() for perm in perms] if 'all' in perms: return ALL_PERMISSIONS return validate_permissions(perms)
[ "def", "parse_permissions", "(", "perms", ")", ":", "if", "isinstance", "(", "perms", ",", "six", ".", "string_types", ")", ":", "perms", "=", "perms", ".", "split", "(", "','", ")", "perms", "=", "[", "perm", ".", "strip", "(", ")", ".", "lower", "(", ")", "for", "perm", "in", "perms", "]", "if", "'all'", "in", "perms", ":", "return", "ALL_PERMISSIONS", "return", "validate_permissions", "(", "perms", ")" ]
Parse permissions ("perms") which are either exact permission names or the keyword 'all'. :param perms: List or comma-separated string of nefertari permission names, or 'all'
[ "Parse", "permissions", "(", "perms", ")", "which", "are", "either", "exact", "permission", "names", "or", "the", "keyword", "all", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/acl.py#L46-L58
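A usage sketch of the comma-separated parsing, with results inferred from the code above; the concrete permission set again depends on the installed nefertari.
from ramses.acl import parse_permissions

parse_permissions('view, create')  # -> ['view', 'create'] after strip()/lower() and validation
parse_permissions(['UPDATE'])      # -> ['update']
parse_permissions('all')           # -> ALL_PERMISSIONS, the "everything" marker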
ramses-tech/ramses
ramses/acl.py
parse_acl
def parse_acl(acl_string): """ Parse raw string :acl_string: of RAML-defined ACLs. If :acl_string: is blank or None, all permissions are given. Values of ACL action and principal are parsed using `actions` and `special_principals` maps and are looked up after `strip()` and `lower()`. ACEs in :acl_string: may be separated by newlines or semicolons. Action, principal and permission lists must be separated by spaces. Permissions must be comma-separated. E.g. 'allow everyone view,create,update' and 'deny authenticated delete' :param acl_string: Raw RAML string containing defined ACEs. """ if not acl_string: return [ALLOW_ALL] aces_list = acl_string.replace('\n', ';').split(';') aces_list = [ace.strip().split(' ', 2) for ace in aces_list if ace] aces_list = [(a, b, c.split(',')) for a, b, c in aces_list] result_acl = [] for action_str, princ_str, perms in aces_list: # Process action action_str = action_str.strip().lower() action = actions.get(action_str) if action is None: raise ValueError( 'Unknown ACL action: {}. Valid actions: {}'.format( action_str, list(actions.keys()))) # Process principal princ_str = princ_str.strip().lower() if princ_str in special_principals: principal = special_principals[princ_str] elif is_callable_tag(princ_str): principal = resolve_to_callable(princ_str) else: principal = princ_str # Process permissions permissions = parse_permissions(perms) result_acl.append((action, principal, permissions)) return result_acl
python
def parse_acl(acl_string): """ Parse raw string :acl_string: of RAML-defined ACLs. If :acl_string: is blank or None, all permissions are given. Values of ACL action and principal are parsed using `actions` and `special_principals` maps and are looked up after `strip()` and `lower()`. ACEs in :acl_string: may be separated by newlines or semicolons. Action, principal and permission lists must be separated by spaces. Permissions must be comma-separated. E.g. 'allow everyone view,create,update' and 'deny authenticated delete' :param acl_string: Raw RAML string containing defined ACEs. """ if not acl_string: return [ALLOW_ALL] aces_list = acl_string.replace('\n', ';').split(';') aces_list = [ace.strip().split(' ', 2) for ace in aces_list if ace] aces_list = [(a, b, c.split(',')) for a, b, c in aces_list] result_acl = [] for action_str, princ_str, perms in aces_list: # Process action action_str = action_str.strip().lower() action = actions.get(action_str) if action is None: raise ValueError( 'Unknown ACL action: {}. Valid actions: {}'.format( action_str, list(actions.keys()))) # Process principal princ_str = princ_str.strip().lower() if princ_str in special_principals: principal = special_principals[princ_str] elif is_callable_tag(princ_str): principal = resolve_to_callable(princ_str) else: principal = princ_str # Process permissions permissions = parse_permissions(perms) result_acl.append((action, principal, permissions)) return result_acl
[ "def", "parse_acl", "(", "acl_string", ")", ":", "if", "not", "acl_string", ":", "return", "[", "ALLOW_ALL", "]", "aces_list", "=", "acl_string", ".", "replace", "(", "'\\n'", ",", "';'", ")", ".", "split", "(", "';'", ")", "aces_list", "=", "[", "ace", ".", "strip", "(", ")", ".", "split", "(", "' '", ",", "2", ")", "for", "ace", "in", "aces_list", "if", "ace", "]", "aces_list", "=", "[", "(", "a", ",", "b", ",", "c", ".", "split", "(", "','", ")", ")", "for", "a", ",", "b", ",", "c", "in", "aces_list", "]", "result_acl", "=", "[", "]", "for", "action_str", ",", "princ_str", ",", "perms", "in", "aces_list", ":", "# Process action", "action_str", "=", "action_str", ".", "strip", "(", ")", ".", "lower", "(", ")", "action", "=", "actions", ".", "get", "(", "action_str", ")", "if", "action", "is", "None", ":", "raise", "ValueError", "(", "'Unknown ACL action: {}. Valid actions: {}'", ".", "format", "(", "action_str", ",", "list", "(", "actions", ".", "keys", "(", ")", ")", ")", ")", "# Process principal", "princ_str", "=", "princ_str", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "princ_str", "in", "special_principals", ":", "principal", "=", "special_principals", "[", "princ_str", "]", "elif", "is_callable_tag", "(", "princ_str", ")", ":", "principal", "=", "resolve_to_callable", "(", "princ_str", ")", "else", ":", "principal", "=", "princ_str", "# Process permissions", "permissions", "=", "parse_permissions", "(", "perms", ")", "result_acl", ".", "append", "(", "(", "action", ",", "principal", ",", "permissions", ")", ")", "return", "result_acl" ]
Parse raw string :acl_string: of RAML-defined ACLs. If :acl_string: is blank or None, all permissions are given. Values of ACL action and principal are parsed using `actions` and `special_principals` maps and are looked up after `strip()` and `lower()`. ACEs in :acl_string: may be separated by newlines or semicolons. Action, principal and permission lists must be separated by spaces. Permissions must be comma-separated. E.g. 'allow everyone view,create,update' and 'deny authenticated delete' :param acl_string: Raw RAML string containing defined ACEs.
[ "Parse", "raw", "string", ":", "acl_string", ":", "of", "RAML", "-", "defined", "ACLs", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/acl.py#L61-L107
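The docstring's own examples can be fed straight through the parser. A hedged sketch of the result: the Allow/Deny actions and the everyone/authenticated principals are resolved via the module-level `actions` and `special_principals` maps, which are not shown in this extract, so the exact objects are an assumption.
from ramses.acl import parse_acl

acl = parse_acl('allow everyone view,create,update;deny authenticated delete')
# Roughly: [(Allow, Everyone, ['view', 'create', 'update']),
#           (Deny, Authenticated, ['delete'])]

parse_acl(None)  # -> [ALLOW_ALL], i.e. full access when no ACL string is given
# Principals written as '{{some_callable}}' are resolved through the ramses registry instead.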
ramses-tech/ramses
ramses/acl.py
generate_acl
def generate_acl(config, model_cls, raml_resource, es_based=True): """ Generate an ACL. Generated ACL class has an `item_model` attribute set to :model_cls:. ACLs used for collection and item access control are generated from a first security scheme with type `x-ACL`. If :raml_resource: has no x-ACL security schemes defined then ALLOW_ALL ACL is used. If the `collection` or `item` settings are empty, then ALLOW_ALL ACL is used. :param model_cls: Generated model class :param raml_resource: Instance of ramlfications.raml.ResourceNode for which ACL is being generated :param es_based: Boolean indicating whether ACL should query ES or not when getting an object """ schemes = raml_resource.security_schemes or [] schemes = [sch for sch in schemes if sch.type == 'x-ACL'] if not schemes: collection_acl = item_acl = [] log.debug('No ACL scheme applied. Using ACL: {}'.format(item_acl)) else: sec_scheme = schemes[0] log.debug('{} ACL scheme applied'.format(sec_scheme.name)) settings = sec_scheme.settings or {} collection_acl = parse_acl(acl_string=settings.get('collection')) item_acl = parse_acl(acl_string=settings.get('item')) class GeneratedACLBase(object): item_model = model_cls def __init__(self, request, es_based=es_based): super(GeneratedACLBase, self).__init__(request=request) self.es_based = es_based self._collection_acl = collection_acl self._item_acl = item_acl bases = [GeneratedACLBase] if config.registry.database_acls: from nefertari_guards.acl import DatabaseACLMixin as GuardsMixin bases += [DatabaseACLMixin, GuardsMixin] bases.append(BaseACL) return type('GeneratedACL', tuple(bases), {})
python
def generate_acl(config, model_cls, raml_resource, es_based=True): """ Generate an ACL. Generated ACL class has an `item_model` attribute set to :model_cls:. ACLs used for collection and item access control are generated from a first security scheme with type `x-ACL`. If :raml_resource: has no x-ACL security schemes defined then ALLOW_ALL ACL is used. If the `collection` or `item` settings are empty, then ALLOW_ALL ACL is used. :param model_cls: Generated model class :param raml_resource: Instance of ramlfications.raml.ResourceNode for which ACL is being generated :param es_based: Boolean indicating whether ACL should query ES or not when getting an object """ schemes = raml_resource.security_schemes or [] schemes = [sch for sch in schemes if sch.type == 'x-ACL'] if not schemes: collection_acl = item_acl = [] log.debug('No ACL scheme applied. Using ACL: {}'.format(item_acl)) else: sec_scheme = schemes[0] log.debug('{} ACL scheme applied'.format(sec_scheme.name)) settings = sec_scheme.settings or {} collection_acl = parse_acl(acl_string=settings.get('collection')) item_acl = parse_acl(acl_string=settings.get('item')) class GeneratedACLBase(object): item_model = model_cls def __init__(self, request, es_based=es_based): super(GeneratedACLBase, self).__init__(request=request) self.es_based = es_based self._collection_acl = collection_acl self._item_acl = item_acl bases = [GeneratedACLBase] if config.registry.database_acls: from nefertari_guards.acl import DatabaseACLMixin as GuardsMixin bases += [DatabaseACLMixin, GuardsMixin] bases.append(BaseACL) return type('GeneratedACL', tuple(bases), {})
[ "def", "generate_acl", "(", "config", ",", "model_cls", ",", "raml_resource", ",", "es_based", "=", "True", ")", ":", "schemes", "=", "raml_resource", ".", "security_schemes", "or", "[", "]", "schemes", "=", "[", "sch", "for", "sch", "in", "schemes", "if", "sch", ".", "type", "==", "'x-ACL'", "]", "if", "not", "schemes", ":", "collection_acl", "=", "item_acl", "=", "[", "]", "log", ".", "debug", "(", "'No ACL scheme applied. Using ACL: {}'", ".", "format", "(", "item_acl", ")", ")", "else", ":", "sec_scheme", "=", "schemes", "[", "0", "]", "log", ".", "debug", "(", "'{} ACL scheme applied'", ".", "format", "(", "sec_scheme", ".", "name", ")", ")", "settings", "=", "sec_scheme", ".", "settings", "or", "{", "}", "collection_acl", "=", "parse_acl", "(", "acl_string", "=", "settings", ".", "get", "(", "'collection'", ")", ")", "item_acl", "=", "parse_acl", "(", "acl_string", "=", "settings", ".", "get", "(", "'item'", ")", ")", "class", "GeneratedACLBase", "(", "object", ")", ":", "item_model", "=", "model_cls", "def", "__init__", "(", "self", ",", "request", ",", "es_based", "=", "es_based", ")", ":", "super", "(", "GeneratedACLBase", ",", "self", ")", ".", "__init__", "(", "request", "=", "request", ")", "self", ".", "es_based", "=", "es_based", "self", ".", "_collection_acl", "=", "collection_acl", "self", ".", "_item_acl", "=", "item_acl", "bases", "=", "[", "GeneratedACLBase", "]", "if", "config", ".", "registry", ".", "database_acls", ":", "from", "nefertari_guards", ".", "acl", "import", "DatabaseACLMixin", "as", "GuardsMixin", "bases", "+=", "[", "DatabaseACLMixin", ",", "GuardsMixin", "]", "bases", ".", "append", "(", "BaseACL", ")", "return", "type", "(", "'GeneratedACL'", ",", "tuple", "(", "bases", ")", ",", "{", "}", ")" ]
Generate an ACL. Generated ACL class has an `item_model` attribute set to :model_cls:. ACLs used for collection and item access control are generated from a first security scheme with type `x-ACL`. If :raml_resource: has no x-ACL security schemes defined then ALLOW_ALL ACL is used. If the `collection` or `item` settings are empty, then ALLOW_ALL ACL is used. :param model_cls: Generated model class :param raml_resource: Instance of ramlfications.raml.ResourceNode for which ACL is being generated :param es_based: Boolean indicating whether ACL should query ES or not when getting an object
[ "Generate", "an", "ACL", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/acl.py#L217-L264
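The x-ACL security scheme that feeds this generator keeps its rules in two settings keys, 'collection' and 'item', each holding an ACL string in the parse_acl format shown earlier. A hedged sketch of what the parsed settings mapping might contain -- the rule strings are illustrative only.
settings = {
    'collection': 'allow everyone view,create',
    'item': 'allow everyone view;deny authenticated delete',
}
# generate_acl() then builds:
#   collection_acl = parse_acl(settings.get('collection'))
#   item_acl = parse_acl(settings.get('item'))
# and bakes both into a new GeneratedACL class whose item_model is the generated model.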
ramses-tech/ramses
ramses/acl.py
BaseACL._apply_callables
def _apply_callables(self, acl, obj=None): """ Iterate over ACEs from :acl: and apply callable principals if any. Principals are passed 3 arguments on call: :ace: Single ACE object that looks like (action, callable, permission or [permission]) :request: Current request object :obj: Object instance to be accessed via the ACL Principals must return a single ACE or a list of ACEs. :param acl: Sequence of valid Pyramid ACEs which will be processed :param obj: Object to be accessed via the ACL """ new_acl = [] for i, ace in enumerate(acl): principal = ace[1] if six.callable(principal): ace = principal(ace=ace, request=self.request, obj=obj) if not ace: continue if not isinstance(ace[0], (list, tuple)): ace = [ace] ace = [(a, b, validate_permissions(c)) for a, b, c in ace] else: ace = [ace] new_acl += ace return tuple(new_acl)
python
def _apply_callables(self, acl, obj=None): """ Iterate over ACEs from :acl: and apply callable principals if any. Principals are passed 3 arguments on call: :ace: Single ACE object that looks like (action, callable, permission or [permission]) :request: Current request object :obj: Object instance to be accessed via the ACL Principals must return a single ACE or a list of ACEs. :param acl: Sequence of valid Pyramid ACEs which will be processed :param obj: Object to be accessed via the ACL """ new_acl = [] for i, ace in enumerate(acl): principal = ace[1] if six.callable(principal): ace = principal(ace=ace, request=self.request, obj=obj) if not ace: continue if not isinstance(ace[0], (list, tuple)): ace = [ace] ace = [(a, b, validate_permissions(c)) for a, b, c in ace] else: ace = [ace] new_acl += ace return tuple(new_acl)
[ "def", "_apply_callables", "(", "self", ",", "acl", ",", "obj", "=", "None", ")", ":", "new_acl", "=", "[", "]", "for", "i", ",", "ace", "in", "enumerate", "(", "acl", ")", ":", "principal", "=", "ace", "[", "1", "]", "if", "six", ".", "callable", "(", "principal", ")", ":", "ace", "=", "principal", "(", "ace", "=", "ace", ",", "request", "=", "self", ".", "request", ",", "obj", "=", "obj", ")", "if", "not", "ace", ":", "continue", "if", "not", "isinstance", "(", "ace", "[", "0", "]", ",", "(", "list", ",", "tuple", ")", ")", ":", "ace", "=", "[", "ace", "]", "ace", "=", "[", "(", "a", ",", "b", ",", "validate_permissions", "(", "c", ")", ")", "for", "a", ",", "b", ",", "c", "in", "ace", "]", "else", ":", "ace", "=", "[", "ace", "]", "new_acl", "+=", "ace", "return", "tuple", "(", "new_acl", ")" ]
Iterate over ACEs from :acl: and apply callable principals if any. Principals are passed 3 arguments on call: :ace: Single ACE object that looks like (action, callable, permission or [permission]) :request: Current request object :obj: Object instance to be accessed via the ACL Principals must return a single ACE or a list of ACEs. :param acl: Sequence of valid Pyramid ACEs which will be processed :param obj: Object to be accessed via the ACL
[ "Iterate", "over", "ACEs", "from", ":", "acl", ":", "and", "apply", "callable", "principals", "if", "any", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/acl.py#L117-L144
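A callable principal compatible with the contract described above might look like the sketch below; `owner_username` is a hypothetical attribute used purely for illustration, and the callable must return a single ACE or a list of ACEs whose permissions pass validate_permissions.
from pyramid.security import Allow

def owner_only(ace, request, obj=None):
    # Invoked by _apply_callables with the original ACE, the current request
    # and (for item ACLs) the object being accessed.
    if obj is None:
        return []  # contribute nothing for collection-level ACLs
    _action, _principal, permissions = ace
    # Grant the original permissions to the object's owner only (hypothetical field).
    return (Allow, str(obj.owner_username), permissions)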
ramses-tech/ramses
ramses/acl.py
DatabaseACLMixin.item_acl
def item_acl(self, item): """ Objectify ACL if ES is used or call item.get_acl() if db is used. """ if self.es_based: from nefertari_guards.elasticsearch import get_es_item_acl return get_es_item_acl(item) return super(DatabaseACLMixin, self).item_acl(item)
python
def item_acl(self, item): """ Objectify ACL if ES is used or call item.get_acl() if db is used. """ if self.es_based: from nefertari_guards.elasticsearch import get_es_item_acl return get_es_item_acl(item) return super(DatabaseACLMixin, self).item_acl(item)
[ "def", "item_acl", "(", "self", ",", "item", ")", ":", "if", "self", ".", "es_based", ":", "from", "nefertari_guards", ".", "elasticsearch", "import", "get_es_item_acl", "return", "get_es_item_acl", "(", "item", ")", "return", "super", "(", "DatabaseACLMixin", ",", "self", ")", ".", "item_acl", "(", "item", ")" ]
Objectify ACL if ES is used or call item.get_acl() if db is used.
[ "Objectify", "ACL", "if", "ES", "is", "used", "or", "call", "item", ".", "get_acl", "()", "if", "db", "is", "used", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/acl.py#L189-L196
ramses-tech/ramses
ramses/acl.py
DatabaseACLMixin.getitem_es
def getitem_es(self, key): """ Override to support ACL filtering. To do so: passes `self.request` to `get_item` and uses `ACLFilterES`. """ from nefertari_guards.elasticsearch import ACLFilterES es = ACLFilterES(self.item_model.__name__) params = { 'id': key, 'request': self.request, } obj = es.get_item(**params) obj.__acl__ = self.item_acl(obj) obj.__parent__ = self obj.__name__ = key return obj
python
def getitem_es(self, key): """ Override to support ACL filtering. To do so: passes `self.request` to `get_item` and uses `ACLFilterES`. """ from nefertari_guards.elasticsearch import ACLFilterES es = ACLFilterES(self.item_model.__name__) params = { 'id': key, 'request': self.request, } obj = es.get_item(**params) obj.__acl__ = self.item_acl(obj) obj.__parent__ = self obj.__name__ = key return obj
[ "def", "getitem_es", "(", "self", ",", "key", ")", ":", "from", "nefertari_guards", ".", "elasticsearch", "import", "ACLFilterES", "es", "=", "ACLFilterES", "(", "self", ".", "item_model", ".", "__name__", ")", "params", "=", "{", "'id'", ":", "key", ",", "'request'", ":", "self", ".", "request", ",", "}", "obj", "=", "es", ".", "get_item", "(", "*", "*", "params", ")", "obj", ".", "__acl__", "=", "self", ".", "item_acl", "(", "obj", ")", "obj", ".", "__parent__", "=", "self", "obj", ".", "__name__", "=", "key", "return", "obj" ]
Override to support ACL filtering. To do so: passes `self.request` to `get_item` and uses `ACLFilterES`.
[ "Override", "to", "support", "ACL", "filtering", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/acl.py#L198-L214
ramses-tech/ramses
ramses/utils.py
convert_schema
def convert_schema(raml_schema, mime_type): """ Restructure `raml_schema` to a dictionary that has 'properties' as well as other schema keys/values. The resulting dictionary looks like this:: { "properties": { "field1": { "required": boolean, "type": ..., ...more field options }, ...more properties }, "public_fields": [...], "auth_fields": [...], ...more schema options } :param raml_schema: RAML request body schema. :param mime_type: ContentType of the schema as a string from RAML file. Only JSON is currently supported. """ if mime_type == ContentTypes.JSON: if not isinstance(raml_schema, dict): raise TypeError( 'Schema is not a valid JSON. Please check your ' 'schema syntax.\n{}...'.format(str(raml_schema)[:60])) return raml_schema if mime_type == ContentTypes.TEXT_XML: # Process XML schema pass
python
def convert_schema(raml_schema, mime_type): """ Restructure `raml_schema` to a dictionary that has 'properties' as well as other schema keys/values. The resulting dictionary looks like this:: { "properties": { "field1": { "required": boolean, "type": ..., ...more field options }, ...more properties }, "public_fields": [...], "auth_fields": [...], ...more schema options } :param raml_schema: RAML request body schema. :param mime_type: ContentType of the schema as a string from RAML file. Only JSON is currently supported. """ if mime_type == ContentTypes.JSON: if not isinstance(raml_schema, dict): raise TypeError( 'Schema is not a valid JSON. Please check your ' 'schema syntax.\n{}...'.format(str(raml_schema)[:60])) return raml_schema if mime_type == ContentTypes.TEXT_XML: # Process XML schema pass
[ "def", "convert_schema", "(", "raml_schema", ",", "mime_type", ")", ":", "if", "mime_type", "==", "ContentTypes", ".", "JSON", ":", "if", "not", "isinstance", "(", "raml_schema", ",", "dict", ")", ":", "raise", "TypeError", "(", "'Schema is not a valid JSON. Please check your '", "'schema syntax.\\n{}...'", ".", "format", "(", "str", "(", "raml_schema", ")", "[", ":", "60", "]", ")", ")", "return", "raml_schema", "if", "mime_type", "==", "ContentTypes", ".", "TEXT_XML", ":", "# Process XML schema", "pass" ]
Restructure `raml_schema` to a dictionary that has 'properties' as well as other schema keys/values. The resulting dictionary looks like this:: { "properties": { "field1": { "required": boolean, "type": ..., ...more field options }, ...more properties }, "public_fields": [...], "auth_fields": [...], ...more schema options } :param raml_schema: RAML request body schema. :param mime_type: ContentType of the schema as a string from RAML file. Only JSON is currently supported.
[ "Restructure", "raml_schema", "to", "a", "dictionary", "that", "has", "properties", "as", "well", "as", "other", "schema", "keys", "/", "values", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L22-L54
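A usage sketch, assuming `ContentTypes` is reachable through ramses.utils (the function above references it at module level); only the JSON branch currently does anything.
from ramses.utils import ContentTypes, convert_schema

schema = {
    'properties': {'name': {'required': True, 'type': 'string'}},
    'public_fields': ['name'],
    'auth_fields': [],
}
convert_schema(schema, ContentTypes.JSON)                 # returns the dict unchanged
convert_schema('{"properties": {}}', ContentTypes.JSON)   # raises TypeError: the schema must already be parsed into a dict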
ramses-tech/ramses
ramses/utils.py
generate_model_name
def generate_model_name(raml_resource): """ Generate model name. :param raml_resource: Instance of ramlfications.raml.ResourceNode. """ resource_uri = get_resource_uri(raml_resource).strip('/') resource_uri = re.sub('\W', ' ', resource_uri) model_name = inflection.titleize(resource_uri) return inflection.singularize(model_name).replace(' ', '')
python
def generate_model_name(raml_resource): """ Generate model name. :param raml_resource: Instance of ramlfications.raml.ResourceNode. """ resource_uri = get_resource_uri(raml_resource).strip('/') resource_uri = re.sub('\W', ' ', resource_uri) model_name = inflection.titleize(resource_uri) return inflection.singularize(model_name).replace(' ', '')
[ "def", "generate_model_name", "(", "raml_resource", ")", ":", "resource_uri", "=", "get_resource_uri", "(", "raml_resource", ")", ".", "strip", "(", "'/'", ")", "resource_uri", "=", "re", ".", "sub", "(", "'\\W'", ",", "' '", ",", "resource_uri", ")", "model_name", "=", "inflection", ".", "titleize", "(", "resource_uri", ")", "return", "inflection", ".", "singularize", "(", "model_name", ")", ".", "replace", "(", "' '", ",", "''", ")" ]
Generate model name. :param raml_resource: Instance of ramlfications.raml.ResourceNode.
[ "Generate", "model", "name", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L76-L84
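The name generation reduces to a small inflection chain. Re-creating it by hand for a resource whose URI works out to '/stories' (the starting string is an assumption, since get_resource_uri is not shown in this extract):
import re
import inflection

uri = '/stories'.strip('/')        # 'stories'
uri = re.sub(r'\W', ' ', uri)      # 'stories' (non-word characters become spaces)
name = inflection.titleize(uri)    # 'Stories'
print(inflection.singularize(name).replace(' ', ''))  # 'Story'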
ramses-tech/ramses
ramses/utils.py
dynamic_part_name
def dynamic_part_name(raml_resource, route_name, pk_field): """ Generate a dynamic part for a resource :raml_resource:. A dynamic part is generated using 2 parts: :route_name: of the resource and the dynamic part of first dynamic child resources. If :raml_resource: has no dynamic child resources, 'id' is used as the 2nd part. E.g. if your dynamic part on route 'stories' is named 'superId' then dynamic part will be 'stories_superId'. :param raml_resource: Instance of ramlfications.raml.ResourceNode for which dynamic part name is being generated. :param route_name: Cleaned name of :raml_resource: :param pk_field: Model Primary Key field name. """ subresources = get_resource_children(raml_resource) dynamic_uris = [res.path for res in subresources if is_dynamic_uri(res.path)] if dynamic_uris: dynamic_part = extract_dynamic_part(dynamic_uris[0]) else: dynamic_part = pk_field return '_'.join([route_name, dynamic_part])
python
def dynamic_part_name(raml_resource, route_name, pk_field): """ Generate a dynamic part for a resource :raml_resource:. A dynamic part is generated using 2 parts: :route_name: of the resource and the dynamic part of first dynamic child resources. If :raml_resource: has no dynamic child resources, 'id' is used as the 2nd part. E.g. if your dynamic part on route 'stories' is named 'superId' then dynamic part will be 'stories_superId'. :param raml_resource: Instance of ramlfications.raml.ResourceNode for which dynamic part name is being generated. :param route_name: Cleaned name of :raml_resource: :param pk_field: Model Primary Key field name. """ subresources = get_resource_children(raml_resource) dynamic_uris = [res.path for res in subresources if is_dynamic_uri(res.path)] if dynamic_uris: dynamic_part = extract_dynamic_part(dynamic_uris[0]) else: dynamic_part = pk_field return '_'.join([route_name, dynamic_part])
[ "def", "dynamic_part_name", "(", "raml_resource", ",", "route_name", ",", "pk_field", ")", ":", "subresources", "=", "get_resource_children", "(", "raml_resource", ")", "dynamic_uris", "=", "[", "res", ".", "path", "for", "res", "in", "subresources", "if", "is_dynamic_uri", "(", "res", ".", "path", ")", "]", "if", "dynamic_uris", ":", "dynamic_part", "=", "extract_dynamic_part", "(", "dynamic_uris", "[", "0", "]", ")", "else", ":", "dynamic_part", "=", "pk_field", "return", "'_'", ".", "join", "(", "[", "route_name", ",", "dynamic_part", "]", ")" ]
Generate a dynamic part for a resource :raml_resource:. A dynamic part is generated using 2 parts: :route_name: of the resource and the dynamic part of first dynamic child resources. If :raml_resource: has no dynamic child resources, 'id' is used as the 2nd part. E.g. if your dynamic part on route 'stories' is named 'superId' then dynamic part will be 'stories_superId'. :param raml_resource: Instance of ramlfications.raml.ResourceNode for which dynamic part name is being generated. :param route_name: Cleaned name of :raml_resource: :param pk_field: Model Primary Key field name.
[ "Generate", "a", "dynamic", "part", "for", "a", "resource", ":", "raml_resource", ":", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L87-L109
ramses-tech/ramses
ramses/utils.py
extract_dynamic_part
def extract_dynamic_part(uri): """ Extract dynamic url part from :uri: string. :param uri: URI string that may contain dynamic part. """ for part in uri.split('/'): part = part.strip() if part.startswith('{') and part.endswith('}'): return clean_dynamic_uri(part)
python
def extract_dynamic_part(uri): """ Extract dynamic url part from :uri: string. :param uri: URI string that may contain dynamic part. """ for part in uri.split('/'): part = part.strip() if part.startswith('{') and part.endswith('}'): return clean_dynamic_uri(part)
[ "def", "extract_dynamic_part", "(", "uri", ")", ":", "for", "part", "in", "uri", ".", "split", "(", "'/'", ")", ":", "part", "=", "part", ".", "strip", "(", ")", "if", "part", ".", "startswith", "(", "'{'", ")", "and", "part", ".", "endswith", "(", "'}'", ")", ":", "return", "clean_dynamic_uri", "(", "part", ")" ]
Extract dynamic url part from :uri: string. :param uri: URI string that may contain dynamic part.
[ "Extract", "dynamic", "url", "part", "from", ":", "uri", ":", "string", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L112-L120
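Assuming `clean_dynamic_uri` (not shown in this extract) strips the curly braces, the helper behaves roughly like this:
from ramses.utils import extract_dynamic_part

extract_dynamic_part('/stories/{superId}')  # -> 'superId', assuming clean_dynamic_uri drops '{' and '}'
extract_dynamic_part('/stories')            # -> None, no dynamic part found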
ramses-tech/ramses
ramses/utils.py
resource_view_attrs
def resource_view_attrs(raml_resource, singular=False): """ Generate view method names needed for `raml_resource` view. Collects HTTP method names from resource siblings and dynamic children if exist. Collected methods are then translated to `nefertari.view.BaseView` method names, each of which is used to process a particular HTTP method request. Maps of {HTTP_method: view_method} `collection_methods` and `item_methods` are used to convert collection and item methods respectively. :param raml_resource: Instance of ramlfications.raml.ResourceNode :param singular: Boolean indicating if resource is singular or not """ from .views import collection_methods, item_methods # Singular resource doesn't have collection methods though # it looks like a collection if singular: collection_methods = item_methods siblings = get_resource_siblings(raml_resource) http_methods = [sibl.method.lower() for sibl in siblings] attrs = [collection_methods.get(method) for method in http_methods] # Check if resource has dynamic child resource like collection/{id} # If dynamic child resource exists, add its siblings' methods to attrs, # as both resources are handled by a single view children = get_resource_children(raml_resource) http_submethods = [child.method.lower() for child in children if is_dynamic_uri(child.path)] attrs += [item_methods.get(method) for method in http_submethods] return set(filter(bool, attrs))
python
def resource_view_attrs(raml_resource, singular=False): """ Generate view method names needed for `raml_resource` view. Collects HTTP method names from resource siblings and dynamic children if exist. Collected methods are then translated to `nefertari.view.BaseView` method names, each of which is used to process a particular HTTP method request. Maps of {HTTP_method: view_method} `collection_methods` and `item_methods` are used to convert collection and item methods respectively. :param raml_resource: Instance of ramlfications.raml.ResourceNode :param singular: Boolean indicating if resource is singular or not """ from .views import collection_methods, item_methods # Singular resource doesn't have collection methods though # it looks like a collection if singular: collection_methods = item_methods siblings = get_resource_siblings(raml_resource) http_methods = [sibl.method.lower() for sibl in siblings] attrs = [collection_methods.get(method) for method in http_methods] # Check if resource has dynamic child resource like collection/{id} # If dynamic child resource exists, add its siblings' methods to attrs, # as both resources are handled by a single view children = get_resource_children(raml_resource) http_submethods = [child.method.lower() for child in children if is_dynamic_uri(child.path)] attrs += [item_methods.get(method) for method in http_submethods] return set(filter(bool, attrs))
[ "def", "resource_view_attrs", "(", "raml_resource", ",", "singular", "=", "False", ")", ":", "from", ".", "views", "import", "collection_methods", ",", "item_methods", "# Singular resource doesn't have collection methods though", "# it looks like a collection", "if", "singular", ":", "collection_methods", "=", "item_methods", "siblings", "=", "get_resource_siblings", "(", "raml_resource", ")", "http_methods", "=", "[", "sibl", ".", "method", ".", "lower", "(", ")", "for", "sibl", "in", "siblings", "]", "attrs", "=", "[", "collection_methods", ".", "get", "(", "method", ")", "for", "method", "in", "http_methods", "]", "# Check if resource has dynamic child resource like collection/{id}", "# If dynamic child resource exists, add its siblings' methods to attrs,", "# as both resources are handled by a single view", "children", "=", "get_resource_children", "(", "raml_resource", ")", "http_submethods", "=", "[", "child", ".", "method", ".", "lower", "(", ")", "for", "child", "in", "children", "if", "is_dynamic_uri", "(", "child", ".", "path", ")", "]", "attrs", "+=", "[", "item_methods", ".", "get", "(", "method", ")", "for", "method", "in", "http_submethods", "]", "return", "set", "(", "filter", "(", "bool", ",", "attrs", ")", ")" ]
Generate view method names needed for `raml_resource` view. Collects HTTP method names from resource siblings and dynamic children if exist. Collected methods are then translated to `nefertari.view.BaseView` method names, each of which is used to process a particular HTTP method request. Maps of {HTTP_method: view_method} `collection_methods` and `item_methods` are used to convert collection and item methods respectively. :param raml_resource: Instance of ramlfications.raml.ResourceNode :param singular: Boolean indicating if resource is singular or not
[ "Generate", "view", "method", "names", "needed", "for", "raml_resource", "view", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L123-L156
ramses-tech/ramses
ramses/utils.py
resource_schema
def resource_schema(raml_resource): """ Get schema properties of RAML resource :raml_resource:. Must be called with RAML resource that defines body schema. First body that defines schema is used. Schema is converted on return using 'convert_schema'. :param raml_resource: Instance of ramlfications.raml.ResourceNode of POST method. """ # NOTE: Must be called with resource that defines body schema log.info('Searching for model schema') if not raml_resource.body: raise ValueError('RAML resource has no body to setup database ' 'schema from') for body in raml_resource.body: if body.schema: return convert_schema(body.schema, body.mime_type) log.debug('No model schema found.')
python
def resource_schema(raml_resource): """ Get schema properties of RAML resource :raml_resource:. Must be called with RAML resource that defines body schema. First body that defines schema is used. Schema is converted on return using 'convert_schema'. :param raml_resource: Instance of ramlfications.raml.ResourceNode of POST method. """ # NOTE: Must be called with resource that defines body schema log.info('Searching for model schema') if not raml_resource.body: raise ValueError('RAML resource has no body to setup database ' 'schema from') for body in raml_resource.body: if body.schema: return convert_schema(body.schema, body.mime_type) log.debug('No model schema found.')
[ "def", "resource_schema", "(", "raml_resource", ")", ":", "# NOTE: Must be called with resource that defines body schema", "log", ".", "info", "(", "'Searching for model schema'", ")", "if", "not", "raml_resource", ".", "body", ":", "raise", "ValueError", "(", "'RAML resource has no body to setup database '", "'schema from'", ")", "for", "body", "in", "raml_resource", ".", "body", ":", "if", "body", ".", "schema", ":", "return", "convert_schema", "(", "body", ".", "schema", ",", "body", ".", "mime_type", ")", "log", ".", "debug", "(", "'No model schema found.'", ")" ]
Get schema properties of RAML resource :raml_resource:. Must be called with RAML resource that defines body schema. First body that defines schema is used. Schema is converted on return using 'convert_schema'. :param raml_resource: Instance of ramlfications.raml.ResourceNode of POST method.
[ "Get", "schema", "properties", "of", "RAML", "resource", ":", "raml_resource", ":", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L159-L178
ramses-tech/ramses
ramses/utils.py
get_static_parent
def get_static_parent(raml_resource, method=None): """ Get static parent resource of :raml_resource: with HTTP method :method:. :param raml_resource:Instance of ramlfications.raml.ResourceNode. :param method: HTTP method name which matching static resource must have. """ parent = raml_resource.parent while is_dynamic_resource(parent): parent = parent.parent if parent is None: return parent match_method = method is not None if match_method: if parent.method.upper() == method.upper(): return parent else: return parent for res in parent.root.resources: if res.path == parent.path: if res.method.upper() == method.upper(): return res
python
def get_static_parent(raml_resource, method=None): """ Get static parent resource of :raml_resource: with HTTP method :method:. :param raml_resource:Instance of ramlfications.raml.ResourceNode. :param method: HTTP method name which matching static resource must have. """ parent = raml_resource.parent while is_dynamic_resource(parent): parent = parent.parent if parent is None: return parent match_method = method is not None if match_method: if parent.method.upper() == method.upper(): return parent else: return parent for res in parent.root.resources: if res.path == parent.path: if res.method.upper() == method.upper(): return res
[ "def", "get_static_parent", "(", "raml_resource", ",", "method", "=", "None", ")", ":", "parent", "=", "raml_resource", ".", "parent", "while", "is_dynamic_resource", "(", "parent", ")", ":", "parent", "=", "parent", ".", "parent", "if", "parent", "is", "None", ":", "return", "parent", "match_method", "=", "method", "is", "not", "None", "if", "match_method", ":", "if", "parent", ".", "method", ".", "upper", "(", ")", "==", "method", ".", "upper", "(", ")", ":", "return", "parent", "else", ":", "return", "parent", "for", "res", "in", "parent", ".", "root", ".", "resources", ":", "if", "res", ".", "path", "==", "parent", ".", "path", ":", "if", "res", ".", "method", ".", "upper", "(", ")", "==", "method", ".", "upper", "(", ")", ":", "return", "res" ]
Get static parent resource of :raml_resource: with HTTP method :method:. :param raml_resource:Instance of ramlfications.raml.ResourceNode. :param method: HTTP method name which matching static resource must have.
[ "Get", "static", "parent", "resource", "of", ":", "raml_resource", ":", "with", "HTTP", "method", ":", "method", ":", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L189-L214
ramses-tech/ramses
ramses/utils.py
attr_subresource
def attr_subresource(raml_resource, route_name): """ Determine if :raml_resource: is an attribute subresource. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param route_name: Name of the :raml_resource:. """ static_parent = get_static_parent(raml_resource, method='POST') if static_parent is None: return False schema = resource_schema(static_parent) or {} properties = schema.get('properties', {}) if route_name in properties: db_settings = properties[route_name].get('_db_settings', {}) return db_settings.get('type') in ('dict', 'list') return False
python
def attr_subresource(raml_resource, route_name): """ Determine if :raml_resource: is an attribute subresource. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param route_name: Name of the :raml_resource:. """ static_parent = get_static_parent(raml_resource, method='POST') if static_parent is None: return False schema = resource_schema(static_parent) or {} properties = schema.get('properties', {}) if route_name in properties: db_settings = properties[route_name].get('_db_settings', {}) return db_settings.get('type') in ('dict', 'list') return False
[ "def", "attr_subresource", "(", "raml_resource", ",", "route_name", ")", ":", "static_parent", "=", "get_static_parent", "(", "raml_resource", ",", "method", "=", "'POST'", ")", "if", "static_parent", "is", "None", ":", "return", "False", "schema", "=", "resource_schema", "(", "static_parent", ")", "or", "{", "}", "properties", "=", "schema", ".", "get", "(", "'properties'", ",", "{", "}", ")", "if", "route_name", "in", "properties", ":", "db_settings", "=", "properties", "[", "route_name", "]", ".", "get", "(", "'_db_settings'", ",", "{", "}", ")", "return", "db_settings", ".", "get", "(", "'type'", ")", "in", "(", "'dict'", ",", "'list'", ")", "return", "False" ]
Determine if :raml_resource: is an attribute subresource. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param route_name: Name of the :raml_resource:.
[ "Determine", "if", ":", "raml_resource", ":", "is", "an", "attribute", "subresource", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L217-L231
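What makes a route an attribute subresource is the `_db_settings` type of the matching property in the static parent's POST schema. A hedged fragment of such a schema -- the field names are illustrative.
# Parent POST body schema fragment: 'settings' would be served as an attribute subresource.
parent_schema = {
    'properties': {
        'settings': {'_db_settings': {'type': 'dict'}},  # 'dict' or 'list' -> attribute subresource
        'title': {'_db_settings': {'type': 'string'}},   # anything else -> regular field
    }
}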
ramses-tech/ramses
ramses/utils.py
singular_subresource
def singular_subresource(raml_resource, route_name): """ Determine if :raml_resource: is a singular subresource. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param route_name: Name of the :raml_resource:. """ static_parent = get_static_parent(raml_resource, method='POST') if static_parent is None: return False schema = resource_schema(static_parent) or {} properties = schema.get('properties', {}) if route_name not in properties: return False db_settings = properties[route_name].get('_db_settings', {}) is_obj = db_settings.get('type') == 'relationship' single_obj = not db_settings.get('uselist', True) return is_obj and single_obj
python
def singular_subresource(raml_resource, route_name): """ Determine if :raml_resource: is a singular subresource. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param route_name: Name of the :raml_resource:. """ static_parent = get_static_parent(raml_resource, method='POST') if static_parent is None: return False schema = resource_schema(static_parent) or {} properties = schema.get('properties', {}) if route_name not in properties: return False db_settings = properties[route_name].get('_db_settings', {}) is_obj = db_settings.get('type') == 'relationship' single_obj = not db_settings.get('uselist', True) return is_obj and single_obj
[ "def", "singular_subresource", "(", "raml_resource", ",", "route_name", ")", ":", "static_parent", "=", "get_static_parent", "(", "raml_resource", ",", "method", "=", "'POST'", ")", "if", "static_parent", "is", "None", ":", "return", "False", "schema", "=", "resource_schema", "(", "static_parent", ")", "or", "{", "}", "properties", "=", "schema", ".", "get", "(", "'properties'", ",", "{", "}", ")", "if", "route_name", "not", "in", "properties", ":", "return", "False", "db_settings", "=", "properties", "[", "route_name", "]", ".", "get", "(", "'_db_settings'", ",", "{", "}", ")", "is_obj", "=", "db_settings", ".", "get", "(", "'type'", ")", "==", "'relationship'", "single_obj", "=", "not", "db_settings", ".", "get", "(", "'uselist'", ",", "True", ")", "return", "is_obj", "and", "single_obj" ]
Determine if :raml_resource: is a singular subresource. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param route_name: Name of the :raml_resource:.
[ "Determine", "if", ":", "raml_resource", ":", "is", "a", "singular", "subresource", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L234-L251
ramses-tech/ramses
ramses/utils.py
is_callable_tag
def is_callable_tag(tag): """ Determine whether :tag: is a valid callable string tag. String is assumed to be valid callable if it starts with '{{' and ends with '}}'. :param tag: String name of tag. """ return (isinstance(tag, six.string_types) and tag.strip().startswith('{{') and tag.strip().endswith('}}'))
python
def is_callable_tag(tag): """ Determine whether :tag: is a valid callable string tag. String is assumed to be valid callable if it starts with '{{' and ends with '}}'. :param tag: String name of tag. """ return (isinstance(tag, six.string_types) and tag.strip().startswith('{{') and tag.strip().endswith('}}'))
[ "def", "is_callable_tag", "(", "tag", ")", ":", "return", "(", "isinstance", "(", "tag", ",", "six", ".", "string_types", ")", "and", "tag", ".", "strip", "(", ")", ".", "startswith", "(", "'{{'", ")", "and", "tag", ".", "strip", "(", ")", ".", "endswith", "(", "'}}'", ")", ")" ]
Determine whether :tag: is a valid callable string tag. String is assumed to be valid callable if it starts with '{{' and ends with '}}'. :param tag: String name of tag.
[ "Determine", "whether", ":", "tag", ":", "is", "a", "valid", "callable", "string", "tag", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L254-L264
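A minimal usage sketch for the `is_callable_tag` record above; it assumes the `ramses` package is installed so the import resolves, and the tag strings are purely illustrative.
```python
# Usage sketch for is_callable_tag (ramses/utils.py); assumes ramses is
# installed so the import below resolves. The tag strings are illustrative.
from ramses.utils import is_callable_tag

assert is_callable_tag('{{my_callable}}')          # registry-style tag
assert is_callable_tag('  {{pkg.module.func}}  ')  # surrounding whitespace is ignored
assert not is_callable_tag('my_callable')          # plain string, no braces
assert not is_callable_tag(None)                   # non-strings are rejected
```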
ramses-tech/ramses
ramses/utils.py
resolve_to_callable
def resolve_to_callable(callable_name): """ Resolve string :callable_name: to a callable. :param callable_name: String representing callable name as registered in ramses registry or dotted import path of callable. Can be wrapped in double curly brackets, e.g. '{{my_callable}}'. """ from . import registry clean_callable_name = callable_name.replace( '{{', '').replace('}}', '').strip() try: return registry.get(clean_callable_name) except KeyError: try: from zope.dottedname.resolve import resolve return resolve(clean_callable_name) except ImportError: raise ImportError( 'Failed to load callable `{}`'.format(clean_callable_name))
python
def resolve_to_callable(callable_name): """ Resolve string :callable_name: to a callable. :param callable_name: String representing callable name as registered in ramses registry or dotted import path of callable. Can be wrapped in double curly brackets, e.g. '{{my_callable}}'. """ from . import registry clean_callable_name = callable_name.replace( '{{', '').replace('}}', '').strip() try: return registry.get(clean_callable_name) except KeyError: try: from zope.dottedname.resolve import resolve return resolve(clean_callable_name) except ImportError: raise ImportError( 'Failed to load callable `{}`'.format(clean_callable_name))
[ "def", "resolve_to_callable", "(", "callable_name", ")", ":", "from", ".", "import", "registry", "clean_callable_name", "=", "callable_name", ".", "replace", "(", "'{{'", ",", "''", ")", ".", "replace", "(", "'}}'", ",", "''", ")", ".", "strip", "(", ")", "try", ":", "return", "registry", ".", "get", "(", "clean_callable_name", ")", "except", "KeyError", ":", "try", ":", "from", "zope", ".", "dottedname", ".", "resolve", "import", "resolve", "return", "resolve", "(", "clean_callable_name", ")", "except", "ImportError", ":", "raise", "ImportError", "(", "'Failed to load callable `{}`'", ".", "format", "(", "clean_callable_name", ")", ")" ]
Resolve string :callable_name: to a callable. :param callable_name: String representing callable name as registered in ramses registry or dotted import path of callable. Can be wrapped in double curly brackets, e.g. '{{my_callable}}'.
[ "Resolve", "string", ":", "callable_name", ":", "to", "a", "callable", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L267-L285
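A usage sketch for the `resolve_to_callable` record above. It assumes `ramses` and `zope.dottedname` are installed, and that `'os.path.join'` (an illustrative dotted path, not taken from the source) is absent from the ramses registry, so the KeyError fallback to dotted-name import is exercised.
```python
# Usage sketch for resolve_to_callable (ramses/utils.py); assumes ramses and
# zope.dottedname are installed. 'os.path.join' is only an illustrative
# dotted path that is assumed not to be in the ramses registry, so the
# function falls back to a dotted-name import.
import os.path
from ramses.utils import resolve_to_callable

func = resolve_to_callable('{{os.path.join}}')
assert func is os.path.join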
ramses-tech/ramses
ramses/utils.py
get_resource_siblings
def get_resource_siblings(raml_resource): """ Get siblings of :raml_resource:. :param raml_resource: Instance of ramlfications.raml.ResourceNode. """ path = raml_resource.path return [res for res in raml_resource.root.resources if res.path == path]
python
def get_resource_siblings(raml_resource): """ Get siblings of :raml_resource:. :param raml_resource: Instance of ramlfications.raml.ResourceNode. """ path = raml_resource.path return [res for res in raml_resource.root.resources if res.path == path]
[ "def", "get_resource_siblings", "(", "raml_resource", ")", ":", "path", "=", "raml_resource", ".", "path", "return", "[", "res", "for", "res", "in", "raml_resource", ".", "root", ".", "resources", "if", "res", ".", "path", "==", "path", "]" ]
Get siblings of :raml_resource:. :param raml_resource: Instance of ramlfications.raml.ResourceNode.
[ "Get", "siblings", "of", ":", "raml_resource", ":", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L288-L295
ramses-tech/ramses
ramses/utils.py
get_resource_children
def get_resource_children(raml_resource): """ Get children of :raml_resource:. :param raml_resource: Instance of ramlfications.raml.ResourceNode. """ path = raml_resource.path return [res for res in raml_resource.root.resources if res.parent and res.parent.path == path]
python
def get_resource_children(raml_resource): """ Get children of :raml_resource:. :param raml_resource: Instance of ramlfications.raml.ResourceNode. """ path = raml_resource.path return [res for res in raml_resource.root.resources if res.parent and res.parent.path == path]
[ "def", "get_resource_children", "(", "raml_resource", ")", ":", "path", "=", "raml_resource", ".", "path", "return", "[", "res", "for", "res", "in", "raml_resource", ".", "root", ".", "resources", "if", "res", ".", "parent", "and", "res", ".", "parent", ".", "path", "==", "path", "]" ]
Get children of :raml_resource:. :param raml_resource: Instance of ramlfications.raml.ResourceNode.
[ "Get", "children", "of", ":", "raml_resource", ":", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L298-L305
ramses-tech/ramses
ramses/utils.py
get_events_map
def get_events_map(): """ Prepare map of event subscribers. * Extends copies of BEFORE_EVENTS and AFTER_EVENTS maps with 'set' action. * Returns map of {before/after: {action: event class(es)}} """ from nefertari import events set_keys = ('create', 'update', 'replace', 'update_many', 'register') before_events = events.BEFORE_EVENTS.copy() before_events['set'] = [before_events[key] for key in set_keys] after_events = events.AFTER_EVENTS.copy() after_events['set'] = [after_events[key] for key in set_keys] return { 'before': before_events, 'after': after_events, }
python
def get_events_map(): """ Prepare map of event subscribers. * Extends copies of BEFORE_EVENTS and AFTER_EVENTS maps with 'set' action. * Returns map of {before/after: {action: event class(es)}} """ from nefertari import events set_keys = ('create', 'update', 'replace', 'update_many', 'register') before_events = events.BEFORE_EVENTS.copy() before_events['set'] = [before_events[key] for key in set_keys] after_events = events.AFTER_EVENTS.copy() after_events['set'] = [after_events[key] for key in set_keys] return { 'before': before_events, 'after': after_events, }
[ "def", "get_events_map", "(", ")", ":", "from", "nefertari", "import", "events", "set_keys", "=", "(", "'create'", ",", "'update'", ",", "'replace'", ",", "'update_many'", ",", "'register'", ")", "before_events", "=", "events", ".", "BEFORE_EVENTS", ".", "copy", "(", ")", "before_events", "[", "'set'", "]", "=", "[", "before_events", "[", "key", "]", "for", "key", "in", "set_keys", "]", "after_events", "=", "events", ".", "AFTER_EVENTS", ".", "copy", "(", ")", "after_events", "[", "'set'", "]", "=", "[", "after_events", "[", "key", "]", "for", "key", "in", "set_keys", "]", "return", "{", "'before'", ":", "before_events", ",", "'after'", ":", "after_events", ",", "}" ]
Prepare map of event subscribers. * Extends copies of BEFORE_EVENTS and AFTER_EVENTS maps with 'set' action. * Returns map of {before/after: {action: event class(es)}}
[ "Prepare", "map", "of", "event", "subscribers", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L308-L324
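A short sketch of the structure returned by `get_events_map` above, grounded in the code shown; it assumes `nefertari` is installed so `BEFORE_EVENTS` / `AFTER_EVENTS` are importable.
```python
# Sketch of how the events map is consumed; assumes nefertari is installed so
# that BEFORE_EVENTS / AFTER_EVENTS are importable by get_events_map.
from ramses.utils import get_events_map

events_map = get_events_map()

# Regular actions map straight to nefertari event classes...
before_create = events_map['before']['create']

# ...while the synthetic 'set' action maps to a list that aggregates the
# create/update/replace/update_many/register events.
assert isinstance(events_map['before']['set'], list)
assert before_create in events_map['before']['set']
assert isinstance(events_map['after']['set'], list)
```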
ramses-tech/ramses
ramses/utils.py
patch_view_model
def patch_view_model(view_cls, model_cls): """ Patches view_cls.Model with model_cls. :param view_cls: View class "Model" param of which should be patched :param model_cls: Model class which should be used to patch view_cls.Model """ original_model = view_cls.Model view_cls.Model = model_cls try: yield finally: view_cls.Model = original_model
python
def patch_view_model(view_cls, model_cls): """ Patches view_cls.Model with model_cls. :param view_cls: View class "Model" param of which should be patched :param model_cls: Model class which should be used to patch view_cls.Model """ original_model = view_cls.Model view_cls.Model = model_cls try: yield finally: view_cls.Model = original_model
[ "def", "patch_view_model", "(", "view_cls", ",", "model_cls", ")", ":", "original_model", "=", "view_cls", ".", "Model", "view_cls", ".", "Model", "=", "model_cls", "try", ":", "yield", "finally", ":", "view_cls", ".", "Model", "=", "original_model" ]
Patches view_cls.Model with model_cls. :param view_cls: View class whose "Model" param should be patched. :param model_cls: Model class which should be used to patch view_cls.Model.
[ "Patches", "view_cls", ".", "Model", "with", "model_cls", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L328-L342
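A usage sketch for the `patch_view_model` record above. The yield/finally shape indicates it is used as a context manager; the `@contextmanager` decorator is assumed here since it is not part of the captured snippet. The view and model classes below are hypothetical stand-ins.
```python
# Usage sketch for patch_view_model; assumes the @contextmanager decorator
# that precedes it in the source (not captured in the snippet above).
# StoriesView / OriginalModel / TemporaryModel are hypothetical stand-ins.
from ramses.utils import patch_view_model

class OriginalModel:
    pass

class TemporaryModel:
    pass

class StoriesView:               # stand-in for a generated view class
    Model = OriginalModel

with patch_view_model(StoriesView, TemporaryModel):
    assert StoriesView.Model is TemporaryModel   # patched inside the block
assert StoriesView.Model is OriginalModel        # restored on exit
```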
ramses-tech/ramses
ramses/utils.py
get_route_name
def get_route_name(resource_uri): """ Get route name from RAML resource URI. :param resource_uri: String representing RAML resource URI. :returns string: String with route name, which is :resource_uri: stripped of non-word characters. """ resource_uri = resource_uri.strip('/') resource_uri = re.sub('\W', '', resource_uri) return resource_uri
python
def get_route_name(resource_uri): """ Get route name from RAML resource URI. :param resource_uri: String representing RAML resource URI. :returns string: String with route name, which is :resource_uri: stripped of non-word characters. """ resource_uri = resource_uri.strip('/') resource_uri = re.sub('\W', '', resource_uri) return resource_uri
[ "def", "get_route_name", "(", "resource_uri", ")", ":", "resource_uri", "=", "resource_uri", ".", "strip", "(", "'/'", ")", "resource_uri", "=", "re", ".", "sub", "(", "'\\W'", ",", "''", ",", "resource_uri", ")", "return", "resource_uri" ]
Get route name from RAML resource URI. :param resource_uri: String representing RAML resource URI. :returns string: String with route name, which is :resource_uri: stripped of non-word characters.
[ "Get", "route", "name", "from", "RAML", "resource", "URI", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/utils.py#L345-L354
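A usage sketch for the `get_route_name` record above; the URIs are illustrative and the expected outputs follow directly from the strip/`re.sub` logic shown.
```python
# Usage sketch for get_route_name (ramses/utils.py); the URIs are illustrative.
from ramses.utils import get_route_name

assert get_route_name('/stories') == 'stories'
assert get_route_name('/stories/{id}') == 'storiesid'      # non-word chars stripped
assert get_route_name('/user-profiles/') == 'userprofiles'
```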
ramses-tech/ramses
ramses/generators.py
generate_resource
def generate_resource(config, raml_resource, parent_resource): """ Perform complete one resource configuration process This function generates: ACL, view, route, resource, database model for a given `raml_resource`. New nefertari resource is attached to `parent_resource` class which is an instance of `nefertari.resource.Resource`. Things to consider: * Top-level resources must be collection names. * No resources are explicitly created for dynamic (ending with '}') RAML resources as they are implicitly processed by parent collection resources. * Resource nesting must look like collection/id/collection/id/... * Only part of resource path after last '/' is taken into account, thus each level of resource nesting should add one more path element. E.g. /stories -> /stories/{id} and not /stories -> /stories/mystories/{id}. Latter route will be generated at /stories/{id}. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param parent_resource: Parent nefertari resource object. """ from .models import get_existing_model # Don't generate resources for dynamic routes as they are already # generated by their parent resource_uri = get_resource_uri(raml_resource) if is_dynamic_uri(resource_uri): if parent_resource.is_root: raise Exception("Top-level resources can't be dynamic and must " "represent collections instead") return route_name = get_route_name(resource_uri) log.info('Configuring resource: `{}`. Parent: `{}`'.format( route_name, parent_resource.uid or 'root')) # Get DB model. If this is an attribute or singular resource, # we don't need to get model is_singular = singular_subresource(raml_resource, route_name) is_attr_res = attr_subresource(raml_resource, route_name) if not parent_resource.is_root and (is_attr_res or is_singular): model_cls = parent_resource.view.Model else: model_name = generate_model_name(raml_resource) model_cls = get_existing_model(model_name) resource_kwargs = {} # Generate ACL log.info('Generating ACL for `{}`'.format(route_name)) resource_kwargs['factory'] = generate_acl( config, model_cls=model_cls, raml_resource=raml_resource) # Generate dynamic part name if not is_singular: resource_kwargs['id_name'] = dynamic_part_name( raml_resource=raml_resource, route_name=route_name, pk_field=model_cls.pk_field()) # Generate REST view log.info('Generating view for `{}`'.format(route_name)) view_attrs = resource_view_attrs(raml_resource, is_singular) resource_kwargs['view'] = generate_rest_view( config, model_cls=model_cls, attrs=view_attrs, attr_view=is_attr_res, singular=is_singular, ) # In case of singular resource, model still needs to be generated, # but we store it on a different view attribute if is_singular: model_name = generate_model_name(raml_resource) view_cls = resource_kwargs['view'] view_cls._parent_model = view_cls.Model view_cls.Model = get_existing_model(model_name) # Create new nefertari resource log.info('Creating new resource for `{}`'.format(route_name)) clean_uri = resource_uri.strip('/') resource_args = (singularize(clean_uri),) if not is_singular: resource_args += (clean_uri,) return parent_resource.add(*resource_args, **resource_kwargs)
python
def generate_resource(config, raml_resource, parent_resource): """ Perform complete one resource configuration process This function generates: ACL, view, route, resource, database model for a given `raml_resource`. New nefertari resource is attached to `parent_resource` class which is an instance of `nefertari.resource.Resource`. Things to consider: * Top-level resources must be collection names. * No resources are explicitly created for dynamic (ending with '}') RAML resources as they are implicitly processed by parent collection resources. * Resource nesting must look like collection/id/collection/id/... * Only part of resource path after last '/' is taken into account, thus each level of resource nesting should add one more path element. E.g. /stories -> /stories/{id} and not /stories -> /stories/mystories/{id}. Latter route will be generated at /stories/{id}. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param parent_resource: Parent nefertari resource object. """ from .models import get_existing_model # Don't generate resources for dynamic routes as they are already # generated by their parent resource_uri = get_resource_uri(raml_resource) if is_dynamic_uri(resource_uri): if parent_resource.is_root: raise Exception("Top-level resources can't be dynamic and must " "represent collections instead") return route_name = get_route_name(resource_uri) log.info('Configuring resource: `{}`. Parent: `{}`'.format( route_name, parent_resource.uid or 'root')) # Get DB model. If this is an attribute or singular resource, # we don't need to get model is_singular = singular_subresource(raml_resource, route_name) is_attr_res = attr_subresource(raml_resource, route_name) if not parent_resource.is_root and (is_attr_res or is_singular): model_cls = parent_resource.view.Model else: model_name = generate_model_name(raml_resource) model_cls = get_existing_model(model_name) resource_kwargs = {} # Generate ACL log.info('Generating ACL for `{}`'.format(route_name)) resource_kwargs['factory'] = generate_acl( config, model_cls=model_cls, raml_resource=raml_resource) # Generate dynamic part name if not is_singular: resource_kwargs['id_name'] = dynamic_part_name( raml_resource=raml_resource, route_name=route_name, pk_field=model_cls.pk_field()) # Generate REST view log.info('Generating view for `{}`'.format(route_name)) view_attrs = resource_view_attrs(raml_resource, is_singular) resource_kwargs['view'] = generate_rest_view( config, model_cls=model_cls, attrs=view_attrs, attr_view=is_attr_res, singular=is_singular, ) # In case of singular resource, model still needs to be generated, # but we store it on a different view attribute if is_singular: model_name = generate_model_name(raml_resource) view_cls = resource_kwargs['view'] view_cls._parent_model = view_cls.Model view_cls.Model = get_existing_model(model_name) # Create new nefertari resource log.info('Creating new resource for `{}`'.format(route_name)) clean_uri = resource_uri.strip('/') resource_args = (singularize(clean_uri),) if not is_singular: resource_args += (clean_uri,) return parent_resource.add(*resource_args, **resource_kwargs)
[ "def", "generate_resource", "(", "config", ",", "raml_resource", ",", "parent_resource", ")", ":", "from", ".", "models", "import", "get_existing_model", "# Don't generate resources for dynamic routes as they are already", "# generated by their parent", "resource_uri", "=", "get_resource_uri", "(", "raml_resource", ")", "if", "is_dynamic_uri", "(", "resource_uri", ")", ":", "if", "parent_resource", ".", "is_root", ":", "raise", "Exception", "(", "\"Top-level resources can't be dynamic and must \"", "\"represent collections instead\"", ")", "return", "route_name", "=", "get_route_name", "(", "resource_uri", ")", "log", ".", "info", "(", "'Configuring resource: `{}`. Parent: `{}`'", ".", "format", "(", "route_name", ",", "parent_resource", ".", "uid", "or", "'root'", ")", ")", "# Get DB model. If this is an attribute or singular resource,", "# we don't need to get model", "is_singular", "=", "singular_subresource", "(", "raml_resource", ",", "route_name", ")", "is_attr_res", "=", "attr_subresource", "(", "raml_resource", ",", "route_name", ")", "if", "not", "parent_resource", ".", "is_root", "and", "(", "is_attr_res", "or", "is_singular", ")", ":", "model_cls", "=", "parent_resource", ".", "view", ".", "Model", "else", ":", "model_name", "=", "generate_model_name", "(", "raml_resource", ")", "model_cls", "=", "get_existing_model", "(", "model_name", ")", "resource_kwargs", "=", "{", "}", "# Generate ACL", "log", ".", "info", "(", "'Generating ACL for `{}`'", ".", "format", "(", "route_name", ")", ")", "resource_kwargs", "[", "'factory'", "]", "=", "generate_acl", "(", "config", ",", "model_cls", "=", "model_cls", ",", "raml_resource", "=", "raml_resource", ")", "# Generate dynamic part name", "if", "not", "is_singular", ":", "resource_kwargs", "[", "'id_name'", "]", "=", "dynamic_part_name", "(", "raml_resource", "=", "raml_resource", ",", "route_name", "=", "route_name", ",", "pk_field", "=", "model_cls", ".", "pk_field", "(", ")", ")", "# Generate REST view", "log", ".", "info", "(", "'Generating view for `{}`'", ".", "format", "(", "route_name", ")", ")", "view_attrs", "=", "resource_view_attrs", "(", "raml_resource", ",", "is_singular", ")", "resource_kwargs", "[", "'view'", "]", "=", "generate_rest_view", "(", "config", ",", "model_cls", "=", "model_cls", ",", "attrs", "=", "view_attrs", ",", "attr_view", "=", "is_attr_res", ",", "singular", "=", "is_singular", ",", ")", "# In case of singular resource, model still needs to be generated,", "# but we store it on a different view attribute", "if", "is_singular", ":", "model_name", "=", "generate_model_name", "(", "raml_resource", ")", "view_cls", "=", "resource_kwargs", "[", "'view'", "]", "view_cls", ".", "_parent_model", "=", "view_cls", ".", "Model", "view_cls", ".", "Model", "=", "get_existing_model", "(", "model_name", ")", "# Create new nefertari resource", "log", ".", "info", "(", "'Creating new resource for `{}`'", ".", "format", "(", "route_name", ")", ")", "clean_uri", "=", "resource_uri", ".", "strip", "(", "'/'", ")", "resource_args", "=", "(", "singularize", "(", "clean_uri", ")", ",", ")", "if", "not", "is_singular", ":", "resource_args", "+=", "(", "clean_uri", ",", ")", "return", "parent_resource", ".", "add", "(", "*", "resource_args", ",", "*", "*", "resource_kwargs", ")" ]
Perform the complete configuration process for one resource. This function generates: ACL, view, route, resource, database model for a given `raml_resource`. New nefertari resource is attached to `parent_resource` class which is an instance of `nefertari.resource.Resource`. Things to consider: * Top-level resources must be collection names. * No resources are explicitly created for dynamic (ending with '}') RAML resources as they are implicitly processed by parent collection resources. * Resource nesting must look like collection/id/collection/id/... * Only the part of the resource path after the last '/' is taken into account, thus each level of resource nesting should add one more path element. E.g. /stories -> /stories/{id} and not /stories -> /stories/mystories/{id}. The latter route will be generated at /stories/{id}. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param parent_resource: Parent nefertari resource object.
[ "Perform", "complete", "one", "resource", "configuration", "process" ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/generators.py#L32-L122
ramses-tech/ramses
ramses/generators.py
generate_server
def generate_server(raml_root, config): """ Handle server generation process. :param raml_root: Instance of ramlfications.raml.RootNode. :param config: Pyramid Configurator instance. """ log.info('Server generation started') if not raml_root.resources: return root_resource = config.get_root_resource() generated_resources = {} for raml_resource in raml_root.resources: if raml_resource.path in generated_resources: continue # Get Nefertari parent resource parent_resource = _get_nefertari_parent_resource( raml_resource, generated_resources, root_resource) # Get generated resource and store it new_resource = generate_resource( config, raml_resource, parent_resource) if new_resource is not None: generated_resources[raml_resource.path] = new_resource
python
def generate_server(raml_root, config): """ Handle server generation process. :param raml_root: Instance of ramlfications.raml.RootNode. :param config: Pyramid Configurator instance. """ log.info('Server generation started') if not raml_root.resources: return root_resource = config.get_root_resource() generated_resources = {} for raml_resource in raml_root.resources: if raml_resource.path in generated_resources: continue # Get Nefertari parent resource parent_resource = _get_nefertari_parent_resource( raml_resource, generated_resources, root_resource) # Get generated resource and store it new_resource = generate_resource( config, raml_resource, parent_resource) if new_resource is not None: generated_resources[raml_resource.path] = new_resource
[ "def", "generate_server", "(", "raml_root", ",", "config", ")", ":", "log", ".", "info", "(", "'Server generation started'", ")", "if", "not", "raml_root", ".", "resources", ":", "return", "root_resource", "=", "config", ".", "get_root_resource", "(", ")", "generated_resources", "=", "{", "}", "for", "raml_resource", "in", "raml_root", ".", "resources", ":", "if", "raml_resource", ".", "path", "in", "generated_resources", ":", "continue", "# Get Nefertari parent resource", "parent_resource", "=", "_get_nefertari_parent_resource", "(", "raml_resource", ",", "generated_resources", ",", "root_resource", ")", "# Get generated resource and store it", "new_resource", "=", "generate_resource", "(", "config", ",", "raml_resource", ",", "parent_resource", ")", "if", "new_resource", "is", "not", "None", ":", "generated_resources", "[", "raml_resource", ".", "path", "]", "=", "new_resource" ]
Handle server generation process. :param raml_root: Instance of ramlfications.raml.RootNode. :param config: Pyramid Configurator instance.
[ "Handle", "server", "generation", "process", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/generators.py#L125-L151
ramses-tech/ramses
ramses/generators.py
generate_models
def generate_models(config, raml_resources): """ Generate model for each resource in :raml_resources: The DB model name is generated using singular titled version of current resource's url. E.g. for resource under url '/stories', model with name 'Story' will be generated. :param config: Pyramid Configurator instance. :param raml_resources: List of ramlfications.raml.ResourceNode. """ from .models import handle_model_generation if not raml_resources: return for raml_resource in raml_resources: # No need to generate models for dynamic resource if is_dynamic_uri(raml_resource.path): continue # Since POST resource must define schema use only POST # resources to generate models if raml_resource.method.upper() != 'POST': continue # Generate DB model # If this is an attribute resource we don't need to generate model resource_uri = get_resource_uri(raml_resource) route_name = get_route_name(resource_uri) if not attr_subresource(raml_resource, route_name): log.info('Configuring model for route `{}`'.format(route_name)) model_cls, is_auth_model = handle_model_generation( config, raml_resource) if is_auth_model: config.registry.auth_model = model_cls
python
def generate_models(config, raml_resources): """ Generate model for each resource in :raml_resources: The DB model name is generated using singular titled version of current resource's url. E.g. for resource under url '/stories', model with name 'Story' will be generated. :param config: Pyramid Configurator instance. :param raml_resources: List of ramlfications.raml.ResourceNode. """ from .models import handle_model_generation if not raml_resources: return for raml_resource in raml_resources: # No need to generate models for dynamic resource if is_dynamic_uri(raml_resource.path): continue # Since POST resource must define schema use only POST # resources to generate models if raml_resource.method.upper() != 'POST': continue # Generate DB model # If this is an attribute resource we don't need to generate model resource_uri = get_resource_uri(raml_resource) route_name = get_route_name(resource_uri) if not attr_subresource(raml_resource, route_name): log.info('Configuring model for route `{}`'.format(route_name)) model_cls, is_auth_model = handle_model_generation( config, raml_resource) if is_auth_model: config.registry.auth_model = model_cls
[ "def", "generate_models", "(", "config", ",", "raml_resources", ")", ":", "from", ".", "models", "import", "handle_model_generation", "if", "not", "raml_resources", ":", "return", "for", "raml_resource", "in", "raml_resources", ":", "# No need to generate models for dynamic resource", "if", "is_dynamic_uri", "(", "raml_resource", ".", "path", ")", ":", "continue", "# Since POST resource must define schema use only POST", "# resources to generate models", "if", "raml_resource", ".", "method", ".", "upper", "(", ")", "!=", "'POST'", ":", "continue", "# Generate DB model", "# If this is an attribute resource we don't need to generate model", "resource_uri", "=", "get_resource_uri", "(", "raml_resource", ")", "route_name", "=", "get_route_name", "(", "resource_uri", ")", "if", "not", "attr_subresource", "(", "raml_resource", ",", "route_name", ")", ":", "log", ".", "info", "(", "'Configuring model for route `{}`'", ".", "format", "(", "route_name", ")", ")", "model_cls", ",", "is_auth_model", "=", "handle_model_generation", "(", "config", ",", "raml_resource", ")", "if", "is_auth_model", ":", "config", ".", "registry", ".", "auth_model", "=", "model_cls" ]
Generate model for each resource in :raml_resources: The DB model name is generated using singular titled version of current resource's url. E.g. for resource under url '/stories', model with name 'Story' will be generated. :param config: Pyramid Configurator instance. :param raml_resources: List of ramlfications.raml.ResourceNode.
[ "Generate", "model", "for", "each", "resource", "in", ":", "raml_resources", ":" ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/generators.py#L154-L186
ramses-tech/ramses
ramses/views.py
generate_rest_view
def generate_rest_view(config, model_cls, attrs=None, es_based=True, attr_view=False, singular=False): """ Generate REST view for a model class. :param model_cls: Generated DB model class. :param attr: List of strings that represent names of view methods, new generated view should support. Not supported methods are replaced with property that raises AttributeError to display MethodNotAllowed error. :param es_based: Boolean indicating if generated view should read from elasticsearch. If True - collection reads are performed from elasticsearch. Database is used for reads otherwise. Defaults to True. :param attr_view: Boolean indicating if ItemAttributeView should be used as a base class for generated view. :param singular: Boolean indicating if ItemSingularView should be used as a base class for generated view. """ valid_attrs = (list(collection_methods.values()) + list(item_methods.values())) missing_attrs = set(valid_attrs) - set(attrs) if singular: bases = [ItemSingularView] elif attr_view: bases = [ItemAttributeView] elif es_based: bases = [ESCollectionView] else: bases = [CollectionView] if config.registry.database_acls: from nefertari_guards.view import ACLFilterViewMixin bases = [SetObjectACLMixin] + bases + [ACLFilterViewMixin] bases.append(NefertariBaseView) RESTView = type('RESTView', tuple(bases), {'Model': model_cls}) def _attr_error(*args, **kwargs): raise AttributeError for attr in missing_attrs: setattr(RESTView, attr, property(_attr_error)) return RESTView
python
def generate_rest_view(config, model_cls, attrs=None, es_based=True, attr_view=False, singular=False): """ Generate REST view for a model class. :param model_cls: Generated DB model class. :param attr: List of strings that represent names of view methods, new generated view should support. Not supported methods are replaced with property that raises AttributeError to display MethodNotAllowed error. :param es_based: Boolean indicating if generated view should read from elasticsearch. If True - collection reads are performed from elasticsearch. Database is used for reads otherwise. Defaults to True. :param attr_view: Boolean indicating if ItemAttributeView should be used as a base class for generated view. :param singular: Boolean indicating if ItemSingularView should be used as a base class for generated view. """ valid_attrs = (list(collection_methods.values()) + list(item_methods.values())) missing_attrs = set(valid_attrs) - set(attrs) if singular: bases = [ItemSingularView] elif attr_view: bases = [ItemAttributeView] elif es_based: bases = [ESCollectionView] else: bases = [CollectionView] if config.registry.database_acls: from nefertari_guards.view import ACLFilterViewMixin bases = [SetObjectACLMixin] + bases + [ACLFilterViewMixin] bases.append(NefertariBaseView) RESTView = type('RESTView', tuple(bases), {'Model': model_cls}) def _attr_error(*args, **kwargs): raise AttributeError for attr in missing_attrs: setattr(RESTView, attr, property(_attr_error)) return RESTView
[ "def", "generate_rest_view", "(", "config", ",", "model_cls", ",", "attrs", "=", "None", ",", "es_based", "=", "True", ",", "attr_view", "=", "False", ",", "singular", "=", "False", ")", ":", "valid_attrs", "=", "(", "list", "(", "collection_methods", ".", "values", "(", ")", ")", "+", "list", "(", "item_methods", ".", "values", "(", ")", ")", ")", "missing_attrs", "=", "set", "(", "valid_attrs", ")", "-", "set", "(", "attrs", ")", "if", "singular", ":", "bases", "=", "[", "ItemSingularView", "]", "elif", "attr_view", ":", "bases", "=", "[", "ItemAttributeView", "]", "elif", "es_based", ":", "bases", "=", "[", "ESCollectionView", "]", "else", ":", "bases", "=", "[", "CollectionView", "]", "if", "config", ".", "registry", ".", "database_acls", ":", "from", "nefertari_guards", ".", "view", "import", "ACLFilterViewMixin", "bases", "=", "[", "SetObjectACLMixin", "]", "+", "bases", "+", "[", "ACLFilterViewMixin", "]", "bases", ".", "append", "(", "NefertariBaseView", ")", "RESTView", "=", "type", "(", "'RESTView'", ",", "tuple", "(", "bases", ")", ",", "{", "'Model'", ":", "model_cls", "}", ")", "def", "_attr_error", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "raise", "AttributeError", "for", "attr", "in", "missing_attrs", ":", "setattr", "(", "RESTView", ",", "attr", ",", "property", "(", "_attr_error", ")", ")", "return", "RESTView" ]
Generate REST view for a model class. :param model_cls: Generated DB model class. :param attrs: List of strings that represent names of view methods the newly generated view should support. Unsupported methods are replaced with a property that raises AttributeError to display a MethodNotAllowed error. :param es_based: Boolean indicating if the generated view should read from elasticsearch. If True, collection reads are performed from elasticsearch; the database is used for reads otherwise. Defaults to True. :param attr_view: Boolean indicating if ItemAttributeView should be used as a base class for the generated view. :param singular: Boolean indicating if ItemSingularView should be used as a base class for the generated view.
[ "Generate", "REST", "view", "for", "a", "model", "class", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L447-L491
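A standalone illustration (not the real generated view) of the trick used in `generate_rest_view` above: unsupported view methods are replaced with a property that raises AttributeError, which, per the docstring, is how the MethodNotAllowed error is produced.
```python
# Standalone illustration of the "missing method" trick used above; DemoRESTView
# is a hypothetical class, not the view type() built by generate_rest_view.
def _attr_error(*args, **kwargs):
    raise AttributeError

class DemoRESTView:
    def index(self):                     # a supported action
        return 'ok'
    create = property(_attr_error)       # an unsupported action

view = DemoRESTView()
assert view.index() == 'ok'
assert not hasattr(view, 'create')       # the property raises AttributeError on access
```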
ramses-tech/ramses
ramses/views.py
SetObjectACLMixin.set_object_acl
def set_object_acl(self, obj): """ Set object ACL on creation if not already present. """ if not obj._acl: from nefertari_guards import engine as guards_engine acl = self._factory(self.request).generate_item_acl(obj) obj._acl = guards_engine.ACLField.stringify_acl(acl)
python
def set_object_acl(self, obj): """ Set object ACL on creation if not already present. """ if not obj._acl: from nefertari_guards import engine as guards_engine acl = self._factory(self.request).generate_item_acl(obj) obj._acl = guards_engine.ACLField.stringify_acl(acl)
[ "def", "set_object_acl", "(", "self", ",", "obj", ")", ":", "if", "not", "obj", ".", "_acl", ":", "from", "nefertari_guards", "import", "engine", "as", "guards_engine", "acl", "=", "self", ".", "_factory", "(", "self", ".", "request", ")", ".", "generate_item_acl", "(", "obj", ")", "obj", ".", "_acl", "=", "guards_engine", ".", "ACLField", ".", "stringify_acl", "(", "acl", ")" ]
Set object ACL on creation if not already present.
[ "Set", "object", "ACL", "on", "creation", "if", "not", "already", "present", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L37-L42
ramses-tech/ramses
ramses/views.py
BaseView.resolve_kw
def resolve_kw(self, kwargs): """ Resolve :kwargs: like `story_id: 1` to the form of `id: 1`. """ resolved = {} for key, value in kwargs.items(): split = key.split('_', 1) if len(split) > 1: key = split[1] resolved[key] = value return resolved
python
def resolve_kw(self, kwargs): """ Resolve :kwargs: like `story_id: 1` to the form of `id: 1`. """ resolved = {} for key, value in kwargs.items(): split = key.split('_', 1) if len(split) > 1: key = split[1] resolved[key] = value return resolved
[ "def", "resolve_kw", "(", "self", ",", "kwargs", ")", ":", "resolved", "=", "{", "}", "for", "key", ",", "value", "in", "kwargs", ".", "items", "(", ")", ":", "split", "=", "key", ".", "split", "(", "'_'", ",", "1", ")", "if", "len", "(", "split", ")", ">", "1", ":", "key", "=", "split", "[", "1", "]", "resolved", "[", "key", "]", "=", "value", "return", "resolved" ]
Resolve :kwargs: like `story_id: 1` to the form of `id: 1`.
[ "Resolve", ":", "kwargs", ":", "like", "story_id", ":", "1", "to", "the", "form", "of", "id", ":", "1", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L62-L72
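A standalone copy of the `BaseView.resolve_kw` logic shown above, so it can be exercised without constructing a view; the sample matchdicts are illustrative.
```python
# Standalone copy of BaseView.resolve_kw for illustration; the input dicts
# stand in for Pyramid matchdicts.
def resolve_kw(kwargs):
    resolved = {}
    for key, value in kwargs.items():
        split = key.split('_', 1)
        if len(split) > 1:
            key = split[1]
        resolved[key] = value
    return resolved

assert resolve_kw({'story_id': 1}) == {'id': 1}
assert resolve_kw({'user_username': 'alice'}) == {'username': 'alice'}
assert resolve_kw({'id': 7}) == {'id': 7}   # keys without an underscore pass through
```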
ramses-tech/ramses
ramses/views.py
BaseView._location
def _location(self, obj): """ Get location of the `obj` Arguments: :obj: self.Model instance. """ field_name = self.clean_id_name return self.request.route_url( self._resource.uid, **{self._resource.id_name: getattr(obj, field_name)})
python
def _location(self, obj): """ Get location of the `obj` Arguments: :obj: self.Model instance. """ field_name = self.clean_id_name return self.request.route_url( self._resource.uid, **{self._resource.id_name: getattr(obj, field_name)})
[ "def", "_location", "(", "self", ",", "obj", ")", ":", "field_name", "=", "self", ".", "clean_id_name", "return", "self", ".", "request", ".", "route_url", "(", "self", ".", "_resource", ".", "uid", ",", "*", "*", "{", "self", ".", "_resource", ".", "id_name", ":", "getattr", "(", "obj", ",", "field_name", ")", "}", ")" ]
Get location of the `obj` Arguments: :obj: self.Model instance.
[ "Get", "location", "of", "the", "obj" ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L74-L83
ramses-tech/ramses
ramses/views.py
BaseView._parent_queryset
def _parent_queryset(self): """ Get queryset of parent view. Generated queryset is used to run queries in the current level view. """ parent = self._resource.parent if hasattr(parent, 'view'): req = self.request.blank(self.request.path) req.registry = self.request.registry req.matchdict = { parent.id_name: self.request.matchdict.get(parent.id_name)} parent_view = parent.view(parent.view._factory, req) obj = parent_view.get_item(**req.matchdict) if isinstance(self, ItemSubresourceBaseView): return prop = self._resource.collection_name return getattr(obj, prop, None)
python
def _parent_queryset(self): """ Get queryset of parent view. Generated queryset is used to run queries in the current level view. """ parent = self._resource.parent if hasattr(parent, 'view'): req = self.request.blank(self.request.path) req.registry = self.request.registry req.matchdict = { parent.id_name: self.request.matchdict.get(parent.id_name)} parent_view = parent.view(parent.view._factory, req) obj = parent_view.get_item(**req.matchdict) if isinstance(self, ItemSubresourceBaseView): return prop = self._resource.collection_name return getattr(obj, prop, None)
[ "def", "_parent_queryset", "(", "self", ")", ":", "parent", "=", "self", ".", "_resource", ".", "parent", "if", "hasattr", "(", "parent", ",", "'view'", ")", ":", "req", "=", "self", ".", "request", ".", "blank", "(", "self", ".", "request", ".", "path", ")", "req", ".", "registry", "=", "self", ".", "request", ".", "registry", "req", ".", "matchdict", "=", "{", "parent", ".", "id_name", ":", "self", ".", "request", ".", "matchdict", ".", "get", "(", "parent", ".", "id_name", ")", "}", "parent_view", "=", "parent", ".", "view", "(", "parent", ".", "view", ".", "_factory", ",", "req", ")", "obj", "=", "parent_view", ".", "get_item", "(", "*", "*", "req", ".", "matchdict", ")", "if", "isinstance", "(", "self", ",", "ItemSubresourceBaseView", ")", ":", "return", "prop", "=", "self", ".", "_resource", ".", "collection_name", "return", "getattr", "(", "obj", ",", "prop", ",", "None", ")" ]
Get queryset of parent view. Generated queryset is used to run queries in the current level view.
[ "Get", "queryset", "of", "parent", "view", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L85-L101
ramses-tech/ramses
ramses/views.py
BaseView.get_collection
def get_collection(self, **kwargs): """ Get objects collection taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus a queryset returned by this method will be a subset of its parent view's queryset, thus filtering out objects that don't belong to the parent object. """ self._query_params.update(kwargs) objects = self._parent_queryset() if objects is not None: return self.Model.filter_objects( objects, **self._query_params) return self.Model.get_collection(**self._query_params)
python
def get_collection(self, **kwargs): """ Get objects collection taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus a queryset returned by this method will be a subset of its parent view's queryset, thus filtering out objects that don't belong to the parent object. """ self._query_params.update(kwargs) objects = self._parent_queryset() if objects is not None: return self.Model.filter_objects( objects, **self._query_params) return self.Model.get_collection(**self._query_params)
[ "def", "get_collection", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_query_params", ".", "update", "(", "kwargs", ")", "objects", "=", "self", ".", "_parent_queryset", "(", ")", "if", "objects", "is", "not", "None", ":", "return", "self", ".", "Model", ".", "filter_objects", "(", "objects", ",", "*", "*", "self", ".", "_query_params", ")", "return", "self", ".", "Model", ".", "get_collection", "(", "*", "*", "self", ".", "_query_params", ")" ]
Get objects collection taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus a queryset returned by this method will be a subset of its parent view's queryset, thus filtering out objects that don't belong to the parent object.
[ "Get", "objects", "collection", "taking", "into", "account", "generated", "queryset", "of", "parent", "view", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L103-L117
ramses-tech/ramses
ramses/views.py
BaseView.get_item
def get_item(self, **kwargs): """ Get collection item taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus an item returned by this method will belong to its parent view's queryset, thus filtering out objects that don't belong to the parent object. Returns an object from the applicable ACL. If ACL wasn't applied, it is applied explicitly. """ if six.callable(self.context): self.reload_context(es_based=False, **kwargs) objects = self._parent_queryset() if objects is not None and self.context not in objects: raise JHTTPNotFound('{}({}) not found'.format( self.Model.__name__, self._get_context_key(**kwargs))) return self.context
python
def get_item(self, **kwargs): """ Get collection item taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus an item returned by this method will belong to its parent view's queryset, thus filtering out objects that don't belong to the parent object. Returns an object from the applicable ACL. If ACL wasn't applied, it is applied explicitly. """ if six.callable(self.context): self.reload_context(es_based=False, **kwargs) objects = self._parent_queryset() if objects is not None and self.context not in objects: raise JHTTPNotFound('{}({}) not found'.format( self.Model.__name__, self._get_context_key(**kwargs))) return self.context
[ "def", "get_item", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "six", ".", "callable", "(", "self", ".", "context", ")", ":", "self", ".", "reload_context", "(", "es_based", "=", "False", ",", "*", "*", "kwargs", ")", "objects", "=", "self", ".", "_parent_queryset", "(", ")", "if", "objects", "is", "not", "None", "and", "self", ".", "context", "not", "in", "objects", ":", "raise", "JHTTPNotFound", "(", "'{}({}) not found'", ".", "format", "(", "self", ".", "Model", ".", "__name__", ",", "self", ".", "_get_context_key", "(", "*", "*", "kwargs", ")", ")", ")", "return", "self", ".", "context" ]
Get collection item taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus an item returned by this method will belong to its parent view's queryset, thus filtering out objects that don't belong to the parent object. Returns an object from the applicable ACL. If ACL wasn't applied, it is applied explicitly.
[ "Get", "collection", "item", "taking", "into", "account", "generated", "queryset", "of", "parent", "view", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L119-L139
ramses-tech/ramses
ramses/views.py
BaseView.reload_context
def reload_context(self, es_based, **kwargs): """ Reload `self.context` object into a DB or ES object. A reload is performed by getting the object ID from :kwargs: and then getting a context key item from the new instance of `self._factory` which is an ACL class used by the current view. Arguments: :es_based: Boolean. Whether to init ACL ac es-based or not. This affects the backend which will be queried - either DB or ES :kwargs: Kwargs that contain value for current resource 'id_name' key """ from .acl import BaseACL key = self._get_context_key(**kwargs) kwargs = {'request': self.request} if issubclass(self._factory, BaseACL): kwargs['es_based'] = es_based acl = self._factory(**kwargs) if acl.item_model is None: acl.item_model = self.Model self.context = acl[key]
python
def reload_context(self, es_based, **kwargs): """ Reload `self.context` object into a DB or ES object. A reload is performed by getting the object ID from :kwargs: and then getting a context key item from the new instance of `self._factory` which is an ACL class used by the current view. Arguments: :es_based: Boolean. Whether to init ACL ac es-based or not. This affects the backend which will be queried - either DB or ES :kwargs: Kwargs that contain value for current resource 'id_name' key """ from .acl import BaseACL key = self._get_context_key(**kwargs) kwargs = {'request': self.request} if issubclass(self._factory, BaseACL): kwargs['es_based'] = es_based acl = self._factory(**kwargs) if acl.item_model is None: acl.item_model = self.Model self.context = acl[key]
[ "def", "reload_context", "(", "self", ",", "es_based", ",", "*", "*", "kwargs", ")", ":", "from", ".", "acl", "import", "BaseACL", "key", "=", "self", ".", "_get_context_key", "(", "*", "*", "kwargs", ")", "kwargs", "=", "{", "'request'", ":", "self", ".", "request", "}", "if", "issubclass", "(", "self", ".", "_factory", ",", "BaseACL", ")", ":", "kwargs", "[", "'es_based'", "]", "=", "es_based", "acl", "=", "self", ".", "_factory", "(", "*", "*", "kwargs", ")", "if", "acl", ".", "item_model", "is", "None", ":", "acl", ".", "item_model", "=", "self", ".", "Model", "self", ".", "context", "=", "acl", "[", "key", "]" ]
Reload `self.context` object into a DB or ES object. A reload is performed by getting the object ID from :kwargs: and then getting a context key item from the new instance of `self._factory` which is an ACL class used by the current view. Arguments: :es_based: Boolean. Whether to init the ACL as ES-based or not. This affects the backend which will be queried - either DB or ES. :kwargs: Kwargs that contain the value for the current resource's 'id_name' key.
[ "Reload", "self", ".", "context", "object", "into", "a", "DB", "or", "ES", "object", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L145-L168
ramses-tech/ramses
ramses/views.py
ESBaseView._parent_queryset_es
def _parent_queryset_es(self): """ Get queryset (list of object IDs) of parent view. The generated queryset is used to run queries in the current level's view. """ parent = self._resource.parent if hasattr(parent, 'view'): req = self.request.blank(self.request.path) req.registry = self.request.registry req.matchdict = { parent.id_name: self.request.matchdict.get(parent.id_name)} parent_view = parent.view(parent.view._factory, req) obj = parent_view.get_item_es(**req.matchdict) prop = self._resource.collection_name objects_ids = getattr(obj, prop, None) return objects_ids
python
def _parent_queryset_es(self): """ Get queryset (list of object IDs) of parent view. The generated queryset is used to run queries in the current level's view. """ parent = self._resource.parent if hasattr(parent, 'view'): req = self.request.blank(self.request.path) req.registry = self.request.registry req.matchdict = { parent.id_name: self.request.matchdict.get(parent.id_name)} parent_view = parent.view(parent.view._factory, req) obj = parent_view.get_item_es(**req.matchdict) prop = self._resource.collection_name objects_ids = getattr(obj, prop, None) return objects_ids
[ "def", "_parent_queryset_es", "(", "self", ")", ":", "parent", "=", "self", ".", "_resource", ".", "parent", "if", "hasattr", "(", "parent", ",", "'view'", ")", ":", "req", "=", "self", ".", "request", ".", "blank", "(", "self", ".", "request", ".", "path", ")", "req", ".", "registry", "=", "self", ".", "request", ".", "registry", "req", ".", "matchdict", "=", "{", "parent", ".", "id_name", ":", "self", ".", "request", ".", "matchdict", ".", "get", "(", "parent", ".", "id_name", ")", "}", "parent_view", "=", "parent", ".", "view", "(", "parent", ".", "view", ".", "_factory", ",", "req", ")", "obj", "=", "parent_view", ".", "get_item_es", "(", "*", "*", "req", ".", "matchdict", ")", "prop", "=", "self", ".", "_resource", ".", "collection_name", "objects_ids", "=", "getattr", "(", "obj", ",", "prop", ",", "None", ")", "return", "objects_ids" ]
Get queryset (list of object IDs) of parent view. The generated queryset is used to run queries in the current level's view.
[ "Get", "queryset", "(", "list", "of", "object", "IDs", ")", "of", "parent", "view", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L218-L234
ramses-tech/ramses
ramses/views.py
ESBaseView.get_es_object_ids
def get_es_object_ids(self, objects): """ Return IDs of :objects: if they are not IDs already. """ id_field = self.clean_id_name ids = [getattr(obj, id_field, obj) for obj in objects] return list(set(str(id_) for id_ in ids))
python
def get_es_object_ids(self, objects): """ Return IDs of :objects: if they are not IDs already. """ id_field = self.clean_id_name ids = [getattr(obj, id_field, obj) for obj in objects] return list(set(str(id_) for id_ in ids))
[ "def", "get_es_object_ids", "(", "self", ",", "objects", ")", ":", "id_field", "=", "self", ".", "clean_id_name", "ids", "=", "[", "getattr", "(", "obj", ",", "id_field", ",", "obj", ")", "for", "obj", "in", "objects", "]", "return", "list", "(", "set", "(", "str", "(", "id_", ")", "for", "id_", "in", "ids", ")", ")" ]
Return IDs of :objects: if they are not IDs already.
[ "Return", "IDs", "of", ":", "objects", ":", "if", "they", "are", "not", "IDs", "already", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L236-L240
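A standalone copy of the `ESBaseView.get_es_object_ids` logic above so it can be run outside a view; `id_field` stands in for `self.clean_id_name` and `Story` is a hypothetical document class.
```python
# Standalone copy of ESBaseView.get_es_object_ids for illustration; id_field
# stands in for self.clean_id_name, Story is a hypothetical document class.
def get_es_object_ids(objects, id_field='id'):
    ids = [getattr(obj, id_field, obj) for obj in objects]
    return list(set(str(id_) for id_ in ids))

class Story:
    def __init__(self, id):
        self.id = id

# Mixed objects and bare ids normalise to de-duplicated string ids
# (unordered, because a set is used internally).
assert sorted(get_es_object_ids([Story(1), Story(2), 2, '3'])) == ['1', '2', '3']
```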
ramses-tech/ramses
ramses/views.py
ESBaseView.get_collection_es
def get_collection_es(self): """ Get ES objects collection taking into account the generated queryset of parent view. This method allows working with nested resources properly. Thus a queryset returned by this method will be a subset of its parent view's queryset, thus filtering out objects that don't belong to the parent object. """ objects_ids = self._parent_queryset_es() if objects_ids is not None: objects_ids = self.get_es_object_ids(objects_ids) if not objects_ids: return [] self._query_params['id'] = objects_ids return super(ESBaseView, self).get_collection_es()
python
def get_collection_es(self): """ Get ES objects collection taking into account the generated queryset of parent view. This method allows working with nested resources properly. Thus a queryset returned by this method will be a subset of its parent view's queryset, thus filtering out objects that don't belong to the parent object. """ objects_ids = self._parent_queryset_es() if objects_ids is not None: objects_ids = self.get_es_object_ids(objects_ids) if not objects_ids: return [] self._query_params['id'] = objects_ids return super(ESBaseView, self).get_collection_es()
[ "def", "get_collection_es", "(", "self", ")", ":", "objects_ids", "=", "self", ".", "_parent_queryset_es", "(", ")", "if", "objects_ids", "is", "not", "None", ":", "objects_ids", "=", "self", ".", "get_es_object_ids", "(", "objects_ids", ")", "if", "not", "objects_ids", ":", "return", "[", "]", "self", ".", "_query_params", "[", "'id'", "]", "=", "objects_ids", "return", "super", "(", "ESBaseView", ",", "self", ")", ".", "get_collection_es", "(", ")" ]
Get ES objects collection taking into account the generated queryset of parent view. This method allows working with nested resources properly. Thus a queryset returned by this method will be a subset of its parent view's queryset, thus filtering out objects that don't belong to the parent object.
[ "Get", "ES", "objects", "collection", "taking", "into", "account", "the", "generated", "queryset", "of", "parent", "view", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L242-L259
ramses-tech/ramses
ramses/views.py
ESBaseView.get_item_es
def get_item_es(self, **kwargs): """ Get ES collection item taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus an item returned by this method will belong to its parent view's queryset, thus filtering out objects that don't belong to the parent object. Returns an object retrieved from the applicable ACL. If an ACL wasn't applied, it is applied explicitly. """ item_id = self._get_context_key(**kwargs) objects_ids = self._parent_queryset_es() if objects_ids is not None: objects_ids = self.get_es_object_ids(objects_ids) if six.callable(self.context): self.reload_context(es_based=True, **kwargs) if (objects_ids is not None) and (item_id not in objects_ids): raise JHTTPNotFound('{}(id={}) resource not found'.format( self.Model.__name__, item_id)) return self.context
python
def get_item_es(self, **kwargs): """ Get ES collection item taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus an item returned by this method will belong to its parent view's queryset, thus filtering out objects that don't belong to the parent object. Returns an object retrieved from the applicable ACL. If an ACL wasn't applied, it is applied explicitly. """ item_id = self._get_context_key(**kwargs) objects_ids = self._parent_queryset_es() if objects_ids is not None: objects_ids = self.get_es_object_ids(objects_ids) if six.callable(self.context): self.reload_context(es_based=True, **kwargs) if (objects_ids is not None) and (item_id not in objects_ids): raise JHTTPNotFound('{}(id={}) resource not found'.format( self.Model.__name__, item_id)) return self.context
[ "def", "get_item_es", "(", "self", ",", "*", "*", "kwargs", ")", ":", "item_id", "=", "self", ".", "_get_context_key", "(", "*", "*", "kwargs", ")", "objects_ids", "=", "self", ".", "_parent_queryset_es", "(", ")", "if", "objects_ids", "is", "not", "None", ":", "objects_ids", "=", "self", ".", "get_es_object_ids", "(", "objects_ids", ")", "if", "six", ".", "callable", "(", "self", ".", "context", ")", ":", "self", ".", "reload_context", "(", "es_based", "=", "True", ",", "*", "*", "kwargs", ")", "if", "(", "objects_ids", "is", "not", "None", ")", "and", "(", "item_id", "not", "in", "objects_ids", ")", ":", "raise", "JHTTPNotFound", "(", "'{}(id={}) resource not found'", ".", "format", "(", "self", ".", "Model", ".", "__name__", ",", "item_id", ")", ")", "return", "self", ".", "context" ]
Get ES collection item taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus an item returned by this method will belong to its parent view's queryset, thus filtering out objects that don't belong to the parent object. Returns an object retrieved from the applicable ACL. If an ACL wasn't applied, it is applied explicitly.
[ "Get", "ES", "collection", "item", "taking", "into", "account", "generated", "queryset", "of", "parent", "view", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L261-L284
ramses-tech/ramses
ramses/views.py
ESCollectionView.update
def update(self, **kwargs):
        """ Explicitly reload context with DB usage to get access
        to complete DB object.
        """
        self.reload_context(es_based=False, **kwargs)
        return super(ESCollectionView, self).update(**kwargs)
python
def update(self, **kwargs):
        """ Explicitly reload context with DB usage to get access
        to complete DB object.
        """
        self.reload_context(es_based=False, **kwargs)
        return super(ESCollectionView, self).update(**kwargs)
[ "def", "update", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "reload_context", "(", "es_based", "=", "False", ",", "*", "*", "kwargs", ")", "return", "super", "(", "ESCollectionView", ",", "self", ")", ".", "update", "(", "*", "*", "kwargs", ")" ]
Explicitly reload context with DB usage to get access to complete DB object.
[ "Explicitly", "reload", "context", "with", "DB", "usage", "to", "get", "access", "to", "complete", "DB", "object", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L298-L303
ramses-tech/ramses
ramses/views.py
ESCollectionView.delete
def delete(self, **kwargs):
        """ Explicitly reload context with DB usage to get access
        to complete DB object.
        """
        self.reload_context(es_based=False, **kwargs)
        return super(ESCollectionView, self).delete(**kwargs)
python
def delete(self, **kwargs):
        """ Explicitly reload context with DB usage to get access
        to complete DB object.
        """
        self.reload_context(es_based=False, **kwargs)
        return super(ESCollectionView, self).delete(**kwargs)
[ "def", "delete", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "reload_context", "(", "es_based", "=", "False", ",", "*", "*", "kwargs", ")", "return", "super", "(", "ESCollectionView", ",", "self", ")", ".", "delete", "(", "*", "*", "kwargs", ")" ]
Explicitly reload context with DB usage to get access to complete DB object.
[ "Explicitly", "reload", "context", "with", "DB", "usage", "to", "get", "access", "to", "complete", "DB", "object", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L305-L310
ramses-tech/ramses
ramses/views.py
ESCollectionView.get_dbcollection_with_es
def get_dbcollection_with_es(self, **kwargs):
        """ Get DB objects collection by first querying ES. """
        es_objects = self.get_collection_es()
        db_objects = self.Model.filter_objects(es_objects)
        return db_objects
python
def get_dbcollection_with_es(self, **kwargs):
        """ Get DB objects collection by first querying ES. """
        es_objects = self.get_collection_es()
        db_objects = self.Model.filter_objects(es_objects)
        return db_objects
[ "def", "get_dbcollection_with_es", "(", "self", ",", "*", "*", "kwargs", ")", ":", "es_objects", "=", "self", ".", "get_collection_es", "(", ")", "db_objects", "=", "self", ".", "Model", ".", "filter_objects", "(", "es_objects", ")", "return", "db_objects" ]
Get DB objects collection by first querying ES.
[ "Get", "DB", "objects", "collection", "by", "first", "querying", "ES", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L312-L316
ramses-tech/ramses
ramses/views.py
ESCollectionView.delete_many
def delete_many(self, **kwargs):
        """ Delete multiple objects from collection.

        First ES is queried, then the results are used to query the DB.
        This is done to make sure deleted objects are those filtered
        by ES in the 'index' method (so user deletes what he saw).
        """
        db_objects = self.get_dbcollection_with_es(**kwargs)
        return self.Model._delete_many(db_objects, self.request)
python
def delete_many(self, **kwargs):
        """ Delete multiple objects from collection.

        First ES is queried, then the results are used to query the DB.
        This is done to make sure deleted objects are those filtered
        by ES in the 'index' method (so user deletes what he saw).
        """
        db_objects = self.get_dbcollection_with_es(**kwargs)
        return self.Model._delete_many(db_objects, self.request)
[ "def", "delete_many", "(", "self", ",", "*", "*", "kwargs", ")", ":", "db_objects", "=", "self", ".", "get_dbcollection_with_es", "(", "*", "*", "kwargs", ")", "return", "self", ".", "Model", ".", "_delete_many", "(", "db_objects", ",", "self", ".", "request", ")" ]
Delete multiple objects from collection. First ES is queried, then the results are used to query the DB. This is done to make sure deleted objects are those filtered by ES in the 'index' method (so user deletes what he saw).
[ "Delete", "multiple", "objects", "from", "collection", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L318-L326
ramses-tech/ramses
ramses/views.py
ESCollectionView.update_many
def update_many(self, **kwargs):
        """ Update multiple objects from collection.

        First ES is queried, then the results are used to query DB.
        This is done to make sure updated objects are those filtered
        by ES in the 'index' method (so user updates what he saw).
        """
        db_objects = self.get_dbcollection_with_es(**kwargs)
        return self.Model._update_many(
            db_objects, self._json_params, self.request)
python
def update_many(self, **kwargs):
        """ Update multiple objects from collection.

        First ES is queried, then the results are used to query DB.
        This is done to make sure updated objects are those filtered
        by ES in the 'index' method (so user updates what he saw).
        """
        db_objects = self.get_dbcollection_with_es(**kwargs)
        return self.Model._update_many(
            db_objects, self._json_params, self.request)
[ "def", "update_many", "(", "self", ",", "*", "*", "kwargs", ")", ":", "db_objects", "=", "self", ".", "get_dbcollection_with_es", "(", "*", "*", "kwargs", ")", "return", "self", ".", "Model", ".", "_update_many", "(", "db_objects", ",", "self", ".", "_json_params", ",", "self", ".", "request", ")" ]
Update multiple objects from collection. First ES is queried, then the results are used to query DB. This is done to make sure updated objects are those filtered by ES in the 'index' method (so user updates what he saw).
[ "Update", "multiple", "objects", "from", "collection", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L328-L337
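Both delete_many and update_many above follow the same ES-then-DB shape: the ES search decides which rows are touched, and the DB layer applies the change. A framework-free sketch of that shape, with all names illustrative:

# Sketch of the ES-then-DB bulk pattern (names are illustrative, not ramses API).
def bulk_update(es_search, db_rows_by_ids, apply_changes, params, changes):
    hits = es_search(params)                 # 1. evaluate filters in ES
    ids = [hit['id'] for hit in hits]        # 2. collect the matching ids
    rows = db_rows_by_ids(ids)               # 3. reload exactly those rows from the DB
    return apply_changes(rows, changes)      # 4. mutate only what the index view listed

# Stubbed usage:
result = bulk_update(
    es_search=lambda p: [{'id': 1}, {'id': 3}],
    db_rows_by_ids=lambda ids: [{'id': i, 'name': 'old'} for i in ids],
    apply_changes=lambda rows, ch: [dict(r, **ch) for r in rows],
    params={'name': 'old'},
    changes={'name': 'new'},
)
print(result)   # [{'id': 1, 'name': 'new'}, {'id': 3, 'name': 'new'}]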
ramses-tech/ramses
ramses/views.py
ItemSubresourceBaseView._get_context_key
def _get_context_key(self, **kwargs):
        """ Get value of `self._resource.parent.id_name` from :kwargs: """
        return str(kwargs.get(self._resource.parent.id_name))
python
def _get_context_key(self, **kwargs):
        """ Get value of `self._resource.parent.id_name` from :kwargs: """
        return str(kwargs.get(self._resource.parent.id_name))
[ "def", "_get_context_key", "(", "self", ",", "*", "*", "kwargs", ")", ":", "return", "str", "(", "kwargs", ".", "get", "(", "self", ".", "_resource", ".", "parent", ".", "id_name", ")", ")" ]
Get value of `self._resource.parent.id_name` from :kwargs:
[ "Get", "value", "of", "self", ".", "_resource", ".", "parent", ".", "id_name", "from", ":", "kwargs", ":" ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L359-L361
ramses-tech/ramses
ramses/views.py
ItemSubresourceBaseView.get_item
def get_item(self, **kwargs):
        """ Reload context on each access. """
        self.reload_context(es_based=False, **kwargs)
        return super(ItemSubresourceBaseView, self).get_item(**kwargs)
python
def get_item(self, **kwargs):
        """ Reload context on each access. """
        self.reload_context(es_based=False, **kwargs)
        return super(ItemSubresourceBaseView, self).get_item(**kwargs)
[ "def", "get_item", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "reload_context", "(", "es_based", "=", "False", ",", "*", "*", "kwargs", ")", "return", "super", "(", "ItemSubresourceBaseView", ",", "self", ")", ".", "get_item", "(", "*", "*", "kwargs", ")" ]
Reload context on each access.
[ "Reload", "context", "on", "each", "access", "." ]
train
https://github.com/ramses-tech/ramses/blob/ea2e1e896325b7256cdf5902309e05fd98e0c14c/ramses/views.py#L363-L366
edoburu/sphinxcontrib-django
sphinxcontrib_django/__init__.py
setup
def setup(app):
    """Allow this module to be used as sphinx extension.

    This attaches the Sphinx hooks.

    :type app: sphinx.application.Sphinx
    """
    import sphinxcontrib_django.docstrings
    import sphinxcontrib_django.roles

    # Setup both modules at once. They can also be separately imported to
    # use only fragments of this package.
    sphinxcontrib_django.docstrings.setup(app)
    sphinxcontrib_django.roles.setup(app)
python
def setup(app):
    """Allow this module to be used as sphinx extension.

    This attaches the Sphinx hooks.

    :type app: sphinx.application.Sphinx
    """
    import sphinxcontrib_django.docstrings
    import sphinxcontrib_django.roles

    # Setup both modules at once. They can also be separately imported to
    # use only fragments of this package.
    sphinxcontrib_django.docstrings.setup(app)
    sphinxcontrib_django.roles.setup(app)
[ "def", "setup", "(", "app", ")", ":", "import", "sphinxcontrib_django", ".", "docstrings", "import", "sphinxcontrib_django", ".", "roles", "# Setup both modules at once. They can also be separately imported to", "# use only fragments of this package.", "sphinxcontrib_django", ".", "docstrings", ".", "setup", "(", "app", ")", "sphinxcontrib_django", ".", "roles", ".", "setup", "(", "app", ")" ]
Allow this module to be used as sphinx extension. This attaches the Sphinx hooks. :type app: sphinx.application.Sphinx
[ "Allow", "this", "module", "to", "be", "used", "as", "sphinx", "extension", ".", "This", "attaches", "the", "Sphinx", "hooks", "." ]
train
https://github.com/edoburu/sphinxcontrib-django/blob/5116ac7f1510a76b1ff58cf7f8d2fab7d8bbe2a9/sphinxcontrib_django/__init__.py#L8-L20
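For context, wiring this setup() hook into a Sphinx build is roughly a one-line addition to conf.py, plus making sure Django is configured before autodoc imports any models. A sketch with placeholder project and settings names:

# conf.py (sketch) -- project and settings names are placeholders.
import os
import django

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myproject.settings')  # placeholder
django.setup()   # Django must be ready before autodoc imports any models

extensions = [
    'sphinx.ext.autodoc',
    'sphinxcontrib_django',   # Sphinx will call the setup(app) shown above
]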
edoburu/sphinxcontrib-django
sphinxcontrib_django/patches.py
patch_django_for_autodoc
def patch_django_for_autodoc():
    """Fix the appearance of some classes in autodoc.

    This avoids query evaluation.
    """
    # Fix Django's manager appearance
    ManagerDescriptor.__get__ = lambda self, *args, **kwargs: self.manager

    # Stop Django from executing DB queries
    models.QuerySet.__repr__ = lambda self: self.__class__.__name__
python
def patch_django_for_autodoc():
    """Fix the appearance of some classes in autodoc.

    This avoids query evaluation.
    """
    # Fix Django's manager appearance
    ManagerDescriptor.__get__ = lambda self, *args, **kwargs: self.manager

    # Stop Django from executing DB queries
    models.QuerySet.__repr__ = lambda self: self.__class__.__name__
[ "def", "patch_django_for_autodoc", "(", ")", ":", "# Fix Django's manager appearance", "ManagerDescriptor", ".", "__get__", "=", "lambda", "self", ",", "*", "args", ",", "*", "*", "kwargs", ":", "self", ".", "manager", "# Stop Django from executing DB queries", "models", ".", "QuerySet", ".", "__repr__", "=", "lambda", "self", ":", "self", ".", "__class__", ".", "__name__" ]
Fix the appearance of some classes in autodoc. This avoids query evaluation.
[ "Fix", "the", "appearance", "of", "some", "classes", "in", "autodoc", "." ]
train
https://github.com/edoburu/sphinxcontrib-django/blob/5116ac7f1510a76b1ff58cf7f8d2fab7d8bbe2a9/sphinxcontrib_django/patches.py#L5-L14
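The patch above relies on plain attribute assignment (monkeypatching). A self-contained illustration of the same idea, without Django, showing why replacing __repr__ keeps documentation tooling from triggering an expensive evaluation:

# Standalone illustration (no Django required).
class LazyQuery:
    def __repr__(self):
        raise RuntimeError("would hit the database here")

LazyQuery.__repr__ = lambda self: self.__class__.__name__   # the patch
print(repr(LazyQuery()))   # prints 'LazyQuery' -- nothing was evaluated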
edoburu/sphinxcontrib-django
sphinxcontrib_django/docstrings.py
setup
def setup(app):
    """Allow this package to be used as Sphinx extension.

    This is also called from the top-level ``__init__.py``.

    :type app: sphinx.application.Sphinx
    """
    from .patches import patch_django_for_autodoc

    # When running, make sure Django doesn't execute querysets
    patch_django_for_autodoc()

    # Generate docstrings for Django model fields
    # Register the docstring processor with sphinx
    app.connect('autodoc-process-docstring', improve_model_docstring)

    # influence skip rules
    app.connect("autodoc-skip-member", autodoc_skip)
python
def setup(app):
    """Allow this package to be used as Sphinx extension.

    This is also called from the top-level ``__init__.py``.

    :type app: sphinx.application.Sphinx
    """
    from .patches import patch_django_for_autodoc

    # When running, make sure Django doesn't execute querysets
    patch_django_for_autodoc()

    # Generate docstrings for Django model fields
    # Register the docstring processor with sphinx
    app.connect('autodoc-process-docstring', improve_model_docstring)

    # influence skip rules
    app.connect("autodoc-skip-member", autodoc_skip)
[ "def", "setup", "(", "app", ")", ":", "from", ".", "patches", "import", "patch_django_for_autodoc", "# When running, make sure Django doesn't execute querysets", "patch_django_for_autodoc", "(", ")", "# Generate docstrings for Django model fields", "# Register the docstring processor with sphinx", "app", ".", "connect", "(", "'autodoc-process-docstring'", ",", "improve_model_docstring", ")", "# influence skip rules", "app", ".", "connect", "(", "\"autodoc-skip-member\"", ",", "autodoc_skip", ")" ]
Allow this package to be used as Sphinx extension. This is also called from the top-level ``__init__.py``. :type app: sphinx.application.Sphinx
[ "Allow", "this", "package", "to", "be", "used", "as", "Sphinx", "extension", ".", "This", "is", "also", "called", "from", "the", "top", "-", "level", "__init__", ".", "py", "." ]
train
https://github.com/edoburu/sphinxcontrib-django/blob/5116ac7f1510a76b1ff58cf7f8d2fab7d8bbe2a9/sphinxcontrib_django/docstrings.py#L43-L59
edoburu/sphinxcontrib-django
sphinxcontrib_django/docstrings.py
autodoc_skip
def autodoc_skip(app, what, name, obj, skip, options):
    """Hook that tells autodoc to include or exclude certain fields.

    Sadly, it doesn't give a reference to the parent object,
    so only the ``name`` can be used for referencing.

    :type app: sphinx.application.Sphinx
    :param what: The parent type, ``class`` or ``module``
    :type what: str
    :param name: The name of the child method/attribute.
    :type name: str
    :param obj: The child value (e.g. a method, dict, or module reference)
    :param options: The current autodoc settings.
    :type options: dict

    .. seealso:: http://www.sphinx-doc.org/en/stable/ext/autodoc.html#event-autodoc-skip-member
    """
    if name in config.EXCLUDE_MEMBERS:
        return True

    if name in config.INCLUDE_MEMBERS:
        return False

    return skip
python
def autodoc_skip(app, what, name, obj, skip, options):
    """Hook that tells autodoc to include or exclude certain fields.

    Sadly, it doesn't give a reference to the parent object,
    so only the ``name`` can be used for referencing.

    :type app: sphinx.application.Sphinx
    :param what: The parent type, ``class`` or ``module``
    :type what: str
    :param name: The name of the child method/attribute.
    :type name: str
    :param obj: The child value (e.g. a method, dict, or module reference)
    :param options: The current autodoc settings.
    :type options: dict

    .. seealso:: http://www.sphinx-doc.org/en/stable/ext/autodoc.html#event-autodoc-skip-member
    """
    if name in config.EXCLUDE_MEMBERS:
        return True

    if name in config.INCLUDE_MEMBERS:
        return False

    return skip
[ "def", "autodoc_skip", "(", "app", ",", "what", ",", "name", ",", "obj", ",", "skip", ",", "options", ")", ":", "if", "name", "in", "config", ".", "EXCLUDE_MEMBERS", ":", "return", "True", "if", "name", "in", "config", ".", "INCLUDE_MEMBERS", ":", "return", "False", "return", "skip" ]
Hook that tells autodoc to include or exclude certain fields. Sadly, it doesn't give a reference to the parent object, so only the ``name`` can be used for referencing. :type app: sphinx.application.Sphinx :param what: The parent type, ``class`` or ``module`` :type what: str :param name: The name of the child method/attribute. :type name: str :param obj: The child value (e.g. a method, dict, or module reference) :param options: The current autodoc settings. :type options: dict .. seealso:: http://www.sphinx-doc.org/en/stable/ext/autodoc.html#event-autodoc-skip-member
[ "Hook", "that", "tells", "autodoc", "to", "include", "or", "exclude", "certain", "fields", "." ]
train
https://github.com/edoburu/sphinxcontrib-django/blob/5116ac7f1510a76b1ff58cf7f8d2fab7d8bbe2a9/sphinxcontrib_django/docstrings.py#L62-L85
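The autodoc-skip-member contract used above: return True to skip a member, False to force-include it, or the incoming skip value to defer to autodoc. A minimal sketch of a project-level handler with made-up member names (the real EXCLUDE_MEMBERS/INCLUDE_MEMBERS live in this package's config module, which is not part of this record):

# Hedged sketch; HIDE/SHOW are illustrative stand-ins for the config lists.
HIDE = {'DoesNotExist', 'MultipleObjectsReturned'}
SHOW = {'__str__'}

def skip_handler(app, what, name, obj, skip, options):
    if name in HIDE:
        return True     # force autodoc to skip the member
    if name in SHOW:
        return False    # force autodoc to document it
    return skip         # otherwise keep autodoc's own decision

def setup(app):
    app.connect('autodoc-skip-member', skip_handler)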
edoburu/sphinxcontrib-django
sphinxcontrib_django/docstrings.py
improve_model_docstring
def improve_model_docstring(app, what, name, obj, options, lines):
    """Hook that improves the autodoc docstrings for Django models.

    :type app: sphinx.application.Sphinx
    :param what: The parent type, ``class`` or ``module``
    :type what: str
    :param name: The dotted path to the child method/attribute.
    :type name: str
    :param obj: The Python object that is being documented.
    :param options: The current autodoc settings.
    :type options: dict
    :param lines: The current documentation lines
    :type lines: list
    """
    if what == 'class':
        _improve_class_docs(app, obj, lines)
    elif what == 'attribute':
        _improve_attribute_docs(obj, name, lines)
    elif what == 'method':
        _improve_method_docs(obj, name, lines)

    # Return the extended docstring
    return lines
python
def improve_model_docstring(app, what, name, obj, options, lines):
    """Hook that improves the autodoc docstrings for Django models.

    :type app: sphinx.application.Sphinx
    :param what: The parent type, ``class`` or ``module``
    :type what: str
    :param name: The dotted path to the child method/attribute.
    :type name: str
    :param obj: The Python object that is being documented.
    :param options: The current autodoc settings.
    :type options: dict
    :param lines: The current documentation lines
    :type lines: list
    """
    if what == 'class':
        _improve_class_docs(app, obj, lines)
    elif what == 'attribute':
        _improve_attribute_docs(obj, name, lines)
    elif what == 'method':
        _improve_method_docs(obj, name, lines)

    # Return the extended docstring
    return lines
[ "def", "improve_model_docstring", "(", "app", ",", "what", ",", "name", ",", "obj", ",", "options", ",", "lines", ")", ":", "if", "what", "==", "'class'", ":", "_improve_class_docs", "(", "app", ",", "obj", ",", "lines", ")", "elif", "what", "==", "'attribute'", ":", "_improve_attribute_docs", "(", "obj", ",", "name", ",", "lines", ")", "elif", "what", "==", "'method'", ":", "_improve_method_docs", "(", "obj", ",", "name", ",", "lines", ")", "# Return the extended docstring", "return", "lines" ]
Hook that improves the autodoc docstrings for Django models. :type app: sphinx.application.Sphinx :param what: The parent type, ``class`` or ``module`` :type what: str :param name: The dotted path to the child method/attribute. :type name: str :param obj: The Python object that is being documented. :param options: The current autodoc settings. :type options: dict :param lines: The current documentation lines :type lines: list
[ "Hook", "that", "improves", "the", "autodoc", "docstrings", "for", "Django", "models", "." ]
train
https://github.com/edoburu/sphinxcontrib-django/blob/5116ac7f1510a76b1ff58cf7f8d2fab7d8bbe2a9/sphinxcontrib_django/docstrings.py#L88-L110
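The autodoc-process-docstring event passes the docstring as a mutable list of lines; handlers edit it in place and Sphinx renders whatever remains. A small sketch of a custom handler with the same signature (names illustrative):

# Hedged sketch: append a note to every class docstring.
def add_note(app, what, name, obj, options, lines):
    if what == 'class':
        lines.append('')
        lines.append('.. note:: Field parameters below are generated automatically.')

def setup(app):
    app.connect('autodoc-process-docstring', add_note)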
edoburu/sphinxcontrib-django
sphinxcontrib_django/docstrings.py
_improve_class_docs
def _improve_class_docs(app, cls, lines):
    """Improve the documentation of a class."""
    if issubclass(cls, models.Model):
        _add_model_fields_as_params(app, cls, lines)
    elif issubclass(cls, forms.Form):
        _add_form_fields(cls, lines)
python
def _improve_class_docs(app, cls, lines):
    """Improve the documentation of a class."""
    if issubclass(cls, models.Model):
        _add_model_fields_as_params(app, cls, lines)
    elif issubclass(cls, forms.Form):
        _add_form_fields(cls, lines)
[ "def", "_improve_class_docs", "(", "app", ",", "cls", ",", "lines", ")", ":", "if", "issubclass", "(", "cls", ",", "models", ".", "Model", ")", ":", "_add_model_fields_as_params", "(", "app", ",", "cls", ",", "lines", ")", "elif", "issubclass", "(", "cls", ",", "forms", ".", "Form", ")", ":", "_add_form_fields", "(", "cls", ",", "lines", ")" ]
Improve the documentation of a class.
[ "Improve", "the", "documentation", "of", "a", "class", "." ]
train
https://github.com/edoburu/sphinxcontrib-django/blob/5116ac7f1510a76b1ff58cf7f8d2fab7d8bbe2a9/sphinxcontrib_django/docstrings.py#L113-L118
edoburu/sphinxcontrib-django
sphinxcontrib_django/docstrings.py
_add_model_fields_as_params
def _add_model_fields_as_params(app, obj, lines):
    """Improve the documentation of a Django model subclass.

    This adds all model fields as parameters to the ``__init__()`` method.

    :type app: sphinx.application.Sphinx
    :type lines: list
    """
    for field in obj._meta.get_fields():
        try:
            help_text = strip_tags(force_text(field.help_text))
            verbose_name = force_text(field.verbose_name).capitalize()
        except AttributeError:
            # e.g. ManyToOneRel
            continue

        # Add parameter
        if help_text:
            lines.append(u':param %s: %s' % (field.name, help_text))
        else:
            lines.append(u':param %s: %s' % (field.name, verbose_name))

        # Add type
        lines.append(_get_field_type(field))

    if 'sphinx.ext.inheritance_diagram' in app.extensions and \
            'sphinx.ext.graphviz' in app.extensions and \
            not any('inheritance-diagram::' in line for line in lines):
        lines.append('.. inheritance-diagram::')
python
def _add_model_fields_as_params(app, obj, lines):
    """Improve the documentation of a Django model subclass.

    This adds all model fields as parameters to the ``__init__()`` method.

    :type app: sphinx.application.Sphinx
    :type lines: list
    """
    for field in obj._meta.get_fields():
        try:
            help_text = strip_tags(force_text(field.help_text))
            verbose_name = force_text(field.verbose_name).capitalize()
        except AttributeError:
            # e.g. ManyToOneRel
            continue

        # Add parameter
        if help_text:
            lines.append(u':param %s: %s' % (field.name, help_text))
        else:
            lines.append(u':param %s: %s' % (field.name, verbose_name))

        # Add type
        lines.append(_get_field_type(field))

    if 'sphinx.ext.inheritance_diagram' in app.extensions and \
            'sphinx.ext.graphviz' in app.extensions and \
            not any('inheritance-diagram::' in line for line in lines):
        lines.append('.. inheritance-diagram::')
[ "def", "_add_model_fields_as_params", "(", "app", ",", "obj", ",", "lines", ")", ":", "for", "field", "in", "obj", ".", "_meta", ".", "get_fields", "(", ")", ":", "try", ":", "help_text", "=", "strip_tags", "(", "force_text", "(", "field", ".", "help_text", ")", ")", "verbose_name", "=", "force_text", "(", "field", ".", "verbose_name", ")", ".", "capitalize", "(", ")", "except", "AttributeError", ":", "# e.g. ManyToOneRel", "continue", "# Add parameter", "if", "help_text", ":", "lines", ".", "append", "(", "u':param %s: %s'", "%", "(", "field", ".", "name", ",", "help_text", ")", ")", "else", ":", "lines", ".", "append", "(", "u':param %s: %s'", "%", "(", "field", ".", "name", ",", "verbose_name", ")", ")", "# Add type", "lines", ".", "append", "(", "_get_field_type", "(", "field", ")", ")", "if", "'sphinx.ext.inheritance_diagram'", "in", "app", ".", "extensions", "and", "'sphinx.ext.graphviz'", "in", "app", ".", "extensions", "and", "not", "any", "(", "'inheritance-diagram::'", "in", "line", "for", "line", "in", "lines", ")", ":", "lines", ".", "append", "(", "'.. inheritance-diagram::'", ")" ]
Improve the documentation of a Django model subclass. This adds all model fields as parameters to the ``__init__()`` method. :type app: sphinx.application.Sphinx :type lines: list
[ "Improve", "the", "documentation", "of", "a", "Django", "model", "subclass", "." ]
train
https://github.com/edoburu/sphinxcontrib-django/blob/5116ac7f1510a76b1ff58cf7f8d2fab7d8bbe2a9/sphinxcontrib_django/docstrings.py#L121-L149
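To make the effect concrete, a hedged before/after sketch for a hypothetical model; the :param: lines follow directly from the code above, while the exact :type: wording comes from _get_field_type(), which is not shown in this record:

# Hypothetical model:
#
#     class Article(models.Model):
#         title = models.CharField('title', max_length=100,
#                                  help_text='Shown on the front page.')
#         slug = models.SlugField('slug')
#
# Lines the hook would append to the class docstring (plus one ':type' line per
# field, produced by _get_field_type()):
expected = [
    ':param title: Shown on the front page.',   # help_text takes precedence
    ':param slug: Slug',                        # falls back to the capitalized verbose name
]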
edoburu/sphinxcontrib-django
sphinxcontrib_django/docstrings.py
_add_form_fields
def _add_form_fields(obj, lines):
    """Improve the documentation of a Django Form class.

    This highlights the available fields in the form.
    """
    lines.append("**Form fields:**")
    lines.append("")
    for name, field in obj.base_fields.items():
        field_type = "{}.{}".format(field.__class__.__module__,
                                    field.__class__.__name__)
        tpl = "* ``{name}``: {label} (:class:`~{field_type}`)"
        lines.append(tpl.format(
            name=name,
            field=field,
            label=field.label or name.replace('_', ' ').title(),
            field_type=field_type
        ))
python
def _add_form_fields(obj, lines):
    """Improve the documentation of a Django Form class.

    This highlights the available fields in the form.
    """
    lines.append("**Form fields:**")
    lines.append("")
    for name, field in obj.base_fields.items():
        field_type = "{}.{}".format(field.__class__.__module__,
                                    field.__class__.__name__)
        tpl = "* ``{name}``: {label} (:class:`~{field_type}`)"
        lines.append(tpl.format(
            name=name,
            field=field,
            label=field.label or name.replace('_', ' ').title(),
            field_type=field_type
        ))
[ "def", "_add_form_fields", "(", "obj", ",", "lines", ")", ":", "lines", ".", "append", "(", "\"**Form fields:**\"", ")", "lines", ".", "append", "(", "\"\"", ")", "for", "name", ",", "field", "in", "obj", ".", "base_fields", ".", "items", "(", ")", ":", "field_type", "=", "\"{}.{}\"", ".", "format", "(", "field", ".", "__class__", ".", "__module__", ",", "field", ".", "__class__", ".", "__name__", ")", "tpl", "=", "\"* ``{name}``: {label} (:class:`~{field_type}`)\"", "lines", ".", "append", "(", "tpl", ".", "format", "(", "name", "=", "name", ",", "field", "=", "field", ",", "label", "=", "field", ".", "label", "or", "name", ".", "replace", "(", "'_'", ",", "' '", ")", ".", "title", "(", ")", ",", "field_type", "=", "field_type", ")", ")" ]
Improve the documentation of a Django Form class. This highlights the available fields in the form.
[ "Improve", "the", "documentation", "of", "a", "Django", "Form", "class", "." ]
train
https://github.com/edoburu/sphinxcontrib-django/blob/5116ac7f1510a76b1ff58cf7f8d2fab7d8bbe2a9/sphinxcontrib_django/docstrings.py#L152-L167
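A hedged illustration of the lines this hook appends for a hypothetical form (the field classes and labels are assumptions):

# Hypothetical form:
#
#     class ContactForm(forms.Form):
#         email = forms.EmailField(label='Email address')
#         full_name = forms.CharField()   # no explicit label
#
# Lines appended by the hook:
expected = [
    '**Form fields:**',
    '',
    '* ``email``: Email address (:class:`~django.forms.fields.EmailField`)',
    '* ``full_name``: Full Name (:class:`~django.forms.fields.CharField`)',
]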
edoburu/sphinxcontrib-django
sphinxcontrib_django/docstrings.py
_improve_attribute_docs
def _improve_attribute_docs(obj, name, lines):
    """Improve the documentation of various attributes.

    This improves the navigation between related objects.

    :param obj: the instance of the object to document.
    :param name: full dotted path to the object.
    :param lines: expected documentation lines.
    """
    if obj is None:
        # Happens with form attributes.
        return

    if isinstance(obj, DeferredAttribute):
        # This only points to a field name, not a field.
        # Get the field by importing the name.
        cls_path, field_name = name.rsplit('.', 1)
        model = import_string(cls_path)
        field = model._meta.get_field(obj.field_name)

        del lines[:]  # lines.clear() is Python 3 only
        lines.append("**Model field:** {label}".format(
            label=field.verbose_name
        ))
    elif isinstance(obj, _FIELD_DESCRIPTORS):
        # These
        del lines[:]
        lines.append("**Model field:** {label}".format(
            label=obj.field.verbose_name
        ))

        if isinstance(obj, FileDescriptor):
            lines.append("**Return type:** :class:`~django.db.models.fields.files.FieldFile`")
        elif PhoneNumberDescriptor is not None and isinstance(obj, PhoneNumberDescriptor):
            lines.append("**Return type:** :class:`~phonenumber_field.phonenumber.PhoneNumber`")
    elif isinstance(obj, related_descriptors.ForwardManyToOneDescriptor):
        # Display a reasonable output for forward descriptors.
        related_model = obj.field.remote_field.model
        if isinstance(related_model, str):
            cls_path = related_model
        else:
            cls_path = "{}.{}".format(related_model.__module__, related_model.__name__)

        del lines[:]
        lines.append("**Model field:** {label}, "
                     "accesses the :class:`~{cls_path}` model.".format(
                         label=obj.field.verbose_name,
                         cls_path=cls_path
                     ))
    elif isinstance(obj, related_descriptors.ReverseOneToOneDescriptor):
        related_model = obj.related.related_model
        if isinstance(related_model, str):
            cls_path = related_model
        else:
            cls_path = "{}.{}".format(related_model.__module__, related_model.__name__)

        del lines[:]
        lines.append("**Model field:** {label}, "
                     "accesses the :class:`~{cls_path}` model.".format(
                         label=obj.related.field.verbose_name,
                         cls_path=cls_path
                     ))
    elif isinstance(obj, related_descriptors.ReverseManyToOneDescriptor):
        related_model = obj.rel.related_model
        if isinstance(related_model, str):
            cls_path = related_model
        else:
            cls_path = "{}.{}".format(related_model.__module__, related_model.__name__)

        del lines[:]
        lines.append("**Model field:** {label}, "
                     "accesses the M2M :class:`~{cls_path}` model.".format(
                         label=obj.field.verbose_name,
                         cls_path=cls_path
                     ))
    elif isinstance(obj, (models.Manager, ManagerDescriptor)):
        # Somehow the 'objects' manager doesn't pass through the docstrings.
        module, cls_name, field_name = name.rsplit('.', 2)
        lines.append("Django manager to access the ORM")
        tpl = "Use ``{cls_name}.objects.all()`` to fetch all objects."
        lines.append(tpl.format(cls_name=cls_name))
python
def _improve_attribute_docs(obj, name, lines):
    """Improve the documentation of various attributes.

    This improves the navigation between related objects.

    :param obj: the instance of the object to document.
    :param name: full dotted path to the object.
    :param lines: expected documentation lines.
    """
    if obj is None:
        # Happens with form attributes.
        return

    if isinstance(obj, DeferredAttribute):
        # This only points to a field name, not a field.
        # Get the field by importing the name.
        cls_path, field_name = name.rsplit('.', 1)
        model = import_string(cls_path)
        field = model._meta.get_field(obj.field_name)

        del lines[:]  # lines.clear() is Python 3 only
        lines.append("**Model field:** {label}".format(
            label=field.verbose_name
        ))
    elif isinstance(obj, _FIELD_DESCRIPTORS):
        # These
        del lines[:]
        lines.append("**Model field:** {label}".format(
            label=obj.field.verbose_name
        ))

        if isinstance(obj, FileDescriptor):
            lines.append("**Return type:** :class:`~django.db.models.fields.files.FieldFile`")
        elif PhoneNumberDescriptor is not None and isinstance(obj, PhoneNumberDescriptor):
            lines.append("**Return type:** :class:`~phonenumber_field.phonenumber.PhoneNumber`")
    elif isinstance(obj, related_descriptors.ForwardManyToOneDescriptor):
        # Display a reasonable output for forward descriptors.
        related_model = obj.field.remote_field.model
        if isinstance(related_model, str):
            cls_path = related_model
        else:
            cls_path = "{}.{}".format(related_model.__module__, related_model.__name__)

        del lines[:]
        lines.append("**Model field:** {label}, "
                     "accesses the :class:`~{cls_path}` model.".format(
                         label=obj.field.verbose_name,
                         cls_path=cls_path
                     ))
    elif isinstance(obj, related_descriptors.ReverseOneToOneDescriptor):
        related_model = obj.related.related_model
        if isinstance(related_model, str):
            cls_path = related_model
        else:
            cls_path = "{}.{}".format(related_model.__module__, related_model.__name__)

        del lines[:]
        lines.append("**Model field:** {label}, "
                     "accesses the :class:`~{cls_path}` model.".format(
                         label=obj.related.field.verbose_name,
                         cls_path=cls_path
                     ))
    elif isinstance(obj, related_descriptors.ReverseManyToOneDescriptor):
        related_model = obj.rel.related_model
        if isinstance(related_model, str):
            cls_path = related_model
        else:
            cls_path = "{}.{}".format(related_model.__module__, related_model.__name__)

        del lines[:]
        lines.append("**Model field:** {label}, "
                     "accesses the M2M :class:`~{cls_path}` model.".format(
                         label=obj.field.verbose_name,
                         cls_path=cls_path
                     ))
    elif isinstance(obj, (models.Manager, ManagerDescriptor)):
        # Somehow the 'objects' manager doesn't pass through the docstrings.
        module, cls_name, field_name = name.rsplit('.', 2)
        lines.append("Django manager to access the ORM")
        tpl = "Use ``{cls_name}.objects.all()`` to fetch all objects."
        lines.append(tpl.format(cls_name=cls_name))
[ "def", "_improve_attribute_docs", "(", "obj", ",", "name", ",", "lines", ")", ":", "if", "obj", "is", "None", ":", "# Happens with form attributes.", "return", "if", "isinstance", "(", "obj", ",", "DeferredAttribute", ")", ":", "# This only points to a field name, not a field.", "# Get the field by importing the name.", "cls_path", ",", "field_name", "=", "name", ".", "rsplit", "(", "'.'", ",", "1", ")", "model", "=", "import_string", "(", "cls_path", ")", "field", "=", "model", ".", "_meta", ".", "get_field", "(", "obj", ".", "field_name", ")", "del", "lines", "[", ":", "]", "# lines.clear() is Python 3 only", "lines", ".", "append", "(", "\"**Model field:** {label}\"", ".", "format", "(", "label", "=", "field", ".", "verbose_name", ")", ")", "elif", "isinstance", "(", "obj", ",", "_FIELD_DESCRIPTORS", ")", ":", "# These", "del", "lines", "[", ":", "]", "lines", ".", "append", "(", "\"**Model field:** {label}\"", ".", "format", "(", "label", "=", "obj", ".", "field", ".", "verbose_name", ")", ")", "if", "isinstance", "(", "obj", ",", "FileDescriptor", ")", ":", "lines", ".", "append", "(", "\"**Return type:** :class:`~django.db.models.fields.files.FieldFile`\"", ")", "elif", "PhoneNumberDescriptor", "is", "not", "None", "and", "isinstance", "(", "obj", ",", "PhoneNumberDescriptor", ")", ":", "lines", ".", "append", "(", "\"**Return type:** :class:`~phonenumber_field.phonenumber.PhoneNumber`\"", ")", "elif", "isinstance", "(", "obj", ",", "related_descriptors", ".", "ForwardManyToOneDescriptor", ")", ":", "# Display a reasonable output for forward descriptors.", "related_model", "=", "obj", ".", "field", ".", "remote_field", ".", "model", "if", "isinstance", "(", "related_model", ",", "str", ")", ":", "cls_path", "=", "related_model", "else", ":", "cls_path", "=", "\"{}.{}\"", ".", "format", "(", "related_model", ".", "__module__", ",", "related_model", ".", "__name__", ")", "del", "lines", "[", ":", "]", "lines", ".", "append", "(", "\"**Model field:** {label}, \"", "\"accesses the :class:`~{cls_path}` model.\"", ".", "format", "(", "label", "=", "obj", ".", "field", ".", "verbose_name", ",", "cls_path", "=", "cls_path", ")", ")", "elif", "isinstance", "(", "obj", ",", "related_descriptors", ".", "ReverseOneToOneDescriptor", ")", ":", "related_model", "=", "obj", ".", "related", ".", "related_model", "if", "isinstance", "(", "related_model", ",", "str", ")", ":", "cls_path", "=", "related_model", "else", ":", "cls_path", "=", "\"{}.{}\"", ".", "format", "(", "related_model", ".", "__module__", ",", "related_model", ".", "__name__", ")", "del", "lines", "[", ":", "]", "lines", ".", "append", "(", "\"**Model field:** {label}, \"", "\"accesses the :class:`~{cls_path}` model.\"", ".", "format", "(", "label", "=", "obj", ".", "related", ".", "field", ".", "verbose_name", ",", "cls_path", "=", "cls_path", ")", ")", "elif", "isinstance", "(", "obj", ",", "related_descriptors", ".", "ReverseManyToOneDescriptor", ")", ":", "related_model", "=", "obj", ".", "rel", ".", "related_model", "if", "isinstance", "(", "related_model", ",", "str", ")", ":", "cls_path", "=", "related_model", "else", ":", "cls_path", "=", "\"{}.{}\"", ".", "format", "(", "related_model", ".", "__module__", ",", "related_model", ".", "__name__", ")", "del", "lines", "[", ":", "]", "lines", ".", "append", "(", "\"**Model field:** {label}, \"", "\"accesses the M2M :class:`~{cls_path}` model.\"", ".", "format", "(", "label", "=", "obj", ".", "field", ".", "verbose_name", ",", "cls_path", "=", "cls_path", ")", ")", "elif", 
"isinstance", "(", "obj", ",", "(", "models", ".", "Manager", ",", "ManagerDescriptor", ")", ")", ":", "# Somehow the 'objects' manager doesn't pass through the docstrings.", "module", ",", "cls_name", ",", "field_name", "=", "name", ".", "rsplit", "(", "'.'", ",", "2", ")", "lines", ".", "append", "(", "\"Django manager to access the ORM\"", ")", "tpl", "=", "\"Use ``{cls_name}.objects.all()`` to fetch all objects.\"", "lines", ".", "append", "(", "tpl", ".", "format", "(", "cls_name", "=", "cls_name", ")", ")" ]
Improve the documentation of various attributes. This improves the navigation between related objects. :param obj: the instance of the object to document. :param name: full dotted path to the object. :param lines: expected documentation lines.
[ "Improve", "the", "documentation", "of", "various", "attributes", ".", "This", "improves", "the", "navigation", "between", "related", "objects", "." ]
train
https://github.com/edoburu/sphinxcontrib-django/blob/5116ac7f1510a76b1ff58cf7f8d2fab7d8bbe2a9/sphinxcontrib_django/docstrings.py#L195-L268
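As a hedged example of the ForwardManyToOneDescriptor branch above: for a hypothetical ForeignKey named author pointing at django.contrib.auth.models.User with verbose_name 'author', the original docstring lines would be replaced with roughly:

# Hypothetical forward ForeignKey:
#
#     author = models.ForeignKey('auth.User', verbose_name='author',
#                                on_delete=models.CASCADE)
#
# Replacement docstring produced by the descriptor branch:
expected = [
    '**Model field:** author, '
    'accesses the :class:`~django.contrib.auth.models.User` model.',
]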