repository_name
stringlengths 5
67
| func_path_in_repository
stringlengths 4
234
| func_name
stringlengths 0
314
| whole_func_string
stringlengths 52
3.87M
| language
stringclasses 6
values | func_code_string
stringlengths 52
3.87M
| func_documentation_string
stringlengths 1
47.2k
| func_code_url
stringlengths 85
339
|
---|---|---|---|---|---|---|---|
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.forget | def forget(self, *keys):
"""
Remove an item from the collection by key.
:param keys: The keys to remove
:type keys: tuple
:rtype: Collection
"""
keys = reversed(sorted(keys))
for key in keys:
del self[key]
return self | python | def forget(self, *keys):
"""
Remove an item from the collection by key.
:param keys: The keys to remove
:type keys: tuple
:rtype: Collection
"""
keys = reversed(sorted(keys))
for key in keys:
del self[key]
return self | Remove an item from the collection by key.
:param keys: The keys to remove
:type keys: tuple
:rtype: Collection | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L308-L322 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.get | def get(self, key, default=None):
"""
Get an element of the collection.
:param key: The index of the element
:type key: mixed
:param default: The default value to return
:type default: mixed
:rtype: mixed
"""
try:
return self.items[key]
except IndexError:
return value(default) | python | def get(self, key, default=None):
"""
Get an element of the collection.
:param key: The index of the element
:type key: mixed
:param default: The default value to return
:type default: mixed
:rtype: mixed
"""
try:
return self.items[key]
except IndexError:
return value(default) | Get an element of the collection.
:param key: The index of the element
:type key: mixed
:param default: The default value to return
:type default: mixed
:rtype: mixed | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L324-L339 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.implode | def implode(self, value, glue=''):
"""
Concatenate values of a given key as a string.
:param value: The value
:type value: str
:param glue: The glue
:type glue: str
:rtype: str
"""
first = self.first()
if not isinstance(first, (basestring)):
return glue.join(self.pluck(value).all())
return value.join(self.items) | python | def implode(self, value, glue=''):
"""
Concatenate values of a given key as a string.
:param value: The value
:type value: str
:param glue: The glue
:type glue: str
:rtype: str
"""
first = self.first()
if not isinstance(first, (basestring)):
return glue.join(self.pluck(value).all())
return value.join(self.items) | Concatenate values of a given key as a string.
:param value: The value
:type value: str
:param glue: The glue
:type glue: str
:rtype: str | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L341-L358 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.last | def last(self, callback=None, default=None):
"""
Get the last item of the collection.
:param default: The default value
:type default: mixed
"""
if callback is not None:
for val in reversed(self.items):
if callback(val):
return val
return value(default)
if len(self.items) > 0:
return self.items[-1]
else:
return default | python | def last(self, callback=None, default=None):
"""
Get the last item of the collection.
:param default: The default value
:type default: mixed
"""
if callback is not None:
for val in reversed(self.items):
if callback(val):
return val
return value(default)
if len(self.items) > 0:
return self.items[-1]
else:
return default | Get the last item of the collection.
:param default: The default value
:type default: mixed | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L360-L377 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.pluck | def pluck(self, value, key=None):
"""
Get a list with the values of a given key.
:rtype: Collection
"""
if key:
return dict(map(lambda x: (data_get(x, key), data_get(x, value)), self.items))
else:
results = list(map(lambda x: data_get(x, value), self.items))
return self.__class__(results) | python | def pluck(self, value, key=None):
"""
Get a list with the values of a given key.
:rtype: Collection
"""
if key:
return dict(map(lambda x: (data_get(x, key), data_get(x, value)), self.items))
else:
results = list(map(lambda x: data_get(x, value), self.items))
return self.__class__(results) | Get a list with the values of a given key.
:rtype: Collection | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L379-L390 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.map | def map(self, callback):
"""
Run a map over each of the item.
:param callback: The map function
:type callback: callable
:rtype: Collection
"""
return self.__class__(list(map(callback, self.items))) | python | def map(self, callback):
"""
Run a map over each of the item.
:param callback: The map function
:type callback: callable
:rtype: Collection
"""
return self.__class__(list(map(callback, self.items))) | Run a map over each of the item.
:param callback: The map function
:type callback: callable
:rtype: Collection | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L400-L409 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.max | def max(self, key=None):
"""
Get the max value of a given key.
:param key: The key
:type key: str or None
:rtype: mixed
"""
def _max(result, item):
val = data_get(item, key)
if result is None or val > result:
return value
return result
return self.reduce(_max) | python | def max(self, key=None):
"""
Get the max value of a given key.
:param key: The key
:type key: str or None
:rtype: mixed
"""
def _max(result, item):
val = data_get(item, key)
if result is None or val > result:
return value
return result
return self.reduce(_max) | Get the max value of a given key.
:param key: The key
:type key: str or None
:rtype: mixed | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L411-L429 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.min | def min(self, key=None):
"""
Get the min value of a given key.
:param key: The key
:type key: str or None
:rtype: mixed
"""
def _min(result, item):
val = data_get(item, key)
if result is None or val < result:
return value
return result
return self.reduce(_min) | python | def min(self, key=None):
"""
Get the min value of a given key.
:param key: The key
:type key: str or None
:rtype: mixed
"""
def _min(result, item):
val = data_get(item, key)
if result is None or val < result:
return value
return result
return self.reduce(_min) | Get the min value of a given key.
:param key: The key
:type key: str or None
:rtype: mixed | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L431-L449 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.for_page | def for_page(self, page, per_page):
"""
"Paginate" the collection by slicing it into a smaller collection.
:param page: The current page
:type page: int
:param per_page: Number of items by slice
:type per_page: int
:rtype: Collection
"""
start = (page - 1) * per_page
return self[start:start + per_page] | python | def for_page(self, page, per_page):
"""
"Paginate" the collection by slicing it into a smaller collection.
:param page: The current page
:type page: int
:param per_page: Number of items by slice
:type per_page: int
:rtype: Collection
"""
start = (page - 1) * per_page
return self[start:start + per_page] | "Paginate" the collection by slicing it into a smaller collection.
:param page: The current page
:type page: int
:param per_page: Number of items by slice
:type per_page: int
:rtype: Collection | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L451-L465 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.pull | def pull(self, key, default=None):
"""
Pulls an item from the collection.
:param key: The key
:type key: mixed
:param default: The default value
:type default: mixed
:rtype: mixed
"""
val = self.get(key, default)
self.forget(key)
return val | python | def pull(self, key, default=None):
"""
Pulls an item from the collection.
:param key: The key
:type key: mixed
:param default: The default value
:type default: mixed
:rtype: mixed
"""
val = self.get(key, default)
self.forget(key)
return val | Pulls an item from the collection.
:param key: The key
:type key: mixed
:param default: The default value
:type default: mixed
:rtype: mixed | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L519-L535 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.reject | def reject(self, callback):
"""
Create a collection of all elements that do not pass a given truth test.
:param callback: The truth test
:type callback: callable
:rtype: Collection
"""
if self._use_as_callable(callback):
return self.filter(lambda item: not callback(item))
return self.filter(lambda item: item != callback) | python | def reject(self, callback):
"""
Create a collection of all elements that do not pass a given truth test.
:param callback: The truth test
:type callback: callable
:rtype: Collection
"""
if self._use_as_callable(callback):
return self.filter(lambda item: not callback(item))
return self.filter(lambda item: item != callback) | Create a collection of all elements that do not pass a given truth test.
:param callback: The truth test
:type callback: callable
:rtype: Collection | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L567-L579 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.sort | def sort(self, callback=None):
"""
Sort through each item with a callback.
:param callback: The callback
:type callback: callable or None
:rtype: Collection
"""
items = self.items
if callback:
return self.__class__(sorted(items, key=callback))
else:
return self.__class__(sorted(items)) | python | def sort(self, callback=None):
"""
Sort through each item with a callback.
:param callback: The callback
:type callback: callable or None
:rtype: Collection
"""
items = self.items
if callback:
return self.__class__(sorted(items, key=callback))
else:
return self.__class__(sorted(items)) | Sort through each item with a callback.
:param callback: The callback
:type callback: callable or None
:rtype: Collection | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L597-L611 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.sum | def sum(self, callback=None):
"""
Get the sum of the given values.
:param callback: The callback
:type callback: callable or string or None
:rtype: mixed
"""
if callback is None:
return sum(self.items)
callback = self._value_retriever(callback)
return self.reduce(lambda result, item: (result or 0) + callback(item)) | python | def sum(self, callback=None):
"""
Get the sum of the given values.
:param callback: The callback
:type callback: callable or string or None
:rtype: mixed
"""
if callback is None:
return sum(self.items)
callback = self._value_retriever(callback)
return self.reduce(lambda result, item: (result or 0) + callback(item)) | Get the sum of the given values.
:param callback: The callback
:type callback: callable or string or None
:rtype: mixed | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L613-L627 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.unique | def unique(self, key=None):
"""
Return only unique items from the collection list.
:param key: The key to chech uniqueness on
:type key: mixed
:rtype: Collection
"""
if key is None:
seen = set()
seen_add = seen.add
return self.__class__([x for x in self.items if not (x in seen or seen_add(x))])
key = self._value_retriever(key)
exists = []
def _check(item):
id_ = key(item)
if id_ in exists:
return True
exists.append(id_)
return self.reject(_check) | python | def unique(self, key=None):
"""
Return only unique items from the collection list.
:param key: The key to chech uniqueness on
:type key: mixed
:rtype: Collection
"""
if key is None:
seen = set()
seen_add = seen.add
return self.__class__([x for x in self.items if not (x in seen or seen_add(x))])
key = self._value_retriever(key)
exists = []
def _check(item):
id_ = key(item)
if id_ in exists:
return True
exists.append(id_)
return self.reject(_check) | Return only unique items from the collection list.
:param key: The key to chech uniqueness on
:type key: mixed
:rtype: Collection | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L643-L669 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.zip | def zip(self, *items):
"""
Zip the collection together with one or more arrays.
:param items: The items to zip
:type items: list
:rtype: Collection
"""
return self.__class__(list(zip(self.items, *items))) | python | def zip(self, *items):
"""
Zip the collection together with one or more arrays.
:param items: The items to zip
:type items: list
:rtype: Collection
"""
return self.__class__(list(zip(self.items, *items))) | Zip the collection together with one or more arrays.
:param items: The items to zip
:type items: list
:rtype: Collection | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L687-L696 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.merge | def merge(self, items):
"""
Merge the collection with the given items.
:param items: The items to merge
:type items: list or Collection
:rtype: Collection
"""
if isinstance(items, BaseCollection):
items = items.all()
if not isinstance(items, list):
raise ValueError('Unable to merge uncompatible types')
self._items += items
return self | python | def merge(self, items):
"""
Merge the collection with the given items.
:param items: The items to merge
:type items: list or Collection
:rtype: Collection
"""
if isinstance(items, BaseCollection):
items = items.all()
if not isinstance(items, list):
raise ValueError('Unable to merge uncompatible types')
self._items += items
return self | Merge the collection with the given items.
:param items: The items to merge
:type items: list or Collection
:rtype: Collection | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L701-L718 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.transform | def transform(self, callback):
"""
Transform each item in the collection using a callback.
:param callback: The callback
:type callback: callable
:rtype: Collection
"""
self._items = self.map(callback).all()
return self | python | def transform(self, callback):
"""
Transform each item in the collection using a callback.
:param callback: The callback
:type callback: callable
:rtype: Collection
"""
self._items = self.map(callback).all()
return self | Transform each item in the collection using a callback.
:param callback: The callback
:type callback: callable
:rtype: Collection | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L720-L731 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection._value_retriever | def _value_retriever(self, value):
"""
Get a value retrieving callback.
:type value: mixed
:rtype: callable
"""
if self._use_as_callable(value):
return value
return lambda item: data_get(item, value) | python | def _value_retriever(self, value):
"""
Get a value retrieving callback.
:type value: mixed
:rtype: callable
"""
if self._use_as_callable(value):
return value
return lambda item: data_get(item, value) | Get a value retrieving callback.
:type value: mixed
:rtype: callable | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L733-L744 |
sdispater/backpack | backpack/collections/base_collection.py | BaseCollection.serialize | def serialize(self):
"""
Get the collection of items as a serialized object (ready to be json encoded).
:rtype: dict or list
"""
def _serialize(value):
if hasattr(value, 'serialize'):
return value.serialize()
elif hasattr(value, 'to_dict'):
return value.to_dict()
else:
return value
return list(map(_serialize, self.items)) | python | def serialize(self):
"""
Get the collection of items as a serialized object (ready to be json encoded).
:rtype: dict or list
"""
def _serialize(value):
if hasattr(value, 'serialize'):
return value.serialize()
elif hasattr(value, 'to_dict'):
return value.to_dict()
else:
return value
return list(map(_serialize, self.items)) | Get the collection of items as a serialized object (ready to be json encoded).
:rtype: dict or list | https://github.com/sdispater/backpack/blob/764e7f79fd2b1c1ac4883d8e5c9da5c65dfc875e/backpack/collections/base_collection.py#L756-L771 |
orsinium/deal | deal/core.py | _Base.validate | def validate(self, *args, **kwargs):
"""
Step 4 (6 for invariant). Process contract (validator)
"""
# Schemes validation interface
if is_scheme(self.validator):
params = getcallargs(self.function, *args, **kwargs)
params.update(kwargs)
validator = self.validator(data=params, request=None)
if validator.is_valid():
return
raise self.exception(validator.errors)
# Simple validation interface
if hasattr(self.validator, 'is_valid'):
validator = self.validator(*args, **kwargs)
# is valid
if validator.is_valid():
return
# is invalid
if hasattr(validator, 'errors'):
raise self.exception(validator.errors)
if hasattr(validator, '_errors'):
raise self.exception(validator._errors)
raise self.exception
validation_result = self.validator(*args, **kwargs)
# is invalid (validator return error message)
if isinstance(validation_result, string_types):
raise self.exception(validation_result)
# is valid (truely result)
if validation_result:
return
# is invalid (falsy result)
raise self.exception | python | def validate(self, *args, **kwargs):
"""
Step 4 (6 for invariant). Process contract (validator)
"""
# Schemes validation interface
if is_scheme(self.validator):
params = getcallargs(self.function, *args, **kwargs)
params.update(kwargs)
validator = self.validator(data=params, request=None)
if validator.is_valid():
return
raise self.exception(validator.errors)
# Simple validation interface
if hasattr(self.validator, 'is_valid'):
validator = self.validator(*args, **kwargs)
# is valid
if validator.is_valid():
return
# is invalid
if hasattr(validator, 'errors'):
raise self.exception(validator.errors)
if hasattr(validator, '_errors'):
raise self.exception(validator._errors)
raise self.exception
validation_result = self.validator(*args, **kwargs)
# is invalid (validator return error message)
if isinstance(validation_result, string_types):
raise self.exception(validation_result)
# is valid (truely result)
if validation_result:
return
# is invalid (falsy result)
raise self.exception | Step 4 (6 for invariant). Process contract (validator) | https://github.com/orsinium/deal/blob/e23c716216543d0080a956250fb45d9e170c3940/deal/core.py#L32-L66 |
orsinium/deal | deal/core.py | Pre.patched_function | def patched_function(self, *args, **kwargs):
"""
Step 3. Wrapped function calling.
"""
self.validate(*args, **kwargs)
return self.function(*args, **kwargs) | python | def patched_function(self, *args, **kwargs):
"""
Step 3. Wrapped function calling.
"""
self.validate(*args, **kwargs)
return self.function(*args, **kwargs) | Step 3. Wrapped function calling. | https://github.com/orsinium/deal/blob/e23c716216543d0080a956250fb45d9e170c3940/deal/core.py#L92-L97 |
orsinium/deal | deal/core.py | Post.patched_function | def patched_function(self, *args, **kwargs):
"""
Step 3. Wrapped function calling.
"""
result = self.function(*args, **kwargs)
self.validate(result)
return result | python | def patched_function(self, *args, **kwargs):
"""
Step 3. Wrapped function calling.
"""
result = self.function(*args, **kwargs)
self.validate(result)
return result | Step 3. Wrapped function calling. | https://github.com/orsinium/deal/blob/e23c716216543d0080a956250fb45d9e170c3940/deal/core.py#L107-L113 |
orsinium/deal | deal/core.py | InvariantedClass._validate | def _validate(self):
"""
Step 5 (1st flow) or Step 4 (2nd flow). Process contract for object.
"""
# disable methods matching before validation
self._disable_patching = True
# validation by Invariant.validate
self._validate_base(self)
# enable methods matching after validation
self._disable_patching = False | python | def _validate(self):
"""
Step 5 (1st flow) or Step 4 (2nd flow). Process contract for object.
"""
# disable methods matching before validation
self._disable_patching = True
# validation by Invariant.validate
self._validate_base(self)
# enable methods matching after validation
self._disable_patching = False | Step 5 (1st flow) or Step 4 (2nd flow). Process contract for object. | https://github.com/orsinium/deal/blob/e23c716216543d0080a956250fb45d9e170c3940/deal/core.py#L119-L128 |
orsinium/deal | deal/core.py | InvariantedClass._patched_method | def _patched_method(self, method, *args, **kwargs):
"""
Step 4 (1st flow). Call method
"""
self._validate()
result = method(*args, **kwargs)
self._validate()
return result | python | def _patched_method(self, method, *args, **kwargs):
"""
Step 4 (1st flow). Call method
"""
self._validate()
result = method(*args, **kwargs)
self._validate()
return result | Step 4 (1st flow). Call method | https://github.com/orsinium/deal/blob/e23c716216543d0080a956250fb45d9e170c3940/deal/core.py#L130-L137 |
benmack/eo-box | eobox/raster/gdalutils.py | buildvrt | def buildvrt(input_file_list, output_file,
relative=True, **kwargs):
"""Build a VRT
See also: https://www.gdal.org/gdalbuildvrt.html
You can find the possible BuildVRTOptions (**kwargs**) here:
https://github.com/nextgis/pygdal/blob/78a793057d2162c292af4f6b240e19da5d5e52e2/2.1.0/osgeo/gdal.py#L1051
Arguments:
input_file_list {list of str or Path objects} -- List of input files.
output_file {str or Path object} -- Output file (VRT).
Keyword Arguments:
relative {bool} -- If ``True``, the ``input_file_list`` paths are converted to relative
paths (relative to the output file) and the VRT works even if the data is moved somewhere else -
given that the relative location of theVRT and the input files does not chance!
**kwargs {} -- BuildVRTOptions - see function description for a link to .
Returns:
[int] -- If successful, 0 is returned as exit code.
"""
# create destination directory
if not Path(output_file).parent.exists():
Path(output_file).parent.mkdir(parents=True, exist_ok=True)
# make sure we have absolute paths and strings since BuildVRT does not like something else
input_file_list = [str(Path(p).absolute()) for p in input_file_list]
output_file = str(Path(output_file).absolute())
vrt_options = gdal.BuildVRTOptions(**kwargs)
vrt = gdal.BuildVRT(output_file,
input_file_list,
options=vrt_options)
vrt = None
# if needed, create the input file paths relative to the output vrt path
# and replace them in the vrt.
# if desired, fix the paths and the relativeToVRT tag in the VRT
if relative:
input_file_list_relative = [relpath(p, Path(output_file).parent) for p in input_file_list]
with open(output_file, 'r') as file:
# read a list of lines into data
lines = file.readlines()
new_lines = []
counter = -1
for line in lines:
# sometimes it is relative by default
# maybe when all files contain the parent directory of the output file (?)
if "relativeToVRT=\"1\"" in line:
counter += 1
elif "relativeToVRT=\"0\"" in line:
counter += 1
input_file = str(input_file_list[counter])
input_file_relative = str(input_file_list_relative[counter])
if input_file not in line:
raise Exception(f"Expect path {input_file} not part of line {line}.")
line = line.replace(input_file,
input_file_relative)
line = line.replace("relativeToVRT=\"0\"",
"relativeToVRT=\"1\"")
else:
pass
new_lines.append(line)
with open(output_file, 'w') as file:
file.writelines(new_lines)
return 0 | python | def buildvrt(input_file_list, output_file,
relative=True, **kwargs):
"""Build a VRT
See also: https://www.gdal.org/gdalbuildvrt.html
You can find the possible BuildVRTOptions (**kwargs**) here:
https://github.com/nextgis/pygdal/blob/78a793057d2162c292af4f6b240e19da5d5e52e2/2.1.0/osgeo/gdal.py#L1051
Arguments:
input_file_list {list of str or Path objects} -- List of input files.
output_file {str or Path object} -- Output file (VRT).
Keyword Arguments:
relative {bool} -- If ``True``, the ``input_file_list`` paths are converted to relative
paths (relative to the output file) and the VRT works even if the data is moved somewhere else -
given that the relative location of theVRT and the input files does not chance!
**kwargs {} -- BuildVRTOptions - see function description for a link to .
Returns:
[int] -- If successful, 0 is returned as exit code.
"""
# create destination directory
if not Path(output_file).parent.exists():
Path(output_file).parent.mkdir(parents=True, exist_ok=True)
# make sure we have absolute paths and strings since BuildVRT does not like something else
input_file_list = [str(Path(p).absolute()) for p in input_file_list]
output_file = str(Path(output_file).absolute())
vrt_options = gdal.BuildVRTOptions(**kwargs)
vrt = gdal.BuildVRT(output_file,
input_file_list,
options=vrt_options)
vrt = None
# if needed, create the input file paths relative to the output vrt path
# and replace them in the vrt.
# if desired, fix the paths and the relativeToVRT tag in the VRT
if relative:
input_file_list_relative = [relpath(p, Path(output_file).parent) for p in input_file_list]
with open(output_file, 'r') as file:
# read a list of lines into data
lines = file.readlines()
new_lines = []
counter = -1
for line in lines:
# sometimes it is relative by default
# maybe when all files contain the parent directory of the output file (?)
if "relativeToVRT=\"1\"" in line:
counter += 1
elif "relativeToVRT=\"0\"" in line:
counter += 1
input_file = str(input_file_list[counter])
input_file_relative = str(input_file_list_relative[counter])
if input_file not in line:
raise Exception(f"Expect path {input_file} not part of line {line}.")
line = line.replace(input_file,
input_file_relative)
line = line.replace("relativeToVRT=\"0\"",
"relativeToVRT=\"1\"")
else:
pass
new_lines.append(line)
with open(output_file, 'w') as file:
file.writelines(new_lines)
return 0 | Build a VRT
See also: https://www.gdal.org/gdalbuildvrt.html
You can find the possible BuildVRTOptions (**kwargs**) here:
https://github.com/nextgis/pygdal/blob/78a793057d2162c292af4f6b240e19da5d5e52e2/2.1.0/osgeo/gdal.py#L1051
Arguments:
input_file_list {list of str or Path objects} -- List of input files.
output_file {str or Path object} -- Output file (VRT).
Keyword Arguments:
relative {bool} -- If ``True``, the ``input_file_list`` paths are converted to relative
paths (relative to the output file) and the VRT works even if the data is moved somewhere else -
given that the relative location of theVRT and the input files does not chance!
**kwargs {} -- BuildVRTOptions - see function description for a link to .
Returns:
[int] -- If successful, 0 is returned as exit code. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/gdalutils.py#L21-L92 |
benmack/eo-box | eobox/raster/gdalutils.py | reproject_on_template_raster | def reproject_on_template_raster(src_file, dst_file, template_file, resampling="near", compress=None, overwrite=False):
"""Reproject a one-band raster to fit the projection, extend, pixel size etc. of a template raster.
Function based on https://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python
Arguments:
src_file {str} -- Filename of the source one-band raster.
dst_file {str} -- Filename of the destination raster.
template_file {str} -- Filename of the template raster.
resampling {str} -- Resampling type:
'near' (default), 'bilinear', 'cubic', 'cubicspline', 'lanczos', 'average', 'mode', 'max', 'min', 'med', 'q1', 'q3',
see https://www.gdal.org/gdalwarp.html -r parameter.
compress {str} -- Compression type: None (default), 'lzw', 'packbits', 'defalte'.
"""
if not overwrite and Path(dst_file).exists():
print("Processing skipped. Destination file exists.")
return 0
GDAL_RESAMPLING_ALGORITHMS = {
"bilinear": "GRA_Bilinear",
"cubic": "GRA_Cubic",
"cubicspline": "GRA_CubicSpline",
"lanczos": "GRA_Lanczos",
"average": "GRA_Average",
"mode": "GRA_Mode",
"max": "GRA_Max",
"min": "GRA_Min",
"med": "GRA_Med",
"near": "GRA_NearestNeighbour",
"q1": "GRA_Q1",
"q3": "GRA_Q3"
}
compressions = ["lzw", "packbits", "deflate"]
if resampling not in GDAL_RESAMPLING_ALGORITHMS.keys():
raise ValueError(f"'resampling must be one of {', '.join(GDAL_RESAMPLING_ALGORITHMS.keys())}")
if compress is None:
options = []
else:
if compress.lower() not in compressions:
raise ValueError(f"'compress must be one of {', '.join(compressions)}")
else:
options = [f'COMPRESS={compress.upper()}']
# Source
src = gdal.Open(src_file, gdalconst.GA_ReadOnly)
src_band = src.GetRasterBand(1)
src_proj = src.GetProjection()
# We want a section of source that matches this:
match_ds = gdal.Open(template_file, gdalconst.GA_ReadOnly)
match_proj = match_ds.GetProjection()
match_geotrans = match_ds.GetGeoTransform()
wide = match_ds.RasterXSize
high = match_ds.RasterYSize
# Output / destination
Path(dst_file).parent.mkdir(parents=True, exist_ok=True)
dst = gdal.GetDriverByName('GTiff').Create(dst_file, wide, high, 1, src_band.DataType, options=options)
dst.SetGeoTransform( match_geotrans )
dst.SetProjection( match_proj)
# Do the work
gdal.ReprojectImage(src, dst, src_proj, match_proj,
getattr(gdalconst, GDAL_RESAMPLING_ALGORITHMS[resampling]))
del dst # Flush
return 0 | python | def reproject_on_template_raster(src_file, dst_file, template_file, resampling="near", compress=None, overwrite=False):
"""Reproject a one-band raster to fit the projection, extend, pixel size etc. of a template raster.
Function based on https://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python
Arguments:
src_file {str} -- Filename of the source one-band raster.
dst_file {str} -- Filename of the destination raster.
template_file {str} -- Filename of the template raster.
resampling {str} -- Resampling type:
'near' (default), 'bilinear', 'cubic', 'cubicspline', 'lanczos', 'average', 'mode', 'max', 'min', 'med', 'q1', 'q3',
see https://www.gdal.org/gdalwarp.html -r parameter.
compress {str} -- Compression type: None (default), 'lzw', 'packbits', 'defalte'.
"""
if not overwrite and Path(dst_file).exists():
print("Processing skipped. Destination file exists.")
return 0
GDAL_RESAMPLING_ALGORITHMS = {
"bilinear": "GRA_Bilinear",
"cubic": "GRA_Cubic",
"cubicspline": "GRA_CubicSpline",
"lanczos": "GRA_Lanczos",
"average": "GRA_Average",
"mode": "GRA_Mode",
"max": "GRA_Max",
"min": "GRA_Min",
"med": "GRA_Med",
"near": "GRA_NearestNeighbour",
"q1": "GRA_Q1",
"q3": "GRA_Q3"
}
compressions = ["lzw", "packbits", "deflate"]
if resampling not in GDAL_RESAMPLING_ALGORITHMS.keys():
raise ValueError(f"'resampling must be one of {', '.join(GDAL_RESAMPLING_ALGORITHMS.keys())}")
if compress is None:
options = []
else:
if compress.lower() not in compressions:
raise ValueError(f"'compress must be one of {', '.join(compressions)}")
else:
options = [f'COMPRESS={compress.upper()}']
# Source
src = gdal.Open(src_file, gdalconst.GA_ReadOnly)
src_band = src.GetRasterBand(1)
src_proj = src.GetProjection()
# We want a section of source that matches this:
match_ds = gdal.Open(template_file, gdalconst.GA_ReadOnly)
match_proj = match_ds.GetProjection()
match_geotrans = match_ds.GetGeoTransform()
wide = match_ds.RasterXSize
high = match_ds.RasterYSize
# Output / destination
Path(dst_file).parent.mkdir(parents=True, exist_ok=True)
dst = gdal.GetDriverByName('GTiff').Create(dst_file, wide, high, 1, src_band.DataType, options=options)
dst.SetGeoTransform( match_geotrans )
dst.SetProjection( match_proj)
# Do the work
gdal.ReprojectImage(src, dst, src_proj, match_proj,
getattr(gdalconst, GDAL_RESAMPLING_ALGORITHMS[resampling]))
del dst # Flush
return 0 | Reproject a one-band raster to fit the projection, extend, pixel size etc. of a template raster.
Function based on https://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python
Arguments:
src_file {str} -- Filename of the source one-band raster.
dst_file {str} -- Filename of the destination raster.
template_file {str} -- Filename of the template raster.
resampling {str} -- Resampling type:
'near' (default), 'bilinear', 'cubic', 'cubicspline', 'lanczos', 'average', 'mode', 'max', 'min', 'med', 'q1', 'q3',
see https://www.gdal.org/gdalwarp.html -r parameter.
compress {str} -- Compression type: None (default), 'lzw', 'packbits', 'defalte'. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/gdalutils.py#L94-L165 |
benmack/eo-box | eobox/raster/gdalutils.py | rasterize | def rasterize(src_vector: str,
burn_attribute: str,
src_raster_template: str,
dst_rasterized: str,
gdal_dtype: int = 4):
"""Rasterize the values of a spatial vector file.
Arguments:
src_vector {str}} -- A OGR vector file (e.g. GeoPackage, ESRI Shapefile) path containing the
data to be rasterized.
burn_attribute {str} -- The attribute of the vector data to be burned in the raster.
src_raster_template {str} -- Path to a GDAL raster file to be used as template for the
rasterized data.
dst_rasterized {str} -- Path of the destination file.
gdal_dtype {int} -- Numeric GDAL data type, defaults to 4 which is UInt32.
See https://github.com/mapbox/rasterio/blob/master/rasterio/dtypes.py for useful look-up
tables.
Returns:
None
"""
data = gdal.Open(str(src_raster_template), # str for the case that a Path instance arrives here
gdalconst.GA_ReadOnly)
geo_transform = data.GetGeoTransform()
#source_layer = data.GetLayer()
# x_max = x_min + geo_transform[1] * data.RasterXSize
# y_min = y_max + geo_transform[5] * data.RasterYSize
x_res = data.RasterXSize
y_res = data.RasterYSize
mb_v = ogr.Open(src_vector)
mb_l = mb_v.GetLayer()
target_ds = gdal.GetDriverByName('GTiff').Create(dst_rasterized,
x_res, y_res, 1,
gdal_dtype) # gdal.GDT_Byte
# import osr
target_ds.SetGeoTransform((geo_transform[0], # x_min
geo_transform[1], # pixel_width
0,
geo_transform[3], # y_max
0,
geo_transform[5] # pixel_height
))
prj = data.GetProjection()
# srs = osr.SpatialReference(wkt=prj) # Where was this needed?
target_ds.SetProjection(prj)
band = target_ds.GetRasterBand(1)
# NoData_value = 0
# band.SetNoDataValue(NoData_value)
band.FlushCache()
gdal.RasterizeLayer(target_ds, [1], mb_l, options=[f"ATTRIBUTE={burn_attribute}"])
target_ds = None | python | def rasterize(src_vector: str,
burn_attribute: str,
src_raster_template: str,
dst_rasterized: str,
gdal_dtype: int = 4):
"""Rasterize the values of a spatial vector file.
Arguments:
src_vector {str}} -- A OGR vector file (e.g. GeoPackage, ESRI Shapefile) path containing the
data to be rasterized.
burn_attribute {str} -- The attribute of the vector data to be burned in the raster.
src_raster_template {str} -- Path to a GDAL raster file to be used as template for the
rasterized data.
dst_rasterized {str} -- Path of the destination file.
gdal_dtype {int} -- Numeric GDAL data type, defaults to 4 which is UInt32.
See https://github.com/mapbox/rasterio/blob/master/rasterio/dtypes.py for useful look-up
tables.
Returns:
None
"""
data = gdal.Open(str(src_raster_template), # str for the case that a Path instance arrives here
gdalconst.GA_ReadOnly)
geo_transform = data.GetGeoTransform()
#source_layer = data.GetLayer()
# x_max = x_min + geo_transform[1] * data.RasterXSize
# y_min = y_max + geo_transform[5] * data.RasterYSize
x_res = data.RasterXSize
y_res = data.RasterYSize
mb_v = ogr.Open(src_vector)
mb_l = mb_v.GetLayer()
target_ds = gdal.GetDriverByName('GTiff').Create(dst_rasterized,
x_res, y_res, 1,
gdal_dtype) # gdal.GDT_Byte
# import osr
target_ds.SetGeoTransform((geo_transform[0], # x_min
geo_transform[1], # pixel_width
0,
geo_transform[3], # y_max
0,
geo_transform[5] # pixel_height
))
prj = data.GetProjection()
# srs = osr.SpatialReference(wkt=prj) # Where was this needed?
target_ds.SetProjection(prj)
band = target_ds.GetRasterBand(1)
# NoData_value = 0
# band.SetNoDataValue(NoData_value)
band.FlushCache()
gdal.RasterizeLayer(target_ds, [1], mb_l, options=[f"ATTRIBUTE={burn_attribute}"])
target_ds = None | Rasterize the values of a spatial vector file.
Arguments:
src_vector {str}} -- A OGR vector file (e.g. GeoPackage, ESRI Shapefile) path containing the
data to be rasterized.
burn_attribute {str} -- The attribute of the vector data to be burned in the raster.
src_raster_template {str} -- Path to a GDAL raster file to be used as template for the
rasterized data.
dst_rasterized {str} -- Path of the destination file.
gdal_dtype {int} -- Numeric GDAL data type, defaults to 4 which is UInt32.
See https://github.com/mapbox/rasterio/blob/master/rasterio/dtypes.py for useful look-up
tables.
Returns:
None | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/gdalutils.py#L167-L218 |
benmack/eo-box | eobox/vector/conversion.py | calc_distance_to_border | def calc_distance_to_border(polygons, template_raster, dst_raster, overwrite=False,
keep_interim_files=False):
"""Calculate the distance of each raster cell (in and outside the polygons) to the next polygon border.
Arguments:
polygons {str} -- Filename to a geopandas-readable file with polygon features.
template_raster {[type]} -- Filename to a rasterio-readable file.
dst_raster {[type]} -- Destination filename for the distance to polygon border raster file (tif).
Keyword Arguments:
overwrite {bool} -- Overwrite files if they exists? (default: {False})
keep_interim_files {bool} -- Keep the interim line vector and raster files (default: {True})
Returns:
[type] -- [description]
"""
if Path(dst_raster).exists() and not overwrite:
print(f"Returning 0 - File exists: {dst_raster}")
return 0
with rasterio.open(template_raster) as tmp:
crs = tmp.crs
dst_raster = Path(dst_raster)
dst_raster.parent.mkdir(exist_ok=True, parents=True)
tempdir = Path(tempfile.mkdtemp(prefix=f"TEMPDIR_{dst_raster.stem}_", dir=dst_raster.parent))
interim_file_lines_vector = tempdir / "interim_sample_vector_dataset_lines.shp"
interim_file_lines_raster = tempdir / "interim_sample_vector_dataset_lines.tif"
exit_code = convert_polygons_to_lines(polygons,
interim_file_lines_vector,
crs=crs,
add_allone_col=True)
rasterize(src_vector=str(interim_file_lines_vector),
burn_attribute="ALLONE",
src_raster_template=str(template_raster),
dst_rasterized=str(interim_file_lines_raster),
gdal_dtype=1)
cmd = f"{PROXIMITY_PATH} " \
f"{str(Path(interim_file_lines_raster).absolute())} " \
f"{str(Path(dst_raster).absolute())} " \
f"-ot Float32 -distunits PIXEL -values 1 -maxdist 255"
subprocess.check_call(cmd, shell=True)
if not keep_interim_files:
shutil.rmtree(tempdir)
else:
print(f"Interim files are in {tempdir}")
return 0 | python | def calc_distance_to_border(polygons, template_raster, dst_raster, overwrite=False,
keep_interim_files=False):
"""Calculate the distance of each raster cell (in and outside the polygons) to the next polygon border.
Arguments:
polygons {str} -- Filename to a geopandas-readable file with polygon features.
template_raster {[type]} -- Filename to a rasterio-readable file.
dst_raster {[type]} -- Destination filename for the distance to polygon border raster file (tif).
Keyword Arguments:
overwrite {bool} -- Overwrite files if they exists? (default: {False})
keep_interim_files {bool} -- Keep the interim line vector and raster files (default: {True})
Returns:
[type] -- [description]
"""
if Path(dst_raster).exists() and not overwrite:
print(f"Returning 0 - File exists: {dst_raster}")
return 0
with rasterio.open(template_raster) as tmp:
crs = tmp.crs
dst_raster = Path(dst_raster)
dst_raster.parent.mkdir(exist_ok=True, parents=True)
tempdir = Path(tempfile.mkdtemp(prefix=f"TEMPDIR_{dst_raster.stem}_", dir=dst_raster.parent))
interim_file_lines_vector = tempdir / "interim_sample_vector_dataset_lines.shp"
interim_file_lines_raster = tempdir / "interim_sample_vector_dataset_lines.tif"
exit_code = convert_polygons_to_lines(polygons,
interim_file_lines_vector,
crs=crs,
add_allone_col=True)
rasterize(src_vector=str(interim_file_lines_vector),
burn_attribute="ALLONE",
src_raster_template=str(template_raster),
dst_rasterized=str(interim_file_lines_raster),
gdal_dtype=1)
cmd = f"{PROXIMITY_PATH} " \
f"{str(Path(interim_file_lines_raster).absolute())} " \
f"{str(Path(dst_raster).absolute())} " \
f"-ot Float32 -distunits PIXEL -values 1 -maxdist 255"
subprocess.check_call(cmd, shell=True)
if not keep_interim_files:
shutil.rmtree(tempdir)
else:
print(f"Interim files are in {tempdir}")
return 0 | Calculate the distance of each raster cell (in and outside the polygons) to the next polygon border.
Arguments:
polygons {str} -- Filename to a geopandas-readable file with polygon features.
template_raster {[type]} -- Filename to a rasterio-readable file.
dst_raster {[type]} -- Destination filename for the distance to polygon border raster file (tif).
Keyword Arguments:
overwrite {bool} -- Overwrite files if they exists? (default: {False})
keep_interim_files {bool} -- Keep the interim line vector and raster files (default: {True})
Returns:
[type] -- [description] | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/vector/conversion.py#L14-L65 |
benmack/eo-box | eobox/vector/conversion.py | convert_polygons_to_lines | def convert_polygons_to_lines(src_polygons, dst_lines, crs=None, add_allone_col=False):
"""Convert polygons to lines.
Arguments:
src_polygons {path to geopandas-readable file} -- Filename of the the polygon vector dataset to be
converted to lines.
dst_lines {[type]} -- Filename where to write the line vector dataset to.
Keyword Arguments:
crs {dict or str} -- Output projection parameters as string or in dictionary format.
This will reproject the data when a crs is given (not {None}) (default: {None}).
add_allone_col {bool} -- Add an additional attribute column with all ones.
This is useful, e.g. in case you want to use the lines with gdal_proximity afterwards (default: {True}).
Returns:
int -- Exit code 0 if successeful.
"""
gdf = gpd.read_file(src_polygons)
geom_coords = gdf["geometry"] # featureset.get(5)["geometry"]["coordinates"]
lines = []
row_ids = []
for i_row, pol in tqdm(enumerate(geom_coords), total=len(geom_coords)):
boundary = pol.boundary
if boundary.type == 'MultiLineString':
for line in boundary:
lines.append(line)
row_ids.append(i_row)
else:
lines.append(boundary)
row_ids.append(i_row)
gdf_lines = gdf.drop("geometry", axis=1).iloc[row_ids, :]
gdf_lines["Coordinates"] = lines
gdf_lines = gpd.GeoDataFrame(gdf_lines, geometry='Coordinates', crs=gdf.crs)
if crs is not None:
gdf_lines = gdf_lines.to_crs(crs)
if add_allone_col:
gdf_lines["ALLONE"] = 1
Path(dst_lines).parent.mkdir(exist_ok=True, parents=True)
gdf_lines.to_file(dst_lines)
return 0 | python | def convert_polygons_to_lines(src_polygons, dst_lines, crs=None, add_allone_col=False):
"""Convert polygons to lines.
Arguments:
src_polygons {path to geopandas-readable file} -- Filename of the the polygon vector dataset to be
converted to lines.
dst_lines {[type]} -- Filename where to write the line vector dataset to.
Keyword Arguments:
crs {dict or str} -- Output projection parameters as string or in dictionary format.
This will reproject the data when a crs is given (not {None}) (default: {None}).
add_allone_col {bool} -- Add an additional attribute column with all ones.
This is useful, e.g. in case you want to use the lines with gdal_proximity afterwards (default: {True}).
Returns:
int -- Exit code 0 if successeful.
"""
gdf = gpd.read_file(src_polygons)
geom_coords = gdf["geometry"] # featureset.get(5)["geometry"]["coordinates"]
lines = []
row_ids = []
for i_row, pol in tqdm(enumerate(geom_coords), total=len(geom_coords)):
boundary = pol.boundary
if boundary.type == 'MultiLineString':
for line in boundary:
lines.append(line)
row_ids.append(i_row)
else:
lines.append(boundary)
row_ids.append(i_row)
gdf_lines = gdf.drop("geometry", axis=1).iloc[row_ids, :]
gdf_lines["Coordinates"] = lines
gdf_lines = gpd.GeoDataFrame(gdf_lines, geometry='Coordinates', crs=gdf.crs)
if crs is not None:
gdf_lines = gdf_lines.to_crs(crs)
if add_allone_col:
gdf_lines["ALLONE"] = 1
Path(dst_lines).parent.mkdir(exist_ok=True, parents=True)
gdf_lines.to_file(dst_lines)
return 0 | Convert polygons to lines.
Arguments:
src_polygons {path to geopandas-readable file} -- Filename of the the polygon vector dataset to be
converted to lines.
dst_lines {[type]} -- Filename where to write the line vector dataset to.
Keyword Arguments:
crs {dict or str} -- Output projection parameters as string or in dictionary format.
This will reproject the data when a crs is given (not {None}) (default: {None}).
add_allone_col {bool} -- Add an additional attribute column with all ones.
This is useful, e.g. in case you want to use the lines with gdal_proximity afterwards (default: {True}).
Returns:
int -- Exit code 0 if successeful. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/vector/conversion.py#L67-L107 |
benmack/eo-box | eobox/raster/utils.py | dtype_checker_df | def dtype_checker_df(df, dtype, return_=None):
"""Check if there are NaN values of values outside of a given datatype range.
Arguments:
df {dataframe} -- A dataframe.
dtype {str} -- The datatype to check for.
Keyword Arguments:
return_ {str} -- Returns a boolean dataframe with the values not in the range of the dtype ('all'),
the row ('rowsums') or column ('colsums') sums of that dataframe or an exit code 1 (None, default)
if any of the values is not in the range.
Returns:
[int or DataFrame or Series] -- If no value is out of the range exit code 0 is returned, else depends on return_.
"""
dtype_range = dtype_ranges[dtype]
df_out_of_range = (df < dtype_range[0]) | (df > dtype_range[1]) | (~np.isfinite(df))
if df_out_of_range.any().any():
if return_== "colsums":
df_out_of_range = df_out_of_range.apply(sum, axis=0) # column
elif return_== "rowsums":
df_out_of_range = df_out_of_range.apply(sum, axis=1) # row
elif return_== "all":
df_out_of_range = df_out_of_range
else:
df_out_of_range = 1
else:
df_out_of_range = 0
return df_out_of_range | python | def dtype_checker_df(df, dtype, return_=None):
"""Check if there are NaN values of values outside of a given datatype range.
Arguments:
df {dataframe} -- A dataframe.
dtype {str} -- The datatype to check for.
Keyword Arguments:
return_ {str} -- Returns a boolean dataframe with the values not in the range of the dtype ('all'),
the row ('rowsums') or column ('colsums') sums of that dataframe or an exit code 1 (None, default)
if any of the values is not in the range.
Returns:
[int or DataFrame or Series] -- If no value is out of the range exit code 0 is returned, else depends on return_.
"""
dtype_range = dtype_ranges[dtype]
df_out_of_range = (df < dtype_range[0]) | (df > dtype_range[1]) | (~np.isfinite(df))
if df_out_of_range.any().any():
if return_== "colsums":
df_out_of_range = df_out_of_range.apply(sum, axis=0) # column
elif return_== "rowsums":
df_out_of_range = df_out_of_range.apply(sum, axis=1) # row
elif return_== "all":
df_out_of_range = df_out_of_range
else:
df_out_of_range = 1
else:
df_out_of_range = 0
return df_out_of_range | Check if there are NaN values of values outside of a given datatype range.
Arguments:
df {dataframe} -- A dataframe.
dtype {str} -- The datatype to check for.
Keyword Arguments:
return_ {str} -- Returns a boolean dataframe with the values not in the range of the dtype ('all'),
the row ('rowsums') or column ('colsums') sums of that dataframe or an exit code 1 (None, default)
if any of the values is not in the range.
Returns:
[int or DataFrame or Series] -- If no value is out of the range exit code 0 is returned, else depends on return_. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/utils.py#L13-L40 |
benmack/eo-box | eobox/raster/cube.py | EOCubeAbstract.get_df_ilocs | def get_df_ilocs(self, band, date):
"""Get positions of rows matching specific band(s) and date(s).
The method supports three typical queries:
* one band and one date (both given as strings)
* one band and of several dates (band given as strings, date as list of strings)
* several band and of one date (date given as strings, band as list of strings)
Arguments:
band {str or list} -- Band(s) for which to derive the iloc index.
date {str or list} -- Date(s) for which to derive the iloc index.
Returns:
int or list -- Integer (if band and date are str) or list of iloc indices.
"""
df = self.df_layers.copy()
df["index"] = range(df.shape[0])
idx_layers = []
if isinstance(band, str) and isinstance(date, str):
idx_layers = df[(df["date"] == date) & (df["band"] == band)]["index"].values[0]
if isinstance(band, list) and isinstance(date, str):
for b in band:
idx = df[(df["date"] == date) & (df["band"] == b)]["index"].values[0]
idx_layers.append(idx)
elif isinstance(band, str) and isinstance(date, list):
for d in date:
idx = df[(df["band"] == band) & (df["date"] == d)]["index"].values[0]
idx_layers.append(idx)
return idx_layers | python | def get_df_ilocs(self, band, date):
"""Get positions of rows matching specific band(s) and date(s).
The method supports three typical queries:
* one band and one date (both given as strings)
* one band and of several dates (band given as strings, date as list of strings)
* several band and of one date (date given as strings, band as list of strings)
Arguments:
band {str or list} -- Band(s) for which to derive the iloc index.
date {str or list} -- Date(s) for which to derive the iloc index.
Returns:
int or list -- Integer (if band and date are str) or list of iloc indices.
"""
df = self.df_layers.copy()
df["index"] = range(df.shape[0])
idx_layers = []
if isinstance(band, str) and isinstance(date, str):
idx_layers = df[(df["date"] == date) & (df["band"] == band)]["index"].values[0]
if isinstance(band, list) and isinstance(date, str):
for b in band:
idx = df[(df["date"] == date) & (df["band"] == b)]["index"].values[0]
idx_layers.append(idx)
elif isinstance(band, str) and isinstance(date, list):
for d in date:
idx = df[(df["band"] == band) & (df["date"] == d)]["index"].values[0]
idx_layers.append(idx)
return idx_layers | Get positions of rows matching specific band(s) and date(s).
The method supports three typical queries:
* one band and one date (both given as strings)
* one band and of several dates (band given as strings, date as list of strings)
* several band and of one date (date given as strings, band as list of strings)
Arguments:
band {str or list} -- Band(s) for which to derive the iloc index.
date {str or list} -- Date(s) for which to derive the iloc index.
Returns:
int or list -- Integer (if band and date are str) or list of iloc indices. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/cube.py#L68-L101 |
benmack/eo-box | eobox/raster/cube.py | EOCubeChunk._get_spatial_bounds | def _get_spatial_bounds(self):
"""Get the spatial bounds of the chunk."""
# This should be a MultiRasterIO method
with rasterio.open(self._mrio._get_template_for_given_resolution(self._mrio.dst_res, "path")) as src_layer:
pass # later we need src_layer for src_layer.window_transform(win)
win_transform = src_layer.window_transform(self._window)
bounds = rasterio.windows.bounds(window=self._window,
transform=win_transform,
height=0, width=0)
return bounds | python | def _get_spatial_bounds(self):
"""Get the spatial bounds of the chunk."""
# This should be a MultiRasterIO method
with rasterio.open(self._mrio._get_template_for_given_resolution(self._mrio.dst_res, "path")) as src_layer:
pass # later we need src_layer for src_layer.window_transform(win)
win_transform = src_layer.window_transform(self._window)
bounds = rasterio.windows.bounds(window=self._window,
transform=win_transform,
height=0, width=0)
return bounds | Get the spatial bounds of the chunk. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/cube.py#L173-L182 |
benmack/eo-box | eobox/raster/cube.py | EOCubeChunk.convert_data_to_ndarray | def convert_data_to_ndarray(self):
"""Converts the data from dataframe to ndarray format. Assumption: df-columns are ndarray-layers (3rd dim.)"""
if self._data_structure != "DataFrame":
raise Exception(f"Data is not a DataFrame but {self._data_structure}.")
self._data = self._convert_to_ndarray(self._data)
self._update_data_structure()
return self | python | def convert_data_to_ndarray(self):
"""Converts the data from dataframe to ndarray format. Assumption: df-columns are ndarray-layers (3rd dim.)"""
if self._data_structure != "DataFrame":
raise Exception(f"Data is not a DataFrame but {self._data_structure}.")
self._data = self._convert_to_ndarray(self._data)
self._update_data_structure()
return self | Converts the data from dataframe to ndarray format. Assumption: df-columns are ndarray-layers (3rd dim.) | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/cube.py#L207-L213 |
benmack/eo-box | eobox/raster/cube.py | EOCubeChunk._convert_to_ndarray | def _convert_to_ndarray(self, data):
"""Converts data from dataframe to ndarray format. Assumption: df-columns are ndarray-layers (3rd dim.)"""
if data.__class__.__name__ != "DataFrame":
raise Exception(f"data is not a DataFrame but {data.__class__.__name__}.")
shape_ndarray = (self._height, self._width, data.shape[1])
data_ndarray = data.values.reshape(shape_ndarray)
return data_ndarray | python | def _convert_to_ndarray(self, data):
"""Converts data from dataframe to ndarray format. Assumption: df-columns are ndarray-layers (3rd dim.)"""
if data.__class__.__name__ != "DataFrame":
raise Exception(f"data is not a DataFrame but {data.__class__.__name__}.")
shape_ndarray = (self._height, self._width, data.shape[1])
data_ndarray = data.values.reshape(shape_ndarray)
return data_ndarray | Converts data from dataframe to ndarray format. Assumption: df-columns are ndarray-layers (3rd dim.) | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/cube.py#L215-L221 |
benmack/eo-box | eobox/raster/cube.py | EOCubeChunk.write_dataframe | def write_dataframe(self, result, dst_paths, nodata=None, compress='lzw'):
"""Write results (dataframe) to disc."""
result = self._convert_to_ndarray(result)
self.write_ndarray(result, dst_paths, nodata=nodata, compress=compress) | python | def write_dataframe(self, result, dst_paths, nodata=None, compress='lzw'):
"""Write results (dataframe) to disc."""
result = self._convert_to_ndarray(result)
self.write_ndarray(result, dst_paths, nodata=nodata, compress=compress) | Write results (dataframe) to disc. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/cube.py#L223-L226 |
benmack/eo-box | eobox/raster/cube.py | EOCubeChunk.write_ndarray | def write_ndarray(self, result, dst_paths, nodata=None, compress='lzw'):
"""Write results (ndarray) to disc."""
assert len(dst_paths) == result.shape[2]
assert result.shape[0] == self._height
assert result.shape[1] == self._width
assert result.shape[2] == len(dst_paths)
with rasterio.open(self._mrio._get_template_for_given_resolution(self._mrio.dst_res, "path")) as src_layer:
pass # later we need src_layer for src_layer.window_transform(win)
for i, pth in enumerate(dst_paths):
dst_path_chunk = self.get_chunk_path_from_layer_path(pth, self.ji)
result_layer_i = result[:, :, [i]]
assert result_layer_i.shape[2] == 1
kwargs = self._mrio._get_template_for_given_resolution(
res=self._mrio.dst_res, return_="meta").copy()
kwargs.update({"driver": "GTiff",
"compress": compress,
"nodata": nodata,
"height": self._height,
"width": self._width,
"dtype": result_layer_i.dtype,
"transform": src_layer.window_transform(self._window)})
with rasterio.open(dst_path_chunk, "w", **kwargs) as dst:
dst.write(result_layer_i[:, :, 0], 1) | python | def write_ndarray(self, result, dst_paths, nodata=None, compress='lzw'):
"""Write results (ndarray) to disc."""
assert len(dst_paths) == result.shape[2]
assert result.shape[0] == self._height
assert result.shape[1] == self._width
assert result.shape[2] == len(dst_paths)
with rasterio.open(self._mrio._get_template_for_given_resolution(self._mrio.dst_res, "path")) as src_layer:
pass # later we need src_layer for src_layer.window_transform(win)
for i, pth in enumerate(dst_paths):
dst_path_chunk = self.get_chunk_path_from_layer_path(pth, self.ji)
result_layer_i = result[:, :, [i]]
assert result_layer_i.shape[2] == 1
kwargs = self._mrio._get_template_for_given_resolution(
res=self._mrio.dst_res, return_="meta").copy()
kwargs.update({"driver": "GTiff",
"compress": compress,
"nodata": nodata,
"height": self._height,
"width": self._width,
"dtype": result_layer_i.dtype,
"transform": src_layer.window_transform(self._window)})
with rasterio.open(dst_path_chunk, "w", **kwargs) as dst:
dst.write(result_layer_i[:, :, 0], 1) | Write results (ndarray) to disc. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/cube.py#L228-L252 |
benmack/eo-box | eobox/raster/cube.py | EOCubeChunk.robust_data_range | def robust_data_range(arr, robust=False, vmin=None, vmax=None):
"""Get a robust data range, i.e. 2nd and 98th percentile for vmin, vmax parameters."""
# from the seaborn code
# https://github.com/mwaskom/seaborn/blob/3a3ec75befab52c02650c62772a90f8c23046038/seaborn/matrix.py#L201
def _get_vmin_vmax(arr2d, vmin=None, vmax=None):
if vmin is None:
vmin = np.percentile(arr2d, 2) if robust else arr2d.min()
if vmax is None:
vmax = np.percentile(arr2d, 98) if robust else arr2d.max()
return vmin, vmax
if len(arr.shape) == 3 and vmin is None and vmax is None:
vmin = []
vmax = []
for i in range(arr.shape[2]):
arr_i = arr[:, :, i]
vmin_i, vmax_i = _get_vmin_vmax(arr_i, vmin=None, vmax=None)
vmin.append(vmin_i)
vmax.append(vmax_i)
else:
vmin, vmax = _get_vmin_vmax(arr, vmin=vmin, vmax=vmax)
return vmin, vmax | python | def robust_data_range(arr, robust=False, vmin=None, vmax=None):
"""Get a robust data range, i.e. 2nd and 98th percentile for vmin, vmax parameters."""
# from the seaborn code
# https://github.com/mwaskom/seaborn/blob/3a3ec75befab52c02650c62772a90f8c23046038/seaborn/matrix.py#L201
def _get_vmin_vmax(arr2d, vmin=None, vmax=None):
if vmin is None:
vmin = np.percentile(arr2d, 2) if robust else arr2d.min()
if vmax is None:
vmax = np.percentile(arr2d, 98) if robust else arr2d.max()
return vmin, vmax
if len(arr.shape) == 3 and vmin is None and vmax is None:
vmin = []
vmax = []
for i in range(arr.shape[2]):
arr_i = arr[:, :, i]
vmin_i, vmax_i = _get_vmin_vmax(arr_i, vmin=None, vmax=None)
vmin.append(vmin_i)
vmax.append(vmax_i)
else:
vmin, vmax = _get_vmin_vmax(arr, vmin=vmin, vmax=vmax)
return vmin, vmax | Get a robust data range, i.e. 2nd and 98th percentile for vmin, vmax parameters. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/cube.py#L255-L277 |
benmack/eo-box | eobox/raster/cube.py | EOCubeChunk.from_eocube | def from_eocube(eocube, ji):
"""Create a EOCubeChunk object from an EOCube object."""
eocubewin = EOCubeChunk(ji, eocube.df_layers, eocube.chunksize, eocube.wdir)
return eocubewin | python | def from_eocube(eocube, ji):
"""Create a EOCubeChunk object from an EOCube object."""
eocubewin = EOCubeChunk(ji, eocube.df_layers, eocube.chunksize, eocube.wdir)
return eocubewin | Create a EOCubeChunk object from an EOCube object. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/cube.py#L346-L349 |
benmack/eo-box | eobox/raster/cube.py | EOCubeSceneCollection.get_chunk | def get_chunk(self, ji):
"""Get a EOCubeChunk"""
return EOCubeSceneCollectionChunk(ji=ji,
df_layers=self.df_layers,
chunksize=self.chunksize,
variables=self.variables,
qa=self.qa,
qa_valid=self.qa_valid,
wdir=self.wdir) | python | def get_chunk(self, ji):
"""Get a EOCubeChunk"""
return EOCubeSceneCollectionChunk(ji=ji,
df_layers=self.df_layers,
chunksize=self.chunksize,
variables=self.variables,
qa=self.qa,
qa_valid=self.qa_valid,
wdir=self.wdir) | Get a EOCubeChunk | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/cube.py#L396-L404 |
benmack/eo-box | eobox/raster/cube.py | EOCubeSceneCollectionChunk.read_data_by_variable | def read_data_by_variable(self, mask=True):
"""Reads and masks (if desired) the data and converts it in one dataframe per variable."""
def print_elapsed_time(start, last_stopped, prefix):
# print(f"{prefix} - Elapsed time [s] since start / last stopped: \
# {(int(time.time() - start_time))} / {(int(time.time() - last_stopped))}")
return time.time()
start_time = time.time()
last_stopped = time.time()
last_stopped = print_elapsed_time(start_time, last_stopped, "Starting chunk function")
verbose = False
self.read_data()
last_stopped = print_elapsed_time(start_time, last_stopped, "Data read")
# 2.
sc_chunk = self.convert_data_to_dataframe()
last_stopped = print_elapsed_time(start_time, last_stopped, "Data converted to df")
# 3.B.
if mask:
# 3.A.
ilocs_qa = np.where((self.df_layers["band"] == self.qa).values)[0]
df_qa = self.data.iloc[:, ilocs_qa]
df_qa.columns = self.df_layers["date"].iloc[ilocs_qa]
df_clearsky = df_qa.isin(self.qa_valid)
last_stopped = print_elapsed_time(start_time, last_stopped, "Clearsky df created")
return_bands = self.variables
else:
return_bands = self.variables + [self.qa]
dfs_variables = {}
for var in return_bands:
if verbose:
print("VARIABLE:", var)
ilocs_var = np.where((self.df_layers["band"] == var).values)[0]
df_var = self.data.iloc[:, ilocs_var]
df_var.columns = self.df_layers["date"].iloc[ilocs_var]
if mask:
df_var = df_var.where(df_clearsky, other=np.nan)
dfs_variables[var] = df_var
last_stopped = print_elapsed_time(start_time, last_stopped, "Clearsky df created")
self._data = dfs_variables
return self | python | def read_data_by_variable(self, mask=True):
"""Reads and masks (if desired) the data and converts it in one dataframe per variable."""
def print_elapsed_time(start, last_stopped, prefix):
# print(f"{prefix} - Elapsed time [s] since start / last stopped: \
# {(int(time.time() - start_time))} / {(int(time.time() - last_stopped))}")
return time.time()
start_time = time.time()
last_stopped = time.time()
last_stopped = print_elapsed_time(start_time, last_stopped, "Starting chunk function")
verbose = False
self.read_data()
last_stopped = print_elapsed_time(start_time, last_stopped, "Data read")
# 2.
sc_chunk = self.convert_data_to_dataframe()
last_stopped = print_elapsed_time(start_time, last_stopped, "Data converted to df")
# 3.B.
if mask:
# 3.A.
ilocs_qa = np.where((self.df_layers["band"] == self.qa).values)[0]
df_qa = self.data.iloc[:, ilocs_qa]
df_qa.columns = self.df_layers["date"].iloc[ilocs_qa]
df_clearsky = df_qa.isin(self.qa_valid)
last_stopped = print_elapsed_time(start_time, last_stopped, "Clearsky df created")
return_bands = self.variables
else:
return_bands = self.variables + [self.qa]
dfs_variables = {}
for var in return_bands:
if verbose:
print("VARIABLE:", var)
ilocs_var = np.where((self.df_layers["band"] == var).values)[0]
df_var = self.data.iloc[:, ilocs_var]
df_var.columns = self.df_layers["date"].iloc[ilocs_var]
if mask:
df_var = df_var.where(df_clearsky, other=np.nan)
dfs_variables[var] = df_var
last_stopped = print_elapsed_time(start_time, last_stopped, "Clearsky df created")
self._data = dfs_variables
return self | Reads and masks (if desired) the data and converts it in one dataframe per variable. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/cube.py#L517-L562 |
benmack/eo-box | eobox/sampledata/base.py | get_dataset | def get_dataset(dataset="s2l1c"):
"""Get a specific sampledata to play around.
So far the following sampledata exist:
* 's2l1c': One Sentinel-2 Level 1C scene with a reference dataset.
* 'lsts': A time series of 105 Landsat scenes each with the bands b3 (red), b4 (nir), b5 (swir1) and fmask.
Keyword Arguments:
dataset {str} -- The name of the dataset (default: {'s2l1c'}).
Returns:
[dict] -- A dictionary with paths and information about the sampledata.
"""
if dataset == "s2l1c":
search_string = os.path.join(DIR_DATA, dataset, "**", "*_B??.jp2")
files = glob.glob(search_string, recursive=True)
if not files:
raise IOError(f"Could not find raster files of the s2l1c dataset. Search string: {search_string}")
basename_splitted = [pth.replace(".jp2", "").split("_")[-2:] for pth in files]
dset = {"raster_files": files,
"raster_bands": [ele[1] for ele in basename_splitted],
"raster_times": [ele[0] for ele in basename_splitted],
"vector_file": os.path.join(DIR_DATA, "s2l1c", "s2l1c_ref.gpkg"),
"vector_file_osm": os.path.join(DIR_DATA, "s2l1c", "gis_osm_landuse-water_a_free_1_area-10000-to-500000.gpkg")}
elif dataset == "lsts":
search_string = os.path.join(DIR_DATA, dataset, "**", "*.tif")
files = glob.glob(search_string, recursive=True)
if not files:
raise IOError(f"Could not find raster files of the lsts dataset. Search string: {search_string}")
basename_splitted = [os.path.basename(pth).replace(".tif", "").split("_") for pth in files]
dset = {"raster_files": files,
"raster_bands": [ele[1] for ele in basename_splitted],
"raster_times": [ele[0][9:16] for ele in basename_splitted]}
# If you want to add a new dataset here, do not forget to do all of the following steps:
# 1) add the dataset in the eo-box/sampledata/eobox/sampledata/data/<name of new dataset>
# 2) write the code here to get the paths of the data and eventually some additional information
# 3) write a test to make sure you get the data
# 4) add the new dataset to package_data in eo-box/sampledata/eobox/setup.py
# 5) add the new dataset to package_data in eo-box/sampledata/MANIFEST.in
# 4) change the version number in eo-box/sampledata/eobox/sampledata/__init__.py to '<current>.<current+1>.0'
return dset | python | def get_dataset(dataset="s2l1c"):
"""Get a specific sampledata to play around.
So far the following sampledata exist:
* 's2l1c': One Sentinel-2 Level 1C scene with a reference dataset.
* 'lsts': A time series of 105 Landsat scenes each with the bands b3 (red), b4 (nir), b5 (swir1) and fmask.
Keyword Arguments:
dataset {str} -- The name of the dataset (default: {'s2l1c'}).
Returns:
[dict] -- A dictionary with paths and information about the sampledata.
"""
if dataset == "s2l1c":
search_string = os.path.join(DIR_DATA, dataset, "**", "*_B??.jp2")
files = glob.glob(search_string, recursive=True)
if not files:
raise IOError(f"Could not find raster files of the s2l1c dataset. Search string: {search_string}")
basename_splitted = [pth.replace(".jp2", "").split("_")[-2:] for pth in files]
dset = {"raster_files": files,
"raster_bands": [ele[1] for ele in basename_splitted],
"raster_times": [ele[0] for ele in basename_splitted],
"vector_file": os.path.join(DIR_DATA, "s2l1c", "s2l1c_ref.gpkg"),
"vector_file_osm": os.path.join(DIR_DATA, "s2l1c", "gis_osm_landuse-water_a_free_1_area-10000-to-500000.gpkg")}
elif dataset == "lsts":
search_string = os.path.join(DIR_DATA, dataset, "**", "*.tif")
files = glob.glob(search_string, recursive=True)
if not files:
raise IOError(f"Could not find raster files of the lsts dataset. Search string: {search_string}")
basename_splitted = [os.path.basename(pth).replace(".tif", "").split("_") for pth in files]
dset = {"raster_files": files,
"raster_bands": [ele[1] for ele in basename_splitted],
"raster_times": [ele[0][9:16] for ele in basename_splitted]}
# If you want to add a new dataset here, do not forget to do all of the following steps:
# 1) add the dataset in the eo-box/sampledata/eobox/sampledata/data/<name of new dataset>
# 2) write the code here to get the paths of the data and eventually some additional information
# 3) write a test to make sure you get the data
# 4) add the new dataset to package_data in eo-box/sampledata/eobox/setup.py
# 5) add the new dataset to package_data in eo-box/sampledata/MANIFEST.in
# 4) change the version number in eo-box/sampledata/eobox/sampledata/__init__.py to '<current>.<current+1>.0'
return dset | Get a specific sampledata to play around.
So far the following sampledata exist:
* 's2l1c': One Sentinel-2 Level 1C scene with a reference dataset.
* 'lsts': A time series of 105 Landsat scenes each with the bands b3 (red), b4 (nir), b5 (swir1) and fmask.
Keyword Arguments:
dataset {str} -- The name of the dataset (default: {'s2l1c'}).
Returns:
[dict] -- A dictionary with paths and information about the sampledata. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/sampledata/base.py#L12-L56 |
benmack/eo-box | eobox/raster/rasterprocessing.py | windows_from_blocksize | def windows_from_blocksize(blocksize_xy, width, height):
"""Create rasterio.windows.Window instances with given size which fully cover a raster.
Arguments:
blocksize_xy {int or list of two int} -- [description]
width {int} -- With of the raster for which to create the windows.
height {int} -- Heigth of the raster for which to create the windows.
Returns:
list -- List of windows according to the following format
``[[<row-index>, <column index>], rasterio.windows.Window(<col_off>, <row_off>, <width>, <height>)]``.
"""
# checks the blocksize input
value_error_msg = "'blocksize must be an integer or a list of two integers.'"
if isinstance(blocksize_xy, int):
blockxsize, blockysize = (blocksize_xy, blocksize_xy)
elif isinstance(blocksize_xy, list):
if len(blocksize_xy) != 2:
raise ValueError(value_error_msg)
else:
if not all([isinstance(blocksize_xy[0], int), isinstance(blocksize_xy[1], int)]):
raise ValueError(value_error_msg)
blockxsize, blockysize = blocksize_xy
else:
raise ValueError(value_error_msg)
# create the col_off and row_off elements for all windows
n_cols = int(np.ceil(width / blockxsize))
n_rows = int(np.ceil(height / blockysize))
col = list(range(n_cols)) * n_rows
col_off = np.array(col) * blockxsize
row = np.repeat(list(range(n_rows)), n_cols)
row_off = row * blockysize
# create the windows
# if necessary, reduce the width and/or height of the border windows
blocksize_wins = []
for ridx, roff, cidx, coff, in zip(row, row_off, col, col_off):
if coff + blockxsize > width:
bxsize = width - coff
else:
bxsize = blockxsize
if roff + blockysize > height:
bysize = height - roff
else:
bysize = blockysize
blocksize_wins.append([[ridx, cidx], rasterio.windows.Window(coff, roff, bxsize, bysize)])
return blocksize_wins | python | def windows_from_blocksize(blocksize_xy, width, height):
"""Create rasterio.windows.Window instances with given size which fully cover a raster.
Arguments:
blocksize_xy {int or list of two int} -- [description]
width {int} -- With of the raster for which to create the windows.
height {int} -- Heigth of the raster for which to create the windows.
Returns:
list -- List of windows according to the following format
``[[<row-index>, <column index>], rasterio.windows.Window(<col_off>, <row_off>, <width>, <height>)]``.
"""
# checks the blocksize input
value_error_msg = "'blocksize must be an integer or a list of two integers.'"
if isinstance(blocksize_xy, int):
blockxsize, blockysize = (blocksize_xy, blocksize_xy)
elif isinstance(blocksize_xy, list):
if len(blocksize_xy) != 2:
raise ValueError(value_error_msg)
else:
if not all([isinstance(blocksize_xy[0], int), isinstance(blocksize_xy[1], int)]):
raise ValueError(value_error_msg)
blockxsize, blockysize = blocksize_xy
else:
raise ValueError(value_error_msg)
# create the col_off and row_off elements for all windows
n_cols = int(np.ceil(width / blockxsize))
n_rows = int(np.ceil(height / blockysize))
col = list(range(n_cols)) * n_rows
col_off = np.array(col) * blockxsize
row = np.repeat(list(range(n_rows)), n_cols)
row_off = row * blockysize
# create the windows
# if necessary, reduce the width and/or height of the border windows
blocksize_wins = []
for ridx, roff, cidx, coff, in zip(row, row_off, col, col_off):
if coff + blockxsize > width:
bxsize = width - coff
else:
bxsize = blockxsize
if roff + blockysize > height:
bysize = height - roff
else:
bysize = blockysize
blocksize_wins.append([[ridx, cidx], rasterio.windows.Window(coff, roff, bxsize, bysize)])
return blocksize_wins | Create rasterio.windows.Window instances with given size which fully cover a raster.
Arguments:
blocksize_xy {int or list of two int} -- [description]
width {int} -- With of the raster for which to create the windows.
height {int} -- Heigth of the raster for which to create the windows.
Returns:
list -- List of windows according to the following format
``[[<row-index>, <column index>], rasterio.windows.Window(<col_off>, <row_off>, <width>, <height>)]``. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/rasterprocessing.py#L285-L333 |
benmack/eo-box | eobox/raster/rasterprocessing.py | MultiRasterIO._get_dst_resolution | def _get_dst_resolution(self, dst_res=None):
"""Get default resolution, i.e. the highest resolution or smallest cell size."""
if dst_res is None:
dst_res = min(self._res_indices.keys())
return dst_res | python | def _get_dst_resolution(self, dst_res=None):
"""Get default resolution, i.e. the highest resolution or smallest cell size."""
if dst_res is None:
dst_res = min(self._res_indices.keys())
return dst_res | Get default resolution, i.e. the highest resolution or smallest cell size. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/rasterprocessing.py#L56-L60 |
benmack/eo-box | eobox/raster/rasterprocessing.py | MultiRasterIO.block_windows | def block_windows(self, res=None): # setter and getter ?
"""Load windows for chunks-wise processing from raster internal tiling (first raster of given resolution).
Arguments:
res {numeric} -- Resolution determining the raster (1st of resolution group) from which to take the tiling.
"""
if res is None:
res = max(self._res_indices.keys())
self._windows_res = res
a_file_index_given_res = self._res_indices[res][0]
with rasterio.open(self._layer_files[a_file_index_given_res]) as src:
wins_of_first_dst_res_layer = tuple(src.block_windows())
self.windows = np.array([win[1] for win in wins_of_first_dst_res_layer])
self.windows_row = np.array([win[0][0] for win in wins_of_first_dst_res_layer])
self.windows_col = np.array([win[0][1] for win in wins_of_first_dst_res_layer]) | python | def block_windows(self, res=None): # setter and getter ?
"""Load windows for chunks-wise processing from raster internal tiling (first raster of given resolution).
Arguments:
res {numeric} -- Resolution determining the raster (1st of resolution group) from which to take the tiling.
"""
if res is None:
res = max(self._res_indices.keys())
self._windows_res = res
a_file_index_given_res = self._res_indices[res][0]
with rasterio.open(self._layer_files[a_file_index_given_res]) as src:
wins_of_first_dst_res_layer = tuple(src.block_windows())
self.windows = np.array([win[1] for win in wins_of_first_dst_res_layer])
self.windows_row = np.array([win[0][0] for win in wins_of_first_dst_res_layer])
self.windows_col = np.array([win[0][1] for win in wins_of_first_dst_res_layer]) | Load windows for chunks-wise processing from raster internal tiling (first raster of given resolution).
Arguments:
res {numeric} -- Resolution determining the raster (1st of resolution group) from which to take the tiling. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/rasterprocessing.py#L62-L77 |
benmack/eo-box | eobox/raster/rasterprocessing.py | MultiRasterIO.windows_from_blocksize | def windows_from_blocksize(self, blocksize_xy=512):
"""Create rasterio.windows.Window instances with given size which fully cover the raster.
Arguments:
blocksize_xy {int or list of two int} -- Size of the window. If one integer is given it defines
the width and height of the window. If a list of two integers if given the first defines the
width and the second the height.
Returns:
None -- But the attributes ``windows``, ``windows_row`` and ``windows_col`` are updated.
"""
meta = self._get_template_for_given_resolution(self.dst_res, "meta")
width = meta["width"]
height = meta["height"]
blocksize_wins = windows_from_blocksize(blocksize_xy, width, height)
self.windows = np.array([win[1] for win in blocksize_wins])
self.windows_row = np.array([win[0][0] for win in blocksize_wins])
self.windows_col = np.array([win[0][1] for win in blocksize_wins])
return self | python | def windows_from_blocksize(self, blocksize_xy=512):
"""Create rasterio.windows.Window instances with given size which fully cover the raster.
Arguments:
blocksize_xy {int or list of two int} -- Size of the window. If one integer is given it defines
the width and height of the window. If a list of two integers if given the first defines the
width and the second the height.
Returns:
None -- But the attributes ``windows``, ``windows_row`` and ``windows_col`` are updated.
"""
meta = self._get_template_for_given_resolution(self.dst_res, "meta")
width = meta["width"]
height = meta["height"]
blocksize_wins = windows_from_blocksize(blocksize_xy, width, height)
self.windows = np.array([win[1] for win in blocksize_wins])
self.windows_row = np.array([win[0][0] for win in blocksize_wins])
self.windows_col = np.array([win[0][1] for win in blocksize_wins])
return self | Create rasterio.windows.Window instances with given size which fully cover the raster.
Arguments:
blocksize_xy {int or list of two int} -- Size of the window. If one integer is given it defines
the width and height of the window. If a list of two integers if given the first defines the
width and the second the height.
Returns:
None -- But the attributes ``windows``, ``windows_row`` and ``windows_col`` are updated. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/rasterprocessing.py#L79-L98 |
benmack/eo-box | eobox/raster/rasterprocessing.py | MultiRasterIO._get_template_for_given_resolution | def _get_template_for_given_resolution(self, res, return_):
"""Given specified resolution ('res') return template layer 'path', 'meta' or 'windows'."""
path = self._layer_files[self._res_indices[res][0]]
if return_ == "path":
return_value = path
else:
with rasterio.open(str(path)) as src:
if return_ == "meta":
return_value = src.meta
elif return_ == "windows":
return_value = tuple(src.block_windows())
else:
raise ValueError("'return_' must be 'path', meta' or 'windows'.")
return return_value | python | def _get_template_for_given_resolution(self, res, return_):
"""Given specified resolution ('res') return template layer 'path', 'meta' or 'windows'."""
path = self._layer_files[self._res_indices[res][0]]
if return_ == "path":
return_value = path
else:
with rasterio.open(str(path)) as src:
if return_ == "meta":
return_value = src.meta
elif return_ == "windows":
return_value = tuple(src.block_windows())
else:
raise ValueError("'return_' must be 'path', meta' or 'windows'.")
return return_value | Given specified resolution ('res') return template layer 'path', 'meta' or 'windows'. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/rasterprocessing.py#L100-L113 |
benmack/eo-box | eobox/raster/rasterprocessing.py | MultiRasterIO.windows_df | def windows_df(self):
"""Get Windows (W) W-row, W-col and W-index of windows e.g. loaded with :meth:`block_windows` as a dataframe.
Returns:
[dataframe] -- A dataframe with the window information and indices (row, col, index).
"""
import pandas as pd
if self.windows is None:
raise Exception("You need to call the block_windows or windows before.")
df_wins = []
for row, col, win in zip(self.windows_row, self.windows_col, self.windows):
df_wins.append(pd.DataFrame({"row":[row], "col":[col], "Window":[win]}))
df_wins = pd.concat(df_wins).set_index(["row", "col"])
df_wins["window_index"] = range(df_wins.shape[0])
df_wins = df_wins.sort_index()
return df_wins | python | def windows_df(self):
"""Get Windows (W) W-row, W-col and W-index of windows e.g. loaded with :meth:`block_windows` as a dataframe.
Returns:
[dataframe] -- A dataframe with the window information and indices (row, col, index).
"""
import pandas as pd
if self.windows is None:
raise Exception("You need to call the block_windows or windows before.")
df_wins = []
for row, col, win in zip(self.windows_row, self.windows_col, self.windows):
df_wins.append(pd.DataFrame({"row":[row], "col":[col], "Window":[win]}))
df_wins = pd.concat(df_wins).set_index(["row", "col"])
df_wins["window_index"] = range(df_wins.shape[0])
df_wins = df_wins.sort_index()
return df_wins | Get Windows (W) W-row, W-col and W-index of windows e.g. loaded with :meth:`block_windows` as a dataframe.
Returns:
[dataframe] -- A dataframe with the window information and indices (row, col, index). | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/rasterprocessing.py#L115-L131 |
benmack/eo-box | eobox/raster/rasterprocessing.py | MultiRasterIO.ji_windows | def ji_windows(self, ij_win): # what can be given to ij_win NOT intuitive/right name by now!!!
"""For a given specific window, i.e. an element of :attr:`windows`, get the windows of all resolutions.
Arguments:
ij_win {int} -- The index specifying the window for which to return the resolution-windows.
"""
ji_windows = {}
transform_src = self._layer_meta[self._res_indices[self._windows_res][0]]["transform"]
for res in self._res_indices:
transform_dst = self._layer_meta[self._res_indices[res][0]]["transform"]
ji_windows[res] = window_from_window(window_src=self.windows[ij_win],
transform_src=transform_src,
transform_dst=transform_dst)
return ji_windows | python | def ji_windows(self, ij_win): # what can be given to ij_win NOT intuitive/right name by now!!!
"""For a given specific window, i.e. an element of :attr:`windows`, get the windows of all resolutions.
Arguments:
ij_win {int} -- The index specifying the window for which to return the resolution-windows.
"""
ji_windows = {}
transform_src = self._layer_meta[self._res_indices[self._windows_res][0]]["transform"]
for res in self._res_indices:
transform_dst = self._layer_meta[self._res_indices[res][0]]["transform"]
ji_windows[res] = window_from_window(window_src=self.windows[ij_win],
transform_src=transform_src,
transform_dst=transform_dst)
return ji_windows | For a given specific window, i.e. an element of :attr:`windows`, get the windows of all resolutions.
Arguments:
ij_win {int} -- The index specifying the window for which to return the resolution-windows. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/rasterprocessing.py#L133-L146 |
benmack/eo-box | eobox/raster/rasterprocessing.py | MultiRasterIO.get_arrays | def get_arrays(self, ji_win):
"""Get the data of the a window given the ji_windows derived with :method:`ji_windows`.
Arguments:
ji_win {[type]} -- The index of the window or the (multi-resolution) windows returned by :meth:`ji_window`.
Returns:
(list of) array(s) -- List of 2D arrays in native resolution in case `dst_res` is `None`
or a 3D array where all layers are resampled to `dst_res` resolution.
"""
if isinstance(ji_win, dict):
ji_windows = ji_win
else:
ji_windows = self.ji_windows(ji_win)
arrays = []
for filename, res in zip(self._layer_files, self._layer_resolution):
with rasterio.open(filename) as src:
arr = src.read(1, window=ji_windows[res])
arrays.append(arr)
if self.dst_res is not None:
arrays = self._resample(arrays=arrays, ji_windows=ji_windows)
return arrays | python | def get_arrays(self, ji_win):
"""Get the data of the a window given the ji_windows derived with :method:`ji_windows`.
Arguments:
ji_win {[type]} -- The index of the window or the (multi-resolution) windows returned by :meth:`ji_window`.
Returns:
(list of) array(s) -- List of 2D arrays in native resolution in case `dst_res` is `None`
or a 3D array where all layers are resampled to `dst_res` resolution.
"""
if isinstance(ji_win, dict):
ji_windows = ji_win
else:
ji_windows = self.ji_windows(ji_win)
arrays = []
for filename, res in zip(self._layer_files, self._layer_resolution):
with rasterio.open(filename) as src:
arr = src.read(1, window=ji_windows[res])
arrays.append(arr)
if self.dst_res is not None:
arrays = self._resample(arrays=arrays, ji_windows=ji_windows)
return arrays | Get the data of the a window given the ji_windows derived with :method:`ji_windows`.
Arguments:
ji_win {[type]} -- The index of the window or the (multi-resolution) windows returned by :meth:`ji_window`.
Returns:
(list of) array(s) -- List of 2D arrays in native resolution in case `dst_res` is `None`
or a 3D array where all layers are resampled to `dst_res` resolution. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/rasterprocessing.py#L148-L170 |
benmack/eo-box | eobox/raster/rasterprocessing.py | MultiRasterIO._resample | def _resample(self, arrays, ji_windows):
"""Resample all arrays with potentially different resolutions to a common resolution."""
# get a destination array template
win_dst = ji_windows[self.dst_res]
aff_dst = self._layer_meta[self._res_indices[self.dst_res][0]]["transform"]
arrays_dst = list()
for i, array in enumerate(arrays):
arr_dst = np.zeros((int(win_dst.height), int(win_dst.width)))
if self._layer_resolution[i] > self.dst_res:
resampling = getattr(Resampling, self.upsampler)
elif self._layer_resolution[i] < self.dst_res:
resampling = getattr(Resampling, self.downsampler)
else:
arrays_dst.append(array.copy())
continue
reproject(array, arr_dst, # arr_dst[0, :, :, i],
src_transform=self._layer_meta[i]["transform"],
dst_transform=aff_dst,
src_crs=self._layer_meta[0]["crs"],
dst_crs=self._layer_meta[0]["crs"],
resampling=resampling)
arrays_dst.append(arr_dst.copy())
arrays_dst = np.stack(arrays_dst, axis=2) # n_images x n x m x 10 would be the synergise format
return arrays_dst | python | def _resample(self, arrays, ji_windows):
"""Resample all arrays with potentially different resolutions to a common resolution."""
# get a destination array template
win_dst = ji_windows[self.dst_res]
aff_dst = self._layer_meta[self._res_indices[self.dst_res][0]]["transform"]
arrays_dst = list()
for i, array in enumerate(arrays):
arr_dst = np.zeros((int(win_dst.height), int(win_dst.width)))
if self._layer_resolution[i] > self.dst_res:
resampling = getattr(Resampling, self.upsampler)
elif self._layer_resolution[i] < self.dst_res:
resampling = getattr(Resampling, self.downsampler)
else:
arrays_dst.append(array.copy())
continue
reproject(array, arr_dst, # arr_dst[0, :, :, i],
src_transform=self._layer_meta[i]["transform"],
dst_transform=aff_dst,
src_crs=self._layer_meta[0]["crs"],
dst_crs=self._layer_meta[0]["crs"],
resampling=resampling)
arrays_dst.append(arr_dst.copy())
arrays_dst = np.stack(arrays_dst, axis=2) # n_images x n x m x 10 would be the synergise format
return arrays_dst | Resample all arrays with potentially different resolutions to a common resolution. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/rasterprocessing.py#L172-L195 |
benmack/eo-box | eobox/raster/rasterprocessing.py | MultiRasterIO._process_windows_merge_stack | def _process_windows_merge_stack(self, func, **kwargs):
"""Load (resampled) array of all windows, apply custom function on it, merge and stack results to one array."""
ji_results = self._process_windows(func, **kwargs)
for idx_layer in range(len(ji_results[0])): # this is the number of output layers
for j in np.unique(self.windows_row):
win_indices_j = np.where(self.windows_row == j)[0]
layer_merged_j = np.hstack([ji_results[idx][idx_layer] for idx in win_indices_j])
if j == 0:
layer_merged = layer_merged_j
else:
layer_merged = np.vstack([layer_merged, layer_merged_j])
if idx_layer == 0:
layers_merged = layer_merged
else:
layers_merged = np.stack([layers_merged, layer_merged], axis=2)
return layers_merged | python | def _process_windows_merge_stack(self, func, **kwargs):
"""Load (resampled) array of all windows, apply custom function on it, merge and stack results to one array."""
ji_results = self._process_windows(func, **kwargs)
for idx_layer in range(len(ji_results[0])): # this is the number of output layers
for j in np.unique(self.windows_row):
win_indices_j = np.where(self.windows_row == j)[0]
layer_merged_j = np.hstack([ji_results[idx][idx_layer] for idx in win_indices_j])
if j == 0:
layer_merged = layer_merged_j
else:
layer_merged = np.vstack([layer_merged, layer_merged_j])
if idx_layer == 0:
layers_merged = layer_merged
else:
layers_merged = np.stack([layers_merged, layer_merged], axis=2)
return layers_merged | Load (resampled) array of all windows, apply custom function on it, merge and stack results to one array. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/rasterprocessing.py#L228-L243 |
benmack/eo-box | eobox/raster/rasterprocessing.py | MultiRasterIO._process_windows | def _process_windows(self, func, **kwargs):
"""Load (resampled) array of all windows and apply custom function on it."""
ji_results = []
for ji_win in range(len(self.windows)):
ji_results.append(self._process_window(ji_win, func, **kwargs))
return ji_results | python | def _process_windows(self, func, **kwargs):
"""Load (resampled) array of all windows and apply custom function on it."""
ji_results = []
for ji_win in range(len(self.windows)):
ji_results.append(self._process_window(ji_win, func, **kwargs))
return ji_results | Load (resampled) array of all windows and apply custom function on it. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/rasterprocessing.py#L245-L250 |
benmack/eo-box | eobox/raster/rasterprocessing.py | MultiRasterIO._process_window | def _process_window(self, ji_win, func, **kwargs):
"""Load (resampled) array of window ji_win and apply custom function on it. """
arr = self.get_arrays(ji_win)
result = func(arr, **kwargs)
return result | python | def _process_window(self, ji_win, func, **kwargs):
"""Load (resampled) array of window ji_win and apply custom function on it. """
arr = self.get_arrays(ji_win)
result = func(arr, **kwargs)
return result | Load (resampled) array of window ji_win and apply custom function on it. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/rasterprocessing.py#L252-L256 |
benmack/eo-box | eobox/raster/rasterprocessing.py | MultiRasterIO.get_window_from_xy | def get_window_from_xy(self, xy):
"""Get the window index given a coordinate (raster CRS)."""
a_transform = self._get_template_for_given_resolution(res=self.dst_res, return_="meta")["transform"]
row, col = transform.rowcol(a_transform, xy[0], xy[1])
ij_containing_xy = None
for ji, win in enumerate(self.windows):
(row_start, row_end), (col_start, col_end) = rasterio.windows.toranges(win)
# print(row, col, row_start, row_end, col_start, col_end)
if ((col >= col_start) & (col < col_end)) & ((row >= row_start) & (row < row_end)):
ij_containing_xy = ji
break
if ij_containing_xy is None:
raise ValueError("The given 'xy' value is not contained in any window.")
return ij_containing_xy | python | def get_window_from_xy(self, xy):
"""Get the window index given a coordinate (raster CRS)."""
a_transform = self._get_template_for_given_resolution(res=self.dst_res, return_="meta")["transform"]
row, col = transform.rowcol(a_transform, xy[0], xy[1])
ij_containing_xy = None
for ji, win in enumerate(self.windows):
(row_start, row_end), (col_start, col_end) = rasterio.windows.toranges(win)
# print(row, col, row_start, row_end, col_start, col_end)
if ((col >= col_start) & (col < col_end)) & ((row >= row_start) & (row < row_end)):
ij_containing_xy = ji
break
if ij_containing_xy is None:
raise ValueError("The given 'xy' value is not contained in any window.")
return ij_containing_xy | Get the window index given a coordinate (raster CRS). | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/rasterprocessing.py#L258-L271 |
benmack/eo-box | eobox/raster/extraction.py | extract | def extract(src_vector: str,
burn_attribute: str,
src_raster: list,
dst_names: list,
dst_dir: str,
src_raster_template: str = None,
gdal_dtype: int = 4,
n_jobs: int = 1):
"""Extract values from list of single band raster for pixels overlapping with a vector data.
The extracted data will be stored in the ``dst_dir`` by using the ``dst_names`` for the
filename. If a file with a given name already exists the raster will be skipped.
Arguments:
src_vector {str} -- Filename of the vector dataset. Currently it must have the same CRS as
the raster.
burn_attribute {str} -- Name of the attribute column in the ``src_vector`` dataset to be
stored with the extracted data. This should usually be a unique ID for the features
(points, lines, polygons) in the vector dataset.
src_raster {list} -- List of filenames of the single band raster files from which to
extract.
dst_names {list} -- List corresponding to ``src_raster`` names used to store and later
identify the extracted to.
dst_dir {str} -- Directory to store the data to.
Keyword Arguments:
src_raster_template {str} -- A template raster to be used for rasterizing the vectorfile.
Usually the first element of ``src_raster``. (default: {None})
gdal_dtype {int} -- Numeric GDAL data type, defaults to 4 which is UInt32.
See https://github.com/mapbox/rasterio/blob/master/rasterio/dtypes.py for useful look-up
tables.
Returns:
[int] -- If successful, 0 is returned as exit code.
"""
if src_raster_template is None:
src_raster_template = src_raster[0]
path_rasterized = os.path.join(dst_dir, f"burn_attribute_rasterized_{burn_attribute}.tif")
paths_extracted_aux = {ele: os.path.join(dst_dir, f"{ele}.npy") \
for ele in [f"aux_vector_{burn_attribute}",
"aux_coord_x",
"aux_coord_y"]}
paths_extracted_raster = {}
for path, name in zip(src_raster, dst_names):
dst = f"{os.path.join(dst_dir, name)}.npy"
if not os.path.exists(dst):
paths_extracted_raster[path] = dst
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
# if it does not already exist, here we first create the rasterized data
if not os.path.exists(path_rasterized):
if src_raster_template is None:
src_raster_template = src_raster[0]
# print("Rasterizing vector attribute.")
rasterize(src_vector=src_vector,
burn_attribute=burn_attribute,
src_raster_template=src_raster_template,
dst_rasterized=path_rasterized,
gdal_dtype=gdal_dtype)
# if any of the destination files do not exist we need the locations of the pixels to be
# extracted in form of a numpy array bool (mask_arr) that fits the rasters from which we will
# extract below
if not (all([os.path.exists(path) for path in paths_extracted_aux.values()]) and \
all([os.path.exists(path) for path in paths_extracted_raster.values()])):
# print("Creating mask array for pixels to be extracted.")
mask_arr = _get_mask_array(path_rasterized, paths_extracted_aux, burn_attribute)
else:
return 0
# create the pixel coordinates if they do not exist
if not all([os.path.exists(paths_extracted_aux["aux_coord_x"]),
os.path.exists(paths_extracted_aux["aux_coord_y"])]):
_create_and_save_coords(path_rasterized, paths_extracted_aux, mask_arr)
# lets extract the raster values in case of sequential processing
# or remove existing raster layers to prepare parallel processing
if n_jobs == 1:
for path_src, path_dst in tqdm(paths_extracted_raster.items(),
total=len(paths_extracted_raster)):
_extract_and_save_one_layer(path_src, path_dst, mask_arr)
else:
import multiprocessing as mp
if n_jobs == -1:
n_jobs = mp.cpu_count()
pool = mp.Pool(processes=n_jobs)
_ = [pool.apply_async(_extract_and_save_one_layer,
args=(src, dst, mask_arr)) for \
src, dst in paths_extracted_raster.items()]
pool.close()
pool.join()
return 0 | python | def extract(src_vector: str,
burn_attribute: str,
src_raster: list,
dst_names: list,
dst_dir: str,
src_raster_template: str = None,
gdal_dtype: int = 4,
n_jobs: int = 1):
"""Extract values from list of single band raster for pixels overlapping with a vector data.
The extracted data will be stored in the ``dst_dir`` by using the ``dst_names`` for the
filename. If a file with a given name already exists the raster will be skipped.
Arguments:
src_vector {str} -- Filename of the vector dataset. Currently it must have the same CRS as
the raster.
burn_attribute {str} -- Name of the attribute column in the ``src_vector`` dataset to be
stored with the extracted data. This should usually be a unique ID for the features
(points, lines, polygons) in the vector dataset.
src_raster {list} -- List of filenames of the single band raster files from which to
extract.
dst_names {list} -- List corresponding to ``src_raster`` names used to store and later
identify the extracted to.
dst_dir {str} -- Directory to store the data to.
Keyword Arguments:
src_raster_template {str} -- A template raster to be used for rasterizing the vectorfile.
Usually the first element of ``src_raster``. (default: {None})
gdal_dtype {int} -- Numeric GDAL data type, defaults to 4 which is UInt32.
See https://github.com/mapbox/rasterio/blob/master/rasterio/dtypes.py for useful look-up
tables.
Returns:
[int] -- If successful, 0 is returned as exit code.
"""
if src_raster_template is None:
src_raster_template = src_raster[0]
path_rasterized = os.path.join(dst_dir, f"burn_attribute_rasterized_{burn_attribute}.tif")
paths_extracted_aux = {ele: os.path.join(dst_dir, f"{ele}.npy") \
for ele in [f"aux_vector_{burn_attribute}",
"aux_coord_x",
"aux_coord_y"]}
paths_extracted_raster = {}
for path, name in zip(src_raster, dst_names):
dst = f"{os.path.join(dst_dir, name)}.npy"
if not os.path.exists(dst):
paths_extracted_raster[path] = dst
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
# if it does not already exist, here we first create the rasterized data
if not os.path.exists(path_rasterized):
if src_raster_template is None:
src_raster_template = src_raster[0]
# print("Rasterizing vector attribute.")
rasterize(src_vector=src_vector,
burn_attribute=burn_attribute,
src_raster_template=src_raster_template,
dst_rasterized=path_rasterized,
gdal_dtype=gdal_dtype)
# if any of the destination files do not exist we need the locations of the pixels to be
# extracted in form of a numpy array bool (mask_arr) that fits the rasters from which we will
# extract below
if not (all([os.path.exists(path) for path in paths_extracted_aux.values()]) and \
all([os.path.exists(path) for path in paths_extracted_raster.values()])):
# print("Creating mask array for pixels to be extracted.")
mask_arr = _get_mask_array(path_rasterized, paths_extracted_aux, burn_attribute)
else:
return 0
# create the pixel coordinates if they do not exist
if not all([os.path.exists(paths_extracted_aux["aux_coord_x"]),
os.path.exists(paths_extracted_aux["aux_coord_y"])]):
_create_and_save_coords(path_rasterized, paths_extracted_aux, mask_arr)
# lets extract the raster values in case of sequential processing
# or remove existing raster layers to prepare parallel processing
if n_jobs == 1:
for path_src, path_dst in tqdm(paths_extracted_raster.items(),
total=len(paths_extracted_raster)):
_extract_and_save_one_layer(path_src, path_dst, mask_arr)
else:
import multiprocessing as mp
if n_jobs == -1:
n_jobs = mp.cpu_count()
pool = mp.Pool(processes=n_jobs)
_ = [pool.apply_async(_extract_and_save_one_layer,
args=(src, dst, mask_arr)) for \
src, dst in paths_extracted_raster.items()]
pool.close()
pool.join()
return 0 | Extract values from list of single band raster for pixels overlapping with a vector data.
The extracted data will be stored in the ``dst_dir`` by using the ``dst_names`` for the
filename. If a file with a given name already exists the raster will be skipped.
Arguments:
src_vector {str} -- Filename of the vector dataset. Currently it must have the same CRS as
the raster.
burn_attribute {str} -- Name of the attribute column in the ``src_vector`` dataset to be
stored with the extracted data. This should usually be a unique ID for the features
(points, lines, polygons) in the vector dataset.
src_raster {list} -- List of filenames of the single band raster files from which to
extract.
dst_names {list} -- List corresponding to ``src_raster`` names used to store and later
identify the extracted to.
dst_dir {str} -- Directory to store the data to.
Keyword Arguments:
src_raster_template {str} -- A template raster to be used for rasterizing the vectorfile.
Usually the first element of ``src_raster``. (default: {None})
gdal_dtype {int} -- Numeric GDAL data type, defaults to 4 which is UInt32.
See https://github.com/mapbox/rasterio/blob/master/rasterio/dtypes.py for useful look-up
tables.
Returns:
[int] -- If successful, 0 is returned as exit code. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/extraction.py#L14-L106 |
benmack/eo-box | eobox/raster/extraction.py | load_extracted | def load_extracted(src_dir: str,
patterns="*.npy",
vars_in_cols: bool = True,
index: pd.Series = None):
"""Load data extracted and stored by :py:func:`extract`
Arguments:
src_dir {str} -- The directory where the data is stored.
Keyword Arguments:
patterns {str, or list of str} -- A pattern (str) or list of patterns (list)
to identify the variables to be loaded.
The default loads all variables, i.e. all .npy files. (default: {'*.npy'})
vars_in_cols {bool} -- Return the variables in columns (``True``) or rows ``False``
(default: {True})
index {pd.Series} -- A boolean pandas Series which indicates with ``True`` which samples to
load.
Returns:
pandas.DataFrame -- A dataframe with the data.
"""
def _load(path, index):
if index is None:
arr = np.load(str(path))
else:
arr = np.load(str(path), mmap_mode="r")[index]
return arr
src_dir = Path(src_dir)
paths = []
if isinstance(patterns, str):
patterns = [patterns]
for pat in patterns:
paths += src_dir.glob(pat)
if vars_in_cols:
df_data = {}
for path in paths:
df_data[path.stem] = _load(path, index)
df_data = pd.DataFrame(df_data)
if index is not None:
df_data.index = index.index[index]
else:
df_data = []
for path in paths:
arr = _load(path, index)
df_data.append(pd.DataFrame(np.expand_dims(arr, 0), index=[path.stem]))
df_data = pd.concat(df_data)
if index is not None:
df_data.columns = index.index[index]
return df_data | python | def load_extracted(src_dir: str,
patterns="*.npy",
vars_in_cols: bool = True,
index: pd.Series = None):
"""Load data extracted and stored by :py:func:`extract`
Arguments:
src_dir {str} -- The directory where the data is stored.
Keyword Arguments:
patterns {str, or list of str} -- A pattern (str) or list of patterns (list)
to identify the variables to be loaded.
The default loads all variables, i.e. all .npy files. (default: {'*.npy'})
vars_in_cols {bool} -- Return the variables in columns (``True``) or rows ``False``
(default: {True})
index {pd.Series} -- A boolean pandas Series which indicates with ``True`` which samples to
load.
Returns:
pandas.DataFrame -- A dataframe with the data.
"""
def _load(path, index):
if index is None:
arr = np.load(str(path))
else:
arr = np.load(str(path), mmap_mode="r")[index]
return arr
src_dir = Path(src_dir)
paths = []
if isinstance(patterns, str):
patterns = [patterns]
for pat in patterns:
paths += src_dir.glob(pat)
if vars_in_cols:
df_data = {}
for path in paths:
df_data[path.stem] = _load(path, index)
df_data = pd.DataFrame(df_data)
if index is not None:
df_data.index = index.index[index]
else:
df_data = []
for path in paths:
arr = _load(path, index)
df_data.append(pd.DataFrame(np.expand_dims(arr, 0), index=[path.stem]))
df_data = pd.concat(df_data)
if index is not None:
df_data.columns = index.index[index]
return df_data | Load data extracted and stored by :py:func:`extract`
Arguments:
src_dir {str} -- The directory where the data is stored.
Keyword Arguments:
patterns {str, or list of str} -- A pattern (str) or list of patterns (list)
to identify the variables to be loaded.
The default loads all variables, i.e. all .npy files. (default: {'*.npy'})
vars_in_cols {bool} -- Return the variables in columns (``True``) or rows ``False``
(default: {True})
index {pd.Series} -- A boolean pandas Series which indicates with ``True`` which samples to
load.
Returns:
pandas.DataFrame -- A dataframe with the data. | https://github.com/benmack/eo-box/blob/a291450c766bf50ea06adcdeb5729a4aad790ed5/eobox/raster/extraction.py#L145-L195 |
Tenchi2xh/Almonds | almonds/plane.py | Plane.extrema | def extrema(self, x0, y0, w, h):
"""
Returns the minimum and maximum values contained in a given area.
:param x0: Starting x index.
:param y0: Starting y index.
:param w: Width of the area to scan.
:param h: Height of the area to scan.
:return: Tuple containing the minimum and maximum values of the given area.
"""
minimum = 9223372036854775807
maximum = 0
for y in range(y0, y0 + h):
for x in range(x0, x0 + w):
value = self[x, y]
if value != self.filler:
minimum = min(minimum, value)
maximum = max(maximum, value)
return minimum, maximum | python | def extrema(self, x0, y0, w, h):
"""
Returns the minimum and maximum values contained in a given area.
:param x0: Starting x index.
:param y0: Starting y index.
:param w: Width of the area to scan.
:param h: Height of the area to scan.
:return: Tuple containing the minimum and maximum values of the given area.
"""
minimum = 9223372036854775807
maximum = 0
for y in range(y0, y0 + h):
for x in range(x0, x0 + w):
value = self[x, y]
if value != self.filler:
minimum = min(minimum, value)
maximum = max(maximum, value)
return minimum, maximum | Returns the minimum and maximum values contained in a given area.
:param x0: Starting x index.
:param y0: Starting y index.
:param w: Width of the area to scan.
:param h: Height of the area to scan.
:return: Tuple containing the minimum and maximum values of the given area. | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/plane.py#L25-L43 |
Tenchi2xh/Almonds | almonds/cursebox/colortrans.py | rgb2short | def rgb2short(rgb):
""" Find the closest xterm-256 approximation to the given RGB value.
@param rgb: Hex code representing an RGB value, eg, 'abcdef'
@returns: String between 0 and 255, compatible with xterm.
>>> rgb2short('123456')
('23', '005f5f')
>>> rgb2short('ffffff')
('231', 'ffffff')
>>> rgb2short('0DADD6') # vimeo logo
('38', '00afd7')
"""
incs = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
# Break 6-char RGB code into 3 integer vals.
parts = [ int(h, 16) for h in re.split(r'(..)(..)(..)', rgb)[1:4] ]
res = []
for part in parts:
i = 0
while i < len(incs)-1:
s, b = incs[i], incs[i+1] # smaller, bigger
if s <= part <= b:
s1 = abs(s - part)
b1 = abs(b - part)
if s1 < b1: closest = s
else: closest = b
res.append(closest)
break
i += 1
#print '***', res
res = ''.join([ ('%02.x' % i) for i in res ])
equiv = RGB2SHORT_DICT[ res ]
#print '***', res, equiv
return equiv, res | python | def rgb2short(rgb):
""" Find the closest xterm-256 approximation to the given RGB value.
@param rgb: Hex code representing an RGB value, eg, 'abcdef'
@returns: String between 0 and 255, compatible with xterm.
>>> rgb2short('123456')
('23', '005f5f')
>>> rgb2short('ffffff')
('231', 'ffffff')
>>> rgb2short('0DADD6') # vimeo logo
('38', '00afd7')
"""
incs = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
# Break 6-char RGB code into 3 integer vals.
parts = [ int(h, 16) for h in re.split(r'(..)(..)(..)', rgb)[1:4] ]
res = []
for part in parts:
i = 0
while i < len(incs)-1:
s, b = incs[i], incs[i+1] # smaller, bigger
if s <= part <= b:
s1 = abs(s - part)
b1 = abs(b - part)
if s1 < b1: closest = s
else: closest = b
res.append(closest)
break
i += 1
#print '***', res
res = ''.join([ ('%02.x' % i) for i in res ])
equiv = RGB2SHORT_DICT[ res ]
#print '***', res, equiv
return equiv, res | Find the closest xterm-256 approximation to the given RGB value.
@param rgb: Hex code representing an RGB value, eg, 'abcdef'
@returns: String between 0 and 255, compatible with xterm.
>>> rgb2short('123456')
('23', '005f5f')
>>> rgb2short('ffffff')
('231', 'ffffff')
>>> rgb2short('0DADD6') # vimeo logo
('38', '00afd7') | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/cursebox/colortrans.py#L298-L329 |
Tenchi2xh/Almonds | almonds/cursebox/cursebox.py | Cursebox.set_cursor | def set_cursor(self, x, y):
"""
Sets the cursor to the desired position.
:param x: X position
:param y: Y position
"""
curses.curs_set(1)
self.screen.move(y, x) | python | def set_cursor(self, x, y):
"""
Sets the cursor to the desired position.
:param x: X position
:param y: Y position
"""
curses.curs_set(1)
self.screen.move(y, x) | Sets the cursor to the desired position.
:param x: X position
:param y: Y position | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/cursebox/cursebox.py#L37-L45 |
Tenchi2xh/Almonds | almonds/cursebox/cursebox.py | Cursebox.put | def put(self, x, y, text, fg, bg):
"""
Puts a string at the desired coordinates using the provided colors.
:param x: X position
:param y: Y position
:param text: Text to write
:param fg: Foreground color number
:param bg: Background color number
"""
if x < self.width and y < self.height:
try:
self.screen.addstr(y, x, symbols.encode(text), self.pairs[fg, bg])
except curses.error:
# Ignore out of bounds error
pass | python | def put(self, x, y, text, fg, bg):
"""
Puts a string at the desired coordinates using the provided colors.
:param x: X position
:param y: Y position
:param text: Text to write
:param fg: Foreground color number
:param bg: Background color number
"""
if x < self.width and y < self.height:
try:
self.screen.addstr(y, x, symbols.encode(text), self.pairs[fg, bg])
except curses.error:
# Ignore out of bounds error
pass | Puts a string at the desired coordinates using the provided colors.
:param x: X position
:param y: Y position
:param text: Text to write
:param fg: Foreground color number
:param bg: Background color number | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/cursebox/cursebox.py#L53-L68 |
Tenchi2xh/Almonds | almonds/cursebox/cursebox.py | Cursebox.poll_event | def poll_event(self):
"""
Waits for an event to happen and returns a string related to the event.
If the event is a normal (letter) key press, the letter is returned (case sensitive)
:return: Event type
"""
# Flush all inputs before this one that were done since last poll
curses.flushinp()
ch = self.screen.getch()
if ch == 27:
return EVENT_ESC
elif ch == -1 or ch == curses.KEY_RESIZE:
return EVENT_RESIZE
elif ch == 10 or ch == curses.KEY_ENTER:
return EVENT_ENTER
elif ch == 127 or ch == curses.KEY_BACKSPACE:
return EVENT_BACKSPACE
elif ch == curses.KEY_UP:
return EVENT_UP
elif ch == curses.KEY_DOWN:
return EVENT_DOWN
elif ch == curses.KEY_LEFT:
return EVENT_LEFT
elif ch == curses.KEY_RIGHT:
return EVENT_RIGHT
elif ch == 3:
return EVENT_CTRL_C
elif 0 <= ch < 256:
return chr(ch)
else:
return EVENT_UNHANDLED | python | def poll_event(self):
"""
Waits for an event to happen and returns a string related to the event.
If the event is a normal (letter) key press, the letter is returned (case sensitive)
:return: Event type
"""
# Flush all inputs before this one that were done since last poll
curses.flushinp()
ch = self.screen.getch()
if ch == 27:
return EVENT_ESC
elif ch == -1 or ch == curses.KEY_RESIZE:
return EVENT_RESIZE
elif ch == 10 or ch == curses.KEY_ENTER:
return EVENT_ENTER
elif ch == 127 or ch == curses.KEY_BACKSPACE:
return EVENT_BACKSPACE
elif ch == curses.KEY_UP:
return EVENT_UP
elif ch == curses.KEY_DOWN:
return EVENT_DOWN
elif ch == curses.KEY_LEFT:
return EVENT_LEFT
elif ch == curses.KEY_RIGHT:
return EVENT_RIGHT
elif ch == 3:
return EVENT_CTRL_C
elif 0 <= ch < 256:
return chr(ch)
else:
return EVENT_UNHANDLED | Waits for an event to happen and returns a string related to the event.
If the event is a normal (letter) key press, the letter is returned (case sensitive)
:return: Event type | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/cursebox/cursebox.py#L107-L141 |
Tenchi2xh/Almonds | almonds/almonds.py | compute | def compute(args):
x, y, params = args
"""Callable function for the multiprocessing pool."""
return x, y, mandelbrot(x, y, params) | python | def compute(args):
x, y, params = args
"""Callable function for the multiprocessing pool."""
return x, y, mandelbrot(x, y, params) | Callable function for the multiprocessing pool. | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/almonds.py#L31-L34 |
Tenchi2xh/Almonds | almonds/almonds.py | compute_capture | def compute_capture(args):
x, y, w, h, params = args
"""Callable function for the multiprocessing pool."""
return x, y, mandelbrot_capture(x, y, w, h, params) | python | def compute_capture(args):
x, y, w, h, params = args
"""Callable function for the multiprocessing pool."""
return x, y, mandelbrot_capture(x, y, w, h, params) | Callable function for the multiprocessing pool. | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/almonds.py#L37-L40 |
Tenchi2xh/Almonds | almonds/almonds.py | draw_panel | def draw_panel(cb, pool, params, plane):
"""
Draws the application's main panel, displaying the current Mandelbrot view.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
:param plane: Plane containing the current Mandelbrot values.
:type plane: plane.Plane
"""
w = cb.width - MENU_WIDTH - 1
h = cb.height - 1
params.plane_w = w
params.plane_h = h
params.resize(w, h)
palette = PALETTES[params.palette][1]
if params.reverse_palette:
palette = palette[::-1]
# draw_gradient(t, 1, 1, w, h, palette, params.dither_type)
generated = 0
missing_coords = []
# Check for coordinates that have no value in current plane
xs = range(params.plane_x0, params.plane_x0 + params.plane_w - 1)
ys = range(params.plane_y0, params.plane_y0 + params.plane_h - 1)
for x in xs:
for y in ys:
if plane[x, y] is None:
missing_coords.append((x, y, params))
generated += 1
# Compute all missing values via multiprocessing
n_processes = 0
if len(missing_coords) > 0:
n_cores = pool._processes
n_processes = len(missing_coords) // 256
if n_processes > n_cores:
n_processes = n_cores
start = time.time()
for i, result in enumerate(pool.imap_unordered(compute, missing_coords, chunksize=256)):
plane[result[0], result[1]] = result[2]
if time.time() - start > 2:
if i % 200 == 0:
draw_progress_bar(cb, "Render is taking a longer time...", i, len(missing_coords))
cb.refresh()
if generated > 0:
params.log("Added %d missing cells" % generated)
if n_processes > 1:
params.log("(Used %d processes)" % n_processes)
min_value = 0.0
max_value = params.max_iterations
max_iterations = params.max_iterations
if params.adaptive_palette:
min_value, max_value = plane.extrema(params.plane_x0, params.plane_y0,
params.plane_w, params.plane_h)
crosshairs_coord = None
if params.crosshairs:
crosshairs_coord = params.crosshairs_coord
# Draw all values in cursebox
for x in xs:
for y in ys:
value = (plane[x, y] + params.palette_offset) % (params.max_iterations + 1)
if params.adaptive_palette:
# Remap values from (min_value, max_value) to (0, max_iterations)
if max_value - min_value > 0:
value = ((value - min_value) / (max_value - min_value)) * max_iterations
else:
value = max_iterations
# Dithered mode
if params.dither_type < 2:
draw_dithered_color(cb, x - params.plane_x0 + 1,
y - params.plane_y0 + 1,
palette, params.dither_type,
value, max_iterations,
crosshairs_coord=crosshairs_coord)
# 256 colors mode
else:
draw_color(cb, x - params.plane_x0 + 1,
y - params.plane_y0 + 1,
value, max_iterations, palette,
crosshairs_coord=crosshairs_coord)
# Draw bounding box
draw_box(cb, 0, 0, w + 1, h + 1) | python | def draw_panel(cb, pool, params, plane):
"""
Draws the application's main panel, displaying the current Mandelbrot view.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
:param plane: Plane containing the current Mandelbrot values.
:type plane: plane.Plane
"""
w = cb.width - MENU_WIDTH - 1
h = cb.height - 1
params.plane_w = w
params.plane_h = h
params.resize(w, h)
palette = PALETTES[params.palette][1]
if params.reverse_palette:
palette = palette[::-1]
# draw_gradient(t, 1, 1, w, h, palette, params.dither_type)
generated = 0
missing_coords = []
# Check for coordinates that have no value in current plane
xs = range(params.plane_x0, params.plane_x0 + params.plane_w - 1)
ys = range(params.plane_y0, params.plane_y0 + params.plane_h - 1)
for x in xs:
for y in ys:
if plane[x, y] is None:
missing_coords.append((x, y, params))
generated += 1
# Compute all missing values via multiprocessing
n_processes = 0
if len(missing_coords) > 0:
n_cores = pool._processes
n_processes = len(missing_coords) // 256
if n_processes > n_cores:
n_processes = n_cores
start = time.time()
for i, result in enumerate(pool.imap_unordered(compute, missing_coords, chunksize=256)):
plane[result[0], result[1]] = result[2]
if time.time() - start > 2:
if i % 200 == 0:
draw_progress_bar(cb, "Render is taking a longer time...", i, len(missing_coords))
cb.refresh()
if generated > 0:
params.log("Added %d missing cells" % generated)
if n_processes > 1:
params.log("(Used %d processes)" % n_processes)
min_value = 0.0
max_value = params.max_iterations
max_iterations = params.max_iterations
if params.adaptive_palette:
min_value, max_value = plane.extrema(params.plane_x0, params.plane_y0,
params.plane_w, params.plane_h)
crosshairs_coord = None
if params.crosshairs:
crosshairs_coord = params.crosshairs_coord
# Draw all values in cursebox
for x in xs:
for y in ys:
value = (plane[x, y] + params.palette_offset) % (params.max_iterations + 1)
if params.adaptive_palette:
# Remap values from (min_value, max_value) to (0, max_iterations)
if max_value - min_value > 0:
value = ((value - min_value) / (max_value - min_value)) * max_iterations
else:
value = max_iterations
# Dithered mode
if params.dither_type < 2:
draw_dithered_color(cb, x - params.plane_x0 + 1,
y - params.plane_y0 + 1,
palette, params.dither_type,
value, max_iterations,
crosshairs_coord=crosshairs_coord)
# 256 colors mode
else:
draw_color(cb, x - params.plane_x0 + 1,
y - params.plane_y0 + 1,
value, max_iterations, palette,
crosshairs_coord=crosshairs_coord)
# Draw bounding box
draw_box(cb, 0, 0, w + 1, h + 1) | Draws the application's main panel, displaying the current Mandelbrot view.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
:param plane: Plane containing the current Mandelbrot values.
:type plane: plane.Plane | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/almonds.py#L43-L138 |
Tenchi2xh/Almonds | almonds/almonds.py | draw_menu | def draw_menu(cb, params, qwertz):
"""
Draws the application's side menu and options.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
"""
w = cb.width
h = cb.height
x0 = w - MENU_WIDTH + 1
# Clear buffer inside the box
fill(cb, x0, 1, MENU_WIDTH, h - 2, " ")
def draw_option(key, value, shortcuts):
"""
Helper function to draw options. Self-increments own counter.
:param key: Name of the option.
:param value: Value of the option.
:param shortcuts: Keyboard shortcut keys.
:return:
"""
draw_text(cb, x0 + 1, 2 + draw_option.counter,
"%s %s %s" % (key, str(value).rjust(MENU_WIDTH - 14 - len(key)), shortcuts))
draw_option.counter += 1
draw_option.counter = 1
z = "Z"
y = "Y"
if qwertz:
z, y = y, z
h_seps = [2]
# Draw title
draw_text(cb, x0, 1, ("Almonds %s" % __version__).center(MENU_WIDTH - 2))
# Write options (and stats)
# Mandelbrot position
draw_option("Real", "{0:.13g}".format(params.mb_cx),
"$[" + symbols["ARROW_LEFT"] + "]$, $[" + symbols["ARROW_RIGHT"] + "]$")
draw_option("Imaginary", "{0:.13g}".format(params.mb_cy),
"$[" + symbols["ARROW_UP"] + "]$, $[" + symbols["ARROW_DOWN"] + "]$")
# FIXME: try to find a way to avoid this hack
if is_native_windows():
cb.put_arrow(x0 + 30, 3, "up", colors.default_bg(), colors.default_fg())
cb.put_arrow(x0 + 35, 3, "down", colors.default_bg(), colors.default_fg())
cb.put_arrow(x0 + 30, 4, "left", colors.default_bg(), colors.default_fg())
cb.put_arrow(x0 + 35, 4, "right", colors.default_bg(), colors.default_fg())
draw_option("Input coordinates...", "", "$[Enter]$")
draw_option.counter += 1
h_seps.append(draw_option.counter + 1)
# Mandelbrot options
draw_option("Move speed", params.move_speed, "$[C]$, $[V]$")
draw_option("Zoom", "{0:.13g}".format(params.zoom), "$[" + y + "]$, $[U]$")
draw_option("Iterations", params.max_iterations, "$[I]$, $[O]$")
draw_option("Julia mode", "On" if params.julia else "Off", "$[J]$")
draw_option.counter += 1
h_seps.append(draw_option.counter + 1)
# Palette options
draw_option("Palette", PALETTES[params.palette][0], "$[P]$")
draw_option("Color mode", DITHER_TYPES[params.dither_type][0], "$[D]$")
draw_option("Order", "Reversed" if params.reverse_palette else "Normal", "$[R]$")
draw_option("Mode", "Adaptive" if params.adaptive_palette else "Linear", "$[A]$")
draw_option("Cycle!", "", "$[" + z + "]$")
draw_option.counter += 1
h_seps.append(draw_option.counter + 1)
# Misc.
draw_option("Hi-res capture", "", "$[H]$")
draw_option("Crosshairs", "On" if params.crosshairs else "Off", "$[X]$")
draw_option("Theme", "Dark" if colors.dark else "Light", "$[T]$")
draw_option("Save", "", "$[S]$")
draw_option("Load...", "", "$[L]$")
draw_option("Exit", "", "$[ESC]$")
# Draw box with separators
middle = 3 + draw_option.counter
draw_box(cb, w - MENU_WIDTH, 0, MENU_WIDTH, h, h_seps=h_seps + [middle - 1, middle + 1])
# Draw log
draw_text(cb, x0, middle, "Event log".center(MENU_WIDTH - 2))
latest_logs = params.log.get_latest(h - middle)
latest_logs = [textwrap.wrap(l, MENU_WIDTH - 4)[::-1] for l in latest_logs] # Wrap all messages
latest_logs = [l for ls in latest_logs for l in ls] # Flatten [[str]] -> [str]
i = h - 2
for l in latest_logs:
draw_text(cb, x0 + 1, i, l)
i -= 1
if i == middle + 1:
break | python | def draw_menu(cb, params, qwertz):
"""
Draws the application's side menu and options.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
"""
w = cb.width
h = cb.height
x0 = w - MENU_WIDTH + 1
# Clear buffer inside the box
fill(cb, x0, 1, MENU_WIDTH, h - 2, " ")
def draw_option(key, value, shortcuts):
"""
Helper function to draw options. Self-increments own counter.
:param key: Name of the option.
:param value: Value of the option.
:param shortcuts: Keyboard shortcut keys.
:return:
"""
draw_text(cb, x0 + 1, 2 + draw_option.counter,
"%s %s %s" % (key, str(value).rjust(MENU_WIDTH - 14 - len(key)), shortcuts))
draw_option.counter += 1
draw_option.counter = 1
z = "Z"
y = "Y"
if qwertz:
z, y = y, z
h_seps = [2]
# Draw title
draw_text(cb, x0, 1, ("Almonds %s" % __version__).center(MENU_WIDTH - 2))
# Write options (and stats)
# Mandelbrot position
draw_option("Real", "{0:.13g}".format(params.mb_cx),
"$[" + symbols["ARROW_LEFT"] + "]$, $[" + symbols["ARROW_RIGHT"] + "]$")
draw_option("Imaginary", "{0:.13g}".format(params.mb_cy),
"$[" + symbols["ARROW_UP"] + "]$, $[" + symbols["ARROW_DOWN"] + "]$")
# FIXME: try to find a way to avoid this hack
if is_native_windows():
cb.put_arrow(x0 + 30, 3, "up", colors.default_bg(), colors.default_fg())
cb.put_arrow(x0 + 35, 3, "down", colors.default_bg(), colors.default_fg())
cb.put_arrow(x0 + 30, 4, "left", colors.default_bg(), colors.default_fg())
cb.put_arrow(x0 + 35, 4, "right", colors.default_bg(), colors.default_fg())
draw_option("Input coordinates...", "", "$[Enter]$")
draw_option.counter += 1
h_seps.append(draw_option.counter + 1)
# Mandelbrot options
draw_option("Move speed", params.move_speed, "$[C]$, $[V]$")
draw_option("Zoom", "{0:.13g}".format(params.zoom), "$[" + y + "]$, $[U]$")
draw_option("Iterations", params.max_iterations, "$[I]$, $[O]$")
draw_option("Julia mode", "On" if params.julia else "Off", "$[J]$")
draw_option.counter += 1
h_seps.append(draw_option.counter + 1)
# Palette options
draw_option("Palette", PALETTES[params.palette][0], "$[P]$")
draw_option("Color mode", DITHER_TYPES[params.dither_type][0], "$[D]$")
draw_option("Order", "Reversed" if params.reverse_palette else "Normal", "$[R]$")
draw_option("Mode", "Adaptive" if params.adaptive_palette else "Linear", "$[A]$")
draw_option("Cycle!", "", "$[" + z + "]$")
draw_option.counter += 1
h_seps.append(draw_option.counter + 1)
# Misc.
draw_option("Hi-res capture", "", "$[H]$")
draw_option("Crosshairs", "On" if params.crosshairs else "Off", "$[X]$")
draw_option("Theme", "Dark" if colors.dark else "Light", "$[T]$")
draw_option("Save", "", "$[S]$")
draw_option("Load...", "", "$[L]$")
draw_option("Exit", "", "$[ESC]$")
# Draw box with separators
middle = 3 + draw_option.counter
draw_box(cb, w - MENU_WIDTH, 0, MENU_WIDTH, h, h_seps=h_seps + [middle - 1, middle + 1])
# Draw log
draw_text(cb, x0, middle, "Event log".center(MENU_WIDTH - 2))
latest_logs = params.log.get_latest(h - middle)
latest_logs = [textwrap.wrap(l, MENU_WIDTH - 4)[::-1] for l in latest_logs] # Wrap all messages
latest_logs = [l for ls in latest_logs for l in ls] # Flatten [[str]] -> [str]
i = h - 2
for l in latest_logs:
draw_text(cb, x0 + 1, i, l)
i -= 1
if i == middle + 1:
break | Draws the application's side menu and options.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/almonds.py#L141-L232 |
Tenchi2xh/Almonds | almonds/almonds.py | update_display | def update_display(cb, pool, params, plane, qwertz):
"""
Draws everything.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
:param plane: Plane containing the current Mandelbrot values.
:type plane: plane.Plane
:return:
"""
cb.clear()
draw_panel(cb, pool, params, plane)
update_position(params) # Update Mandelbrot-space coordinates before drawing them
draw_menu(cb, params, qwertz)
cb.refresh() | python | def update_display(cb, pool, params, plane, qwertz):
"""
Draws everything.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
:param plane: Plane containing the current Mandelbrot values.
:type plane: plane.Plane
:return:
"""
cb.clear()
draw_panel(cb, pool, params, plane)
update_position(params) # Update Mandelbrot-space coordinates before drawing them
draw_menu(cb, params, qwertz)
cb.refresh() | Draws everything.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
:param plane: Plane containing the current Mandelbrot values.
:type plane: plane.Plane
:return: | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/almonds.py#L235-L251 |
Tenchi2xh/Almonds | almonds/almonds.py | save | def save(params):
"""
Saves the current parameters to a file.
:param params: Current application parameters.
:return:
"""
if is_python3():
import pickle
cPickle = pickle
else:
import cPickle
ts = datetime.datetime.fromtimestamp(time.time()).strftime("%Y-%m-%d_%H-%M-%S")
if not os.path.exists("saves/"):
os.makedirs("saves/")
with open("saves/almonds_%s.params" % ts, "wb") as f:
cPickle.dump(params, f)
params.log("Current scene saved!") | python | def save(params):
"""
Saves the current parameters to a file.
:param params: Current application parameters.
:return:
"""
if is_python3():
import pickle
cPickle = pickle
else:
import cPickle
ts = datetime.datetime.fromtimestamp(time.time()).strftime("%Y-%m-%d_%H-%M-%S")
if not os.path.exists("saves/"):
os.makedirs("saves/")
with open("saves/almonds_%s.params" % ts, "wb") as f:
cPickle.dump(params, f)
params.log("Current scene saved!") | Saves the current parameters to a file.
:param params: Current application parameters.
:return: | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/almonds.py#L254-L271 |
Tenchi2xh/Almonds | almonds/almonds.py | capture | def capture(cb, pool, params):
"""
Renders and saves a screen-sized picture of the current position.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
"""
w, h = screen_resolution()
# Re-adapt dimensions to match current plane ratio
old_ratio = w / h
new_ratio = params.plane_ratio
if old_ratio > new_ratio:
w = int(h * new_ratio)
else:
h = int(w / new_ratio)
image = Image.new("RGB", (w, h), "white")
pixels = image.load()
# FIXME: refactor common code to get_palette(params)
palette = PALETTES[params.palette][1]
if params.reverse_palette:
palette = palette[::-1]
# All coordinates to be computed as single arguments for processes
coords = [(x, y, w, h, params) for x in range(w) for y in range(h)]
results = []
# Dispatch work to pool and draw results as they come in
for i, result in enumerate(pool.imap_unordered(compute_capture, coords, chunksize=256)):
results.append(result)
if i % 2000 == 0:
draw_progress_bar(cb, "Capturing current scene...", i, w * h)
cb.refresh()
min_value = 0.0
max_value = params.max_iterations
max_iterations = params.max_iterations
if params.adaptive_palette:
from operator import itemgetter
min_value = min(results, key=itemgetter(2))[2]
max_value = max(results, key=itemgetter(2))[2]
# Draw pixels
for result in results:
value = result[2]
if params.adaptive_palette:
# Remap values from (min_value, max_value) to (0, max_iterations)
if max_value - min_value > 0:
value = ((value - min_value) / (max_value - min_value)) * max_iterations
else:
value = max_iterations
pixels[result[0], result[1]] = get_color(value, params.max_iterations, palette)
if not os.path.exists("captures/"):
os.makedirs("captures/")
ts = datetime.datetime.fromtimestamp(time.time()).strftime("%Y-%m-%d_%H-%M-%S")
filename = "captures/almonds_%s.png" % ts
image.save(filename, "PNG")
params.log("Current scene captured!")
params.log("(Used %d processes)" % pool._processes)
open_file(filename) | python | def capture(cb, pool, params):
"""
Renders and saves a screen-sized picture of the current position.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
"""
w, h = screen_resolution()
# Re-adapt dimensions to match current plane ratio
old_ratio = w / h
new_ratio = params.plane_ratio
if old_ratio > new_ratio:
w = int(h * new_ratio)
else:
h = int(w / new_ratio)
image = Image.new("RGB", (w, h), "white")
pixels = image.load()
# FIXME: refactor common code to get_palette(params)
palette = PALETTES[params.palette][1]
if params.reverse_palette:
palette = palette[::-1]
# All coordinates to be computed as single arguments for processes
coords = [(x, y, w, h, params) for x in range(w) for y in range(h)]
results = []
# Dispatch work to pool and draw results as they come in
for i, result in enumerate(pool.imap_unordered(compute_capture, coords, chunksize=256)):
results.append(result)
if i % 2000 == 0:
draw_progress_bar(cb, "Capturing current scene...", i, w * h)
cb.refresh()
min_value = 0.0
max_value = params.max_iterations
max_iterations = params.max_iterations
if params.adaptive_palette:
from operator import itemgetter
min_value = min(results, key=itemgetter(2))[2]
max_value = max(results, key=itemgetter(2))[2]
# Draw pixels
for result in results:
value = result[2]
if params.adaptive_palette:
# Remap values from (min_value, max_value) to (0, max_iterations)
if max_value - min_value > 0:
value = ((value - min_value) / (max_value - min_value)) * max_iterations
else:
value = max_iterations
pixels[result[0], result[1]] = get_color(value, params.max_iterations, palette)
if not os.path.exists("captures/"):
os.makedirs("captures/")
ts = datetime.datetime.fromtimestamp(time.time()).strftime("%Y-%m-%d_%H-%M-%S")
filename = "captures/almonds_%s.png" % ts
image.save(filename, "PNG")
params.log("Current scene captured!")
params.log("(Used %d processes)" % pool._processes)
open_file(filename) | Renders and saves a screen-sized picture of the current position.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/almonds.py#L274-L341 |
Tenchi2xh/Almonds | almonds/almonds.py | cycle | def cycle(cb, pool, params, plane):
"""
Fun function to do a palette cycling animation.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
:param plane: Plane containing the current Mandelbrot values.
:type plane: plane.Plane
:return:
"""
step = params.max_iterations // 20
if step == 0:
step = 1
for i in range(0, params.max_iterations, step):
params.palette_offset = i
draw_panel(cb, pool, params, plane)
cb.refresh()
params.palette_offset = 0 | python | def cycle(cb, pool, params, plane):
"""
Fun function to do a palette cycling animation.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
:param plane: Plane containing the current Mandelbrot values.
:type plane: plane.Plane
:return:
"""
step = params.max_iterations // 20
if step == 0:
step = 1
for i in range(0, params.max_iterations, step):
params.palette_offset = i
draw_panel(cb, pool, params, plane)
cb.refresh()
params.palette_offset = 0 | Fun function to do a palette cycling animation.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
:param plane: Plane containing the current Mandelbrot values.
:type plane: plane.Plane
:return: | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/almonds.py#L344-L363 |
Tenchi2xh/Almonds | almonds/almonds.py | init_coords | def init_coords(cb, params):
"""
Initializes coordinates and zoom for first use.
Loads coordinates from Mandelbrot-space.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
:return:
"""
w = cb.width - MENU_WIDTH - 1
h = cb.height - 1
params.plane_w = w
params.plane_h = h
params.resize(w, h)
zoom(params, 1) | python | def init_coords(cb, params):
"""
Initializes coordinates and zoom for first use.
Loads coordinates from Mandelbrot-space.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
:return:
"""
w = cb.width - MENU_WIDTH - 1
h = cb.height - 1
params.plane_w = w
params.plane_h = h
params.resize(w, h)
zoom(params, 1) | Initializes coordinates and zoom for first use.
Loads coordinates from Mandelbrot-space.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
:return: | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/almonds.py#L366-L385 |
Tenchi2xh/Almonds | almonds/utils.py | clamp | def clamp(n, lower, upper):
"""
Restricts the given number to a lower and upper bound (inclusive)
:param n: input number
:param lower: lower bound (inclusive)
:param upper: upper bound (inclusive)
:return: clamped number
"""
if lower > upper:
lower, upper = upper, lower
return max(min(upper, n), lower) | python | def clamp(n, lower, upper):
"""
Restricts the given number to a lower and upper bound (inclusive)
:param n: input number
:param lower: lower bound (inclusive)
:param upper: upper bound (inclusive)
:return: clamped number
"""
if lower > upper:
lower, upper = upper, lower
return max(min(upper, n), lower) | Restricts the given number to a lower and upper bound (inclusive)
:param n: input number
:param lower: lower bound (inclusive)
:param upper: upper bound (inclusive)
:return: clamped number | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/utils.py#L8-L19 |
Tenchi2xh/Almonds | almonds/utils.py | screen_resolution | def screen_resolution():
"""
Returns the current screen's resolution.
Should be multi-platform.
:return: A tuple containing the width and height of the screen.
"""
w = 0
h = 0
try:
# Windows
import ctypes
user32 = ctypes.windll.user32
w, h = user32.GetSystemMetrics(0), user32.GetSystemMetrics(1)
except AttributeError:
try:
# Mac OS X
import AppKit
size = AppKit.NSScreen.screens()[0].frame().size
w, h = int(size.width), int(size.height)
except ImportError:
try:
# Linux
import Xlib
import Xlib.display
display = Xlib.display.Display()
root = display.screen().root
size = root.get_geometry()
w, h = size.width, size.height
except ImportError:
w = 1920
h = 1080
return w, h | python | def screen_resolution():
"""
Returns the current screen's resolution.
Should be multi-platform.
:return: A tuple containing the width and height of the screen.
"""
w = 0
h = 0
try:
# Windows
import ctypes
user32 = ctypes.windll.user32
w, h = user32.GetSystemMetrics(0), user32.GetSystemMetrics(1)
except AttributeError:
try:
# Mac OS X
import AppKit
size = AppKit.NSScreen.screens()[0].frame().size
w, h = int(size.width), int(size.height)
except ImportError:
try:
# Linux
import Xlib
import Xlib.display
display = Xlib.display.Display()
root = display.screen().root
size = root.get_geometry()
w, h = size.width, size.height
except ImportError:
w = 1920
h = 1080
return w, h | Returns the current screen's resolution.
Should be multi-platform.
:return: A tuple containing the width and height of the screen. | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/utils.py#L22-L56 |
Tenchi2xh/Almonds | almonds/utils.py | open_file | def open_file(filename):
"""
Multi-platform way to make the OS open a file with its default application
"""
if sys.platform.startswith("darwin"):
subprocess.call(("open", filename))
elif sys.platform == "cygwin":
subprocess.call(("cygstart", filename))
elif os.name == "nt":
os.system("start %s" % filename)
elif os.name == "posix":
subprocess.call(("xdg-open", filename)) | python | def open_file(filename):
"""
Multi-platform way to make the OS open a file with its default application
"""
if sys.platform.startswith("darwin"):
subprocess.call(("open", filename))
elif sys.platform == "cygwin":
subprocess.call(("cygstart", filename))
elif os.name == "nt":
os.system("start %s" % filename)
elif os.name == "posix":
subprocess.call(("xdg-open", filename)) | Multi-platform way to make the OS open a file with its default application | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/utils.py#L59-L70 |
Tenchi2xh/Almonds | almonds/graphics/drawing.py | dither_symbol | def dither_symbol(value, dither):
"""
Returns the appropriate block drawing symbol for the given intensity.
:param value: intensity of the color, in the range [0.0, 1.0]
:return: dithered symbol representing that intensity
"""
dither = DITHER_TYPES[dither][1]
return dither[int(round(value * (len(dither) - 1)))] | python | def dither_symbol(value, dither):
"""
Returns the appropriate block drawing symbol for the given intensity.
:param value: intensity of the color, in the range [0.0, 1.0]
:return: dithered symbol representing that intensity
"""
dither = DITHER_TYPES[dither][1]
return dither[int(round(value * (len(dither) - 1)))] | Returns the appropriate block drawing symbol for the given intensity.
:param value: intensity of the color, in the range [0.0, 1.0]
:return: dithered symbol representing that intensity | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/graphics/drawing.py#L66-L73 |
Tenchi2xh/Almonds | almonds/graphics/drawing.py | draw_dithered_color | def draw_dithered_color(cb, x, y, palette, dither, n, n_max, crosshairs_coord=None):
"""
Draws a dithered color block on the terminal, given a palette.
:type cb: cursebox.CurseBox
"""
i = n * (len(palette) - 1) / n_max
c1 = palette[int(math.floor(i))]
c2 = palette[int(math.ceil(i))]
value = i - int(math.floor(i))
symbol = dither_symbol(value, dither)
if crosshairs_coord is not None:
old_symbol = symbol
symbol, crosshairs = get_crosshairs_symbol(x, y, old_symbol, crosshairs_coord)
if crosshairs:
sorted_palette = sort_palette(palette)
if old_symbol == DITHER_TYPES[dither][1][0]:
c2 = c1
sorted_index = sorted_palette.index(c2)
if sorted_index > len(sorted_palette) // 2:
c1 = sorted_palette[0]
else:
c1 = sorted_palette[-1]
cb.put(x, y, symbol, c1(), c2()) | python | def draw_dithered_color(cb, x, y, palette, dither, n, n_max, crosshairs_coord=None):
"""
Draws a dithered color block on the terminal, given a palette.
:type cb: cursebox.CurseBox
"""
i = n * (len(palette) - 1) / n_max
c1 = palette[int(math.floor(i))]
c2 = palette[int(math.ceil(i))]
value = i - int(math.floor(i))
symbol = dither_symbol(value, dither)
if crosshairs_coord is not None:
old_symbol = symbol
symbol, crosshairs = get_crosshairs_symbol(x, y, old_symbol, crosshairs_coord)
if crosshairs:
sorted_palette = sort_palette(palette)
if old_symbol == DITHER_TYPES[dither][1][0]:
c2 = c1
sorted_index = sorted_palette.index(c2)
if sorted_index > len(sorted_palette) // 2:
c1 = sorted_palette[0]
else:
c1 = sorted_palette[-1]
cb.put(x, y, symbol, c1(), c2()) | Draws a dithered color block on the terminal, given a palette.
:type cb: cursebox.CurseBox | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/graphics/drawing.py#L76-L101 |
Tenchi2xh/Almonds | almonds/graphics/drawing.py | draw_box | def draw_box(cb, x0, y0, w, h, fg=colors.default_fg, bg=colors.default_bg, h_seps=[], v_seps=[]):
"""
Draws a box in the given terminal.
:type cb: cursebox.CurseBox
"""
w -= 1
h -= 1
corners = [(x0, y0), (x0 + w, y0), (x0, y0 + h), (x0 + w, y0 + h)]
fg = fg()
bg = bg()
for i, c in enumerate(corners):
cb.put(c[0], c[1], BOX_CORNERS[i], fg, bg)
for s in h_seps + [0, h]:
cb.put(x0 + 1, y0 + s, symbols["BOX_HORIZONTAL"] * (w - 1), fg, bg)
for y in range(1, h):
for s in v_seps + [0, w]:
cb.put(x0 + s, y0 + y, symbols["BOX_VERTICAL"], fg, bg)
for s in h_seps:
cb.put(x0, y0 + s, symbols["BOX_X_LEFT"], fg, bg)
cb.put(x0 + w, y0 + s, symbols["BOX_X_RIGHT"], fg, bg)
for s in v_seps:
cb.put(x0 + s, y0, symbols["BOX_X_TOP"], fg, bg)
cb.put(x0 + s, y0 + h, symbols["BOX_X_BOTTOM"], fg, bg) | python | def draw_box(cb, x0, y0, w, h, fg=colors.default_fg, bg=colors.default_bg, h_seps=[], v_seps=[]):
"""
Draws a box in the given terminal.
:type cb: cursebox.CurseBox
"""
w -= 1
h -= 1
corners = [(x0, y0), (x0 + w, y0), (x0, y0 + h), (x0 + w, y0 + h)]
fg = fg()
bg = bg()
for i, c in enumerate(corners):
cb.put(c[0], c[1], BOX_CORNERS[i], fg, bg)
for s in h_seps + [0, h]:
cb.put(x0 + 1, y0 + s, symbols["BOX_HORIZONTAL"] * (w - 1), fg, bg)
for y in range(1, h):
for s in v_seps + [0, w]:
cb.put(x0 + s, y0 + y, symbols["BOX_VERTICAL"], fg, bg)
for s in h_seps:
cb.put(x0, y0 + s, symbols["BOX_X_LEFT"], fg, bg)
cb.put(x0 + w, y0 + s, symbols["BOX_X_RIGHT"], fg, bg)
for s in v_seps:
cb.put(x0 + s, y0, symbols["BOX_X_TOP"], fg, bg)
cb.put(x0 + s, y0 + h, symbols["BOX_X_BOTTOM"], fg, bg) | Draws a box in the given terminal.
:type cb: cursebox.CurseBox | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/graphics/drawing.py#L132-L160 |
Tenchi2xh/Almonds | almonds/graphics/drawing.py | draw_progress_bar | def draw_progress_bar(cb, message, value, max_value):
"""
:type cb: cursebox.Cursebox
"""
m_x = cb.width // 2
m_y = cb.height // 2
w = len(message) + 4
h = 3
draw_box(cb, m_x - w // 2, m_y - 1, w, h)
message = " %s " % message
i = int((value / max_value) * (len(message) + 2))
message = "$" + message[:i] + "$" + message[i:]
draw_text(cb, m_x - w // 2 + 1, m_y, message) | python | def draw_progress_bar(cb, message, value, max_value):
"""
:type cb: cursebox.Cursebox
"""
m_x = cb.width // 2
m_y = cb.height // 2
w = len(message) + 4
h = 3
draw_box(cb, m_x - w // 2, m_y - 1, w, h)
message = " %s " % message
i = int((value / max_value) * (len(message) + 2))
message = "$" + message[:i] + "$" + message[i:]
draw_text(cb, m_x - w // 2 + 1, m_y, message) | :type cb: cursebox.Cursebox | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/graphics/drawing.py#L163-L175 |
Tenchi2xh/Almonds | almonds/mandelbrot.py | mandelbrot_iterate | def mandelbrot_iterate(c, max_iterations, julia_seed=None):
"""
Returns the number of iterations before escaping the Mandelbrot fractal.
:param c: Coordinates as a complex number
:type c: complex
:param max_iterations: Limit of how many tries are attempted.
:return: Tuple containing the last complex number in the sequence and the number of iterations.
"""
z = c
if julia_seed is not None:
c = julia_seed
for iterations in range(max_iterations):
z = z * z + c
if abs(z) > 1000:
return z, iterations
return z, max_iterations | python | def mandelbrot_iterate(c, max_iterations, julia_seed=None):
"""
Returns the number of iterations before escaping the Mandelbrot fractal.
:param c: Coordinates as a complex number
:type c: complex
:param max_iterations: Limit of how many tries are attempted.
:return: Tuple containing the last complex number in the sequence and the number of iterations.
"""
z = c
if julia_seed is not None:
c = julia_seed
for iterations in range(max_iterations):
z = z * z + c
if abs(z) > 1000:
return z, iterations
return z, max_iterations | Returns the number of iterations before escaping the Mandelbrot fractal.
:param c: Coordinates as a complex number
:type c: complex
:param max_iterations: Limit of how many tries are attempted.
:return: Tuple containing the last complex number in the sequence and the number of iterations. | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/mandelbrot.py#L11-L27 |
Tenchi2xh/Almonds | almonds/mandelbrot.py | get_coords | def get_coords(x, y, params):
"""
Transforms the given coordinates from plane-space to Mandelbrot-space (real and imaginary).
:param x: X coordinate on the plane.
:param y: Y coordinate on the plane.
:param params: Current application parameters.
:type params: params.Params
:return: Tuple containing the re-mapped coordinates in Mandelbrot-space.
"""
n_x = x * 2.0 / params.plane_w * params.plane_ratio - 1.0
n_y = y * 2.0 / params.plane_h - 1.0
mb_x = params.zoom * n_x
mb_y = params.zoom * n_y
return mb_x, mb_y | python | def get_coords(x, y, params):
"""
Transforms the given coordinates from plane-space to Mandelbrot-space (real and imaginary).
:param x: X coordinate on the plane.
:param y: Y coordinate on the plane.
:param params: Current application parameters.
:type params: params.Params
:return: Tuple containing the re-mapped coordinates in Mandelbrot-space.
"""
n_x = x * 2.0 / params.plane_w * params.plane_ratio - 1.0
n_y = y * 2.0 / params.plane_h - 1.0
mb_x = params.zoom * n_x
mb_y = params.zoom * n_y
return mb_x, mb_y | Transforms the given coordinates from plane-space to Mandelbrot-space (real and imaginary).
:param x: X coordinate on the plane.
:param y: Y coordinate on the plane.
:param params: Current application parameters.
:type params: params.Params
:return: Tuple containing the re-mapped coordinates in Mandelbrot-space. | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/mandelbrot.py#L30-L44 |
Tenchi2xh/Almonds | almonds/mandelbrot.py | mandelbrot | def mandelbrot(x, y, params):
"""
Computes the number of iterations of the given plane-space coordinates.
:param x: X coordinate on the plane.
:param y: Y coordinate on the plane.
:param params: Current application parameters.
:type params: params.Params
:return: Discrete number of iterations.
"""
mb_x, mb_y = get_coords(x, y, params)
mb = mandelbrot_iterate(mb_x + 1j * mb_y, params.max_iterations, params.julia_seed)
return mb[1] | python | def mandelbrot(x, y, params):
"""
Computes the number of iterations of the given plane-space coordinates.
:param x: X coordinate on the plane.
:param y: Y coordinate on the plane.
:param params: Current application parameters.
:type params: params.Params
:return: Discrete number of iterations.
"""
mb_x, mb_y = get_coords(x, y, params)
mb = mandelbrot_iterate(mb_x + 1j * mb_y, params.max_iterations, params.julia_seed)
return mb[1] | Computes the number of iterations of the given plane-space coordinates.
:param x: X coordinate on the plane.
:param y: Y coordinate on the plane.
:param params: Current application parameters.
:type params: params.Params
:return: Discrete number of iterations. | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/mandelbrot.py#L47-L60 |
Tenchi2xh/Almonds | almonds/mandelbrot.py | mandelbrot_capture | def mandelbrot_capture(x, y, w, h, params):
"""
Computes the number of iterations of the given pixel-space coordinates,
for high-res capture purposes.
Contrary to :func:`mandelbrot`, this function returns a continuous
number of iterations to avoid banding.
:param x: X coordinate on the picture
:param y: Y coordinate on the picture
:param w: Width of the picture
:param h: Height of the picture
:param params: Current application parameters.
:type params: params.Params
:return: Continuous number of iterations.
"""
# FIXME: Figure out why these corrections are necessary or how to make them perfect
# Viewport is offset compared to window when capturing without these (found empirically)
if params.plane_ratio >= 1.0:
x -= params.plane_w
else:
x += 3.0 * params.plane_w
ratio = w / h
n_x = x * 2.0 / w * ratio - 1.0
n_y = y * 2.0 / h - 1.0
mb_x = params.zoom * n_x + params.mb_cx
mb_y = params.zoom * n_y + params.mb_cy
mb = mandelbrot_iterate(mb_x + 1j * mb_y, params.max_iterations, params.julia_seed)
z, iterations = mb
# Continuous iteration count for no banding
# https://en.wikipedia.org/wiki/Mandelbrot_set#Continuous_.28smooth.29_coloring
nu = params.max_iterations
if iterations < params.max_iterations:
nu = iterations + 2 - abs(cmath.log(cmath.log(abs(z)) / cmath.log(params.max_iterations), 2))
return clamp(nu, 0, params.max_iterations) | python | def mandelbrot_capture(x, y, w, h, params):
"""
Computes the number of iterations of the given pixel-space coordinates,
for high-res capture purposes.
Contrary to :func:`mandelbrot`, this function returns a continuous
number of iterations to avoid banding.
:param x: X coordinate on the picture
:param y: Y coordinate on the picture
:param w: Width of the picture
:param h: Height of the picture
:param params: Current application parameters.
:type params: params.Params
:return: Continuous number of iterations.
"""
# FIXME: Figure out why these corrections are necessary or how to make them perfect
# Viewport is offset compared to window when capturing without these (found empirically)
if params.plane_ratio >= 1.0:
x -= params.plane_w
else:
x += 3.0 * params.plane_w
ratio = w / h
n_x = x * 2.0 / w * ratio - 1.0
n_y = y * 2.0 / h - 1.0
mb_x = params.zoom * n_x + params.mb_cx
mb_y = params.zoom * n_y + params.mb_cy
mb = mandelbrot_iterate(mb_x + 1j * mb_y, params.max_iterations, params.julia_seed)
z, iterations = mb
# Continuous iteration count for no banding
# https://en.wikipedia.org/wiki/Mandelbrot_set#Continuous_.28smooth.29_coloring
nu = params.max_iterations
if iterations < params.max_iterations:
nu = iterations + 2 - abs(cmath.log(cmath.log(abs(z)) / cmath.log(params.max_iterations), 2))
return clamp(nu, 0, params.max_iterations) | Computes the number of iterations of the given pixel-space coordinates,
for high-res capture purposes.
Contrary to :func:`mandelbrot`, this function returns a continuous
number of iterations to avoid banding.
:param x: X coordinate on the picture
:param y: Y coordinate on the picture
:param w: Width of the picture
:param h: Height of the picture
:param params: Current application parameters.
:type params: params.Params
:return: Continuous number of iterations. | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/mandelbrot.py#L63-L102 |
Tenchi2xh/Almonds | almonds/mandelbrot.py | update_position | def update_position(params):
"""
Computes the center of the viewport's Mandelbrot-space coordinates.
:param params: Current application parameters.
:type params: params.Params
"""
cx = params.plane_x0 + params.plane_w / 2.0
cy = params.plane_y0 + params.plane_h / 2.0
params.mb_cx, params.mb_cy = get_coords(cx, cy, params) | python | def update_position(params):
"""
Computes the center of the viewport's Mandelbrot-space coordinates.
:param params: Current application parameters.
:type params: params.Params
"""
cx = params.plane_x0 + params.plane_w / 2.0
cy = params.plane_y0 + params.plane_h / 2.0
params.mb_cx, params.mb_cy = get_coords(cx, cy, params) | Computes the center of the viewport's Mandelbrot-space coordinates.
:param params: Current application parameters.
:type params: params.Params | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/mandelbrot.py#L105-L114 |
Tenchi2xh/Almonds | almonds/mandelbrot.py | zoom | def zoom(params, factor):
"""
Applies a zoom on the current parameters.
Computes the top-left plane-space coordinates from the Mandelbrot-space coordinates.
:param params: Current application parameters.
:param factor: Zoom factor by which the zoom ratio is divided (bigger factor, more zoom)
"""
params.zoom /= factor
n_x = params.mb_cx / params.zoom
n_y = params.mb_cy / params.zoom
params.plane_x0 = int((n_x + 1.0) * params.plane_w / (2.0 * params.plane_ratio)) - params.plane_w // 2
params.plane_y0 = int((n_y + 1.0) * params.plane_h / 2.0) - params.plane_h // 2 | python | def zoom(params, factor):
"""
Applies a zoom on the current parameters.
Computes the top-left plane-space coordinates from the Mandelbrot-space coordinates.
:param params: Current application parameters.
:param factor: Zoom factor by which the zoom ratio is divided (bigger factor, more zoom)
"""
params.zoom /= factor
n_x = params.mb_cx / params.zoom
n_y = params.mb_cy / params.zoom
params.plane_x0 = int((n_x + 1.0) * params.plane_w / (2.0 * params.plane_ratio)) - params.plane_w // 2
params.plane_y0 = int((n_y + 1.0) * params.plane_h / 2.0) - params.plane_h // 2 | Applies a zoom on the current parameters.
Computes the top-left plane-space coordinates from the Mandelbrot-space coordinates.
:param params: Current application parameters.
:param factor: Zoom factor by which the zoom ratio is divided (bigger factor, more zoom) | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/mandelbrot.py#L117-L132 |
Tenchi2xh/Almonds | almonds/params.py | Params.resize | def resize(self, w, h):
"""
Used when resizing the plane, resets the plane ratio factor.
:param w: New width of the visible section of the plane.
:param h: New height of the visible section of the plane.
"""
self.plane_w = w
self.plane_h = h
self.plane_ratio = self.char_ratio * w / h
if self.crosshairs:
self.crosshairs_coord = ((w + 2) // 2, (h + 2) // 2) | python | def resize(self, w, h):
"""
Used when resizing the plane, resets the plane ratio factor.
:param w: New width of the visible section of the plane.
:param h: New height of the visible section of the plane.
"""
self.plane_w = w
self.plane_h = h
self.plane_ratio = self.char_ratio * w / h
if self.crosshairs:
self.crosshairs_coord = ((w + 2) // 2, (h + 2) // 2) | Used when resizing the plane, resets the plane ratio factor.
:param w: New width of the visible section of the plane.
:param h: New height of the visible section of the plane. | https://github.com/Tenchi2xh/Almonds/blob/6b27024729f055f2cb5e14ae3ca3cb428ae054bc/almonds/params.py#L71-L83 |
jaywink/federation | federation/entities/diaspora/mappers.py | check_sender_and_entity_handle_match | def check_sender_and_entity_handle_match(sender_handle, entity_handle):
"""Ensure that sender and entity handles match.
Basically we've already verified the sender is who they say when receiving the payload. However, the sender might
be trying to set another author in the payload itself, since Diaspora has the sender in both the payload headers
AND the object. We must ensure they're the same.
"""
if sender_handle != entity_handle:
logger.warning("sender_handle and entity_handle don't match, aborting! sender_handle: %s, entity_handle: %s",
sender_handle, entity_handle)
return False
return True | python | def check_sender_and_entity_handle_match(sender_handle, entity_handle):
"""Ensure that sender and entity handles match.
Basically we've already verified the sender is who they say when receiving the payload. However, the sender might
be trying to set another author in the payload itself, since Diaspora has the sender in both the payload headers
AND the object. We must ensure they're the same.
"""
if sender_handle != entity_handle:
logger.warning("sender_handle and entity_handle don't match, aborting! sender_handle: %s, entity_handle: %s",
sender_handle, entity_handle)
return False
return True | Ensure that sender and entity handles match.
Basically we've already verified the sender is who they say when receiving the payload. However, the sender might
be trying to set another author in the payload itself, since Diaspora has the sender in both the payload headers
AND the object. We must ensure they're the same. | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/entities/diaspora/mappers.py#L62-L73 |
jaywink/federation | federation/entities/diaspora/mappers.py | element_to_objects | def element_to_objects(
element: etree.ElementTree, sender: str, sender_key_fetcher:Callable[[str], str]=None, user: UserType =None,
) -> List:
"""Transform an Element to a list of entities recursively.
Possible child entities are added to each entity ``_children`` list.
:param tree: Element
:param sender: Payload sender id
:param sender_key_fetcher: Function to fetch sender public key. If not given, key will always be fetched
over network. The function should take sender handle as the only parameter.
:param user: Optional receiving user object. If given, should have an ``id``.
:returns: list of entities
"""
entities = []
cls = MAPPINGS.get(element.tag)
if not cls:
return []
attrs = xml_children_as_dict(element)
transformed = transform_attributes(attrs, cls)
if hasattr(cls, "fill_extra_attributes"):
transformed = cls.fill_extra_attributes(transformed)
entity = cls(**transformed)
# Add protocol name
entity._source_protocol = "diaspora"
# Save element object to entity for possible later use
entity._source_object = etree.tostring(element)
# Save receiving id to object
if user:
entity._receiving_actor_id = user.id
if issubclass(cls, DiasporaRelayableMixin):
# If relayable, fetch sender key for validation
entity._xml_tags = get_element_child_info(element, "tag")
if sender_key_fetcher:
entity._sender_key = sender_key_fetcher(entity.actor_id)
else:
profile = retrieve_and_parse_profile(entity.handle)
if profile:
entity._sender_key = profile.public_key
else:
# If not relayable, ensure handles match
if not check_sender_and_entity_handle_match(sender, entity.handle):
return []
try:
entity.validate()
except ValueError as ex:
logger.error("Failed to validate entity %s: %s", entity, ex, extra={
"attrs": attrs,
"transformed": transformed,
})
return []
# Extract mentions
entity._mentions = entity.extract_mentions()
# Do child elements
for child in element:
entity._children.extend(element_to_objects(child, sender, user=user))
# Add to entities list
entities.append(entity)
return entities | python | def element_to_objects(
element: etree.ElementTree, sender: str, sender_key_fetcher:Callable[[str], str]=None, user: UserType =None,
) -> List:
"""Transform an Element to a list of entities recursively.
Possible child entities are added to each entity ``_children`` list.
:param tree: Element
:param sender: Payload sender id
:param sender_key_fetcher: Function to fetch sender public key. If not given, key will always be fetched
over network. The function should take sender handle as the only parameter.
:param user: Optional receiving user object. If given, should have an ``id``.
:returns: list of entities
"""
entities = []
cls = MAPPINGS.get(element.tag)
if not cls:
return []
attrs = xml_children_as_dict(element)
transformed = transform_attributes(attrs, cls)
if hasattr(cls, "fill_extra_attributes"):
transformed = cls.fill_extra_attributes(transformed)
entity = cls(**transformed)
# Add protocol name
entity._source_protocol = "diaspora"
# Save element object to entity for possible later use
entity._source_object = etree.tostring(element)
# Save receiving id to object
if user:
entity._receiving_actor_id = user.id
if issubclass(cls, DiasporaRelayableMixin):
# If relayable, fetch sender key for validation
entity._xml_tags = get_element_child_info(element, "tag")
if sender_key_fetcher:
entity._sender_key = sender_key_fetcher(entity.actor_id)
else:
profile = retrieve_and_parse_profile(entity.handle)
if profile:
entity._sender_key = profile.public_key
else:
# If not relayable, ensure handles match
if not check_sender_and_entity_handle_match(sender, entity.handle):
return []
try:
entity.validate()
except ValueError as ex:
logger.error("Failed to validate entity %s: %s", entity, ex, extra={
"attrs": attrs,
"transformed": transformed,
})
return []
# Extract mentions
entity._mentions = entity.extract_mentions()
# Do child elements
for child in element:
entity._children.extend(element_to_objects(child, sender, user=user))
# Add to entities list
entities.append(entity)
return entities | Transform an Element to a list of entities recursively.
Possible child entities are added to each entity ``_children`` list.
:param tree: Element
:param sender: Payload sender id
:param sender_key_fetcher: Function to fetch sender public key. If not given, key will always be fetched
over network. The function should take sender handle as the only parameter.
:param user: Optional receiving user object. If given, should have an ``id``.
:returns: list of entities | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/entities/diaspora/mappers.py#L76-L135 |
jaywink/federation | federation/entities/diaspora/mappers.py | message_to_objects | def message_to_objects(
message: str, sender: str, sender_key_fetcher:Callable[[str], str]=None, user: UserType =None,
) -> List:
"""Takes in a message extracted by a protocol and maps it to entities.
:param message: XML payload
:type message: str
:param sender: Payload sender id
:type message: str
:param sender_key_fetcher: Function to fetch sender public key. If not given, key will always be fetched
over network. The function should take sender handle as the only parameter.
:param user: Optional receiving user object. If given, should have a `handle`.
:returns: list of entities
"""
doc = etree.fromstring(message)
if doc.tag in TAGS:
return element_to_objects(doc, sender, sender_key_fetcher, user)
return [] | python | def message_to_objects(
message: str, sender: str, sender_key_fetcher:Callable[[str], str]=None, user: UserType =None,
) -> List:
"""Takes in a message extracted by a protocol and maps it to entities.
:param message: XML payload
:type message: str
:param sender: Payload sender id
:type message: str
:param sender_key_fetcher: Function to fetch sender public key. If not given, key will always be fetched
over network. The function should take sender handle as the only parameter.
:param user: Optional receiving user object. If given, should have a `handle`.
:returns: list of entities
"""
doc = etree.fromstring(message)
if doc.tag in TAGS:
return element_to_objects(doc, sender, sender_key_fetcher, user)
return [] | Takes in a message extracted by a protocol and maps it to entities.
:param message: XML payload
:type message: str
:param sender: Payload sender id
:type message: str
:param sender_key_fetcher: Function to fetch sender public key. If not given, key will always be fetched
over network. The function should take sender handle as the only parameter.
:param user: Optional receiving user object. If given, should have a `handle`.
:returns: list of entities | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/entities/diaspora/mappers.py#L138-L155 |
jaywink/federation | federation/entities/diaspora/mappers.py | transform_attributes | def transform_attributes(attrs, cls):
"""Transform some attribute keys.
:param attrs: Properties from the XML
:type attrs: dict
:param cls: Class of the entity
:type cls: class
"""
transformed = {}
for key, value in attrs.items():
if value is None:
value = ""
if key == "text":
transformed["raw_content"] = value
elif key == "author":
if cls == DiasporaProfile:
# Diaspora Profile XML message contains no GUID. We need the guid. Fetch it.
profile = retrieve_and_parse_profile(value)
transformed['id'] = value
transformed["guid"] = profile.guid
else:
transformed["actor_id"] = value
transformed["handle"] = value
elif key == 'guid':
if cls != DiasporaProfile:
transformed["id"] = value
transformed["guid"] = value
elif key in ("root_author", "recipient"):
transformed["target_id"] = value
transformed["target_handle"] = value
elif key in ("target_guid", "root_guid", "parent_guid"):
transformed["target_id"] = value
transformed["target_guid"] = value
elif key in ("first_name", "last_name"):
values = [attrs.get('first_name'), attrs.get('last_name')]
values = [v for v in values if v]
transformed["name"] = " ".join(values)
elif key == "image_url":
if "image_urls" not in transformed:
transformed["image_urls"] = {}
transformed["image_urls"]["large"] = value
elif key == "image_url_small":
if "image_urls" not in transformed:
transformed["image_urls"] = {}
transformed["image_urls"]["small"] = value
elif key == "image_url_medium":
if "image_urls" not in transformed:
transformed["image_urls"] = {}
transformed["image_urls"]["medium"] = value
elif key == "tag_string":
if value:
transformed["tag_list"] = value.replace("#", "").split(" ")
elif key == "bio":
transformed["raw_content"] = value
elif key == "searchable":
transformed["public"] = True if value == "true" else False
elif key in ["target_type"] and cls == DiasporaRetraction:
transformed["entity_type"] = DiasporaRetraction.entity_type_from_remote(value)
elif key == "remote_photo_path":
transformed["remote_path"] = value
elif key == "remote_photo_name":
transformed["remote_name"] = value
elif key == "status_message_guid":
transformed["linked_guid"] = value
transformed["linked_type"] = "Post"
elif key == "author_signature":
transformed["signature"] = value
elif key in BOOLEAN_KEYS:
transformed[key] = True if value == "true" else False
elif key in DATETIME_KEYS:
transformed[key] = datetime.strptime(value, "%Y-%m-%dT%H:%M:%SZ")
elif key in INTEGER_KEYS:
transformed[key] = int(value)
else:
transformed[key] = value
return transformed | python | def transform_attributes(attrs, cls):
"""Transform some attribute keys.
:param attrs: Properties from the XML
:type attrs: dict
:param cls: Class of the entity
:type cls: class
"""
transformed = {}
for key, value in attrs.items():
if value is None:
value = ""
if key == "text":
transformed["raw_content"] = value
elif key == "author":
if cls == DiasporaProfile:
# Diaspora Profile XML message contains no GUID. We need the guid. Fetch it.
profile = retrieve_and_parse_profile(value)
transformed['id'] = value
transformed["guid"] = profile.guid
else:
transformed["actor_id"] = value
transformed["handle"] = value
elif key == 'guid':
if cls != DiasporaProfile:
transformed["id"] = value
transformed["guid"] = value
elif key in ("root_author", "recipient"):
transformed["target_id"] = value
transformed["target_handle"] = value
elif key in ("target_guid", "root_guid", "parent_guid"):
transformed["target_id"] = value
transformed["target_guid"] = value
elif key in ("first_name", "last_name"):
values = [attrs.get('first_name'), attrs.get('last_name')]
values = [v for v in values if v]
transformed["name"] = " ".join(values)
elif key == "image_url":
if "image_urls" not in transformed:
transformed["image_urls"] = {}
transformed["image_urls"]["large"] = value
elif key == "image_url_small":
if "image_urls" not in transformed:
transformed["image_urls"] = {}
transformed["image_urls"]["small"] = value
elif key == "image_url_medium":
if "image_urls" not in transformed:
transformed["image_urls"] = {}
transformed["image_urls"]["medium"] = value
elif key == "tag_string":
if value:
transformed["tag_list"] = value.replace("#", "").split(" ")
elif key == "bio":
transformed["raw_content"] = value
elif key == "searchable":
transformed["public"] = True if value == "true" else False
elif key in ["target_type"] and cls == DiasporaRetraction:
transformed["entity_type"] = DiasporaRetraction.entity_type_from_remote(value)
elif key == "remote_photo_path":
transformed["remote_path"] = value
elif key == "remote_photo_name":
transformed["remote_name"] = value
elif key == "status_message_guid":
transformed["linked_guid"] = value
transformed["linked_type"] = "Post"
elif key == "author_signature":
transformed["signature"] = value
elif key in BOOLEAN_KEYS:
transformed[key] = True if value == "true" else False
elif key in DATETIME_KEYS:
transformed[key] = datetime.strptime(value, "%Y-%m-%dT%H:%M:%SZ")
elif key in INTEGER_KEYS:
transformed[key] = int(value)
else:
transformed[key] = value
return transformed | Transform some attribute keys.
:param attrs: Properties from the XML
:type attrs: dict
:param cls: Class of the entity
:type cls: class | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/entities/diaspora/mappers.py#L158-L233 |
jaywink/federation | federation/entities/diaspora/mappers.py | get_outbound_entity | def get_outbound_entity(entity: BaseEntity, private_key: RsaKey):
"""Get the correct outbound entity for this protocol.
We might have to look at entity values to decide the correct outbound entity.
If we cannot find one, we should raise as conversion cannot be guaranteed to the given protocol.
Private key of author is needed to be passed for signing the outbound entity.
:arg entity: An entity instance which can be of a base or protocol entity class.
:arg private_key: Private key of sender as an RSA object
:returns: Protocol specific entity class instance.
:raises ValueError: If conversion cannot be done.
"""
if getattr(entity, "outbound_doc", None):
# If the entity already has an outbound doc, just return the entity as is
return entity
outbound = None
cls = entity.__class__
if cls in [DiasporaPost, DiasporaImage, DiasporaComment, DiasporaLike, DiasporaProfile, DiasporaRetraction,
DiasporaContact, DiasporaReshare]:
# Already fine
outbound = entity
elif cls == Post:
outbound = DiasporaPost.from_base(entity)
elif cls == Comment:
outbound = DiasporaComment.from_base(entity)
elif cls == Reaction:
if entity.reaction == "like":
outbound = DiasporaLike.from_base(entity)
elif cls == Follow:
outbound = DiasporaContact.from_base(entity)
elif cls == Profile:
outbound = DiasporaProfile.from_base(entity)
elif cls == Retraction:
outbound = DiasporaRetraction.from_base(entity)
elif cls == Share:
outbound = DiasporaReshare.from_base(entity)
if not outbound:
raise ValueError("Don't know how to convert this base entity to Diaspora protocol entities.")
if isinstance(outbound, DiasporaRelayableMixin) and not outbound.signature:
# Sign by author if not signed yet. We don't want to overwrite any existing signature in the case
# that this is being sent by the parent author
outbound.sign(private_key)
# If missing, also add same signature to `parent_author_signature`. This is required at the moment
# in all situations but is apparently being removed.
# TODO: remove this once Diaspora removes the extra signature
outbound.parent_signature = outbound.signature
return outbound | python | def get_outbound_entity(entity: BaseEntity, private_key: RsaKey):
"""Get the correct outbound entity for this protocol.
We might have to look at entity values to decide the correct outbound entity.
If we cannot find one, we should raise as conversion cannot be guaranteed to the given protocol.
Private key of author is needed to be passed for signing the outbound entity.
:arg entity: An entity instance which can be of a base or protocol entity class.
:arg private_key: Private key of sender as an RSA object
:returns: Protocol specific entity class instance.
:raises ValueError: If conversion cannot be done.
"""
if getattr(entity, "outbound_doc", None):
# If the entity already has an outbound doc, just return the entity as is
return entity
outbound = None
cls = entity.__class__
if cls in [DiasporaPost, DiasporaImage, DiasporaComment, DiasporaLike, DiasporaProfile, DiasporaRetraction,
DiasporaContact, DiasporaReshare]:
# Already fine
outbound = entity
elif cls == Post:
outbound = DiasporaPost.from_base(entity)
elif cls == Comment:
outbound = DiasporaComment.from_base(entity)
elif cls == Reaction:
if entity.reaction == "like":
outbound = DiasporaLike.from_base(entity)
elif cls == Follow:
outbound = DiasporaContact.from_base(entity)
elif cls == Profile:
outbound = DiasporaProfile.from_base(entity)
elif cls == Retraction:
outbound = DiasporaRetraction.from_base(entity)
elif cls == Share:
outbound = DiasporaReshare.from_base(entity)
if not outbound:
raise ValueError("Don't know how to convert this base entity to Diaspora protocol entities.")
if isinstance(outbound, DiasporaRelayableMixin) and not outbound.signature:
# Sign by author if not signed yet. We don't want to overwrite any existing signature in the case
# that this is being sent by the parent author
outbound.sign(private_key)
# If missing, also add same signature to `parent_author_signature`. This is required at the moment
# in all situations but is apparently being removed.
# TODO: remove this once Diaspora removes the extra signature
outbound.parent_signature = outbound.signature
return outbound | Get the correct outbound entity for this protocol.
We might have to look at entity values to decide the correct outbound entity.
If we cannot find one, we should raise as conversion cannot be guaranteed to the given protocol.
Private key of author is needed to be passed for signing the outbound entity.
:arg entity: An entity instance which can be of a base or protocol entity class.
:arg private_key: Private key of sender as an RSA object
:returns: Protocol specific entity class instance.
:raises ValueError: If conversion cannot be done. | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/entities/diaspora/mappers.py#L236-L283 |
jaywink/federation | federation/protocols/diaspora/signatures.py | get_element_child_info | def get_element_child_info(doc, attr):
"""Get information from child elements of this elementas a list since order is important.
Don't include signature tags.
:param doc: XML element
:param attr: Attribute to get from the elements, for example "tag" or "text".
"""
props = []
for child in doc:
if child.tag not in ["author_signature", "parent_author_signature"]:
props.append(getattr(child, attr))
return props | python | def get_element_child_info(doc, attr):
"""Get information from child elements of this elementas a list since order is important.
Don't include signature tags.
:param doc: XML element
:param attr: Attribute to get from the elements, for example "tag" or "text".
"""
props = []
for child in doc:
if child.tag not in ["author_signature", "parent_author_signature"]:
props.append(getattr(child, attr))
return props | Get information from child elements of this elementas a list since order is important.
Don't include signature tags.
:param doc: XML element
:param attr: Attribute to get from the elements, for example "tag" or "text". | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/protocols/diaspora/signatures.py#L9-L21 |
jaywink/federation | federation/protocols/diaspora/signatures.py | verify_relayable_signature | def verify_relayable_signature(public_key, doc, signature):
"""
Verify the signed XML elements to have confidence that the claimed
author did actually generate this message.
"""
sig_hash = _create_signature_hash(doc)
cipher = PKCS1_v1_5.new(RSA.importKey(public_key))
return cipher.verify(sig_hash, b64decode(signature)) | python | def verify_relayable_signature(public_key, doc, signature):
"""
Verify the signed XML elements to have confidence that the claimed
author did actually generate this message.
"""
sig_hash = _create_signature_hash(doc)
cipher = PKCS1_v1_5.new(RSA.importKey(public_key))
return cipher.verify(sig_hash, b64decode(signature)) | Verify the signed XML elements to have confidence that the claimed
author did actually generate this message. | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/protocols/diaspora/signatures.py#L30-L37 |
jaywink/federation | federation/hostmeta/django/generators.py | rfc7033_webfinger_view | def rfc7033_webfinger_view(request, *args, **kwargs):
"""
Django view to generate an RFC7033 webfinger.
"""
resource = request.GET.get("resource")
if not resource:
return HttpResponseBadRequest("No resource found")
if not resource.startswith("acct:"):
return HttpResponseBadRequest("Invalid resource")
handle = resource.replace("acct:", "").lower()
profile_func = get_function_from_config("get_profile_function")
try:
profile = profile_func(handle=handle, request=request)
except Exception as exc:
logger.warning("rfc7033_webfinger_view - Failed to get profile by handle %s: %s", handle, exc)
return HttpResponseNotFound()
config = get_configuration()
webfinger = RFC7033Webfinger(
id=profile.id,
handle=profile.handle,
guid=profile.guid,
base_url=config.get('base_url'),
profile_path=get_path_from_url(profile.url),
hcard_path=config.get('hcard_path'),
atom_path=get_path_from_url(profile.atom_url),
search_path=config.get('search_path'),
)
return JsonResponse(
webfinger.render(),
content_type="application/jrd+json",
) | python | def rfc7033_webfinger_view(request, *args, **kwargs):
"""
Django view to generate an RFC7033 webfinger.
"""
resource = request.GET.get("resource")
if not resource:
return HttpResponseBadRequest("No resource found")
if not resource.startswith("acct:"):
return HttpResponseBadRequest("Invalid resource")
handle = resource.replace("acct:", "").lower()
profile_func = get_function_from_config("get_profile_function")
try:
profile = profile_func(handle=handle, request=request)
except Exception as exc:
logger.warning("rfc7033_webfinger_view - Failed to get profile by handle %s: %s", handle, exc)
return HttpResponseNotFound()
config = get_configuration()
webfinger = RFC7033Webfinger(
id=profile.id,
handle=profile.handle,
guid=profile.guid,
base_url=config.get('base_url'),
profile_path=get_path_from_url(profile.url),
hcard_path=config.get('hcard_path'),
atom_path=get_path_from_url(profile.atom_url),
search_path=config.get('search_path'),
)
return JsonResponse(
webfinger.render(),
content_type="application/jrd+json",
) | Django view to generate an RFC7033 webfinger. | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/hostmeta/django/generators.py#L22-L56 |
jaywink/federation | federation/utils/diaspora.py | parse_diaspora_webfinger | def parse_diaspora_webfinger(document):
"""
Parse Diaspora webfinger which is either in JSON format (new) or XRD (old).
https://diaspora.github.io/diaspora_federation/discovery/webfinger.html
"""
webfinger = {
"hcard_url": None,
}
try:
doc = json.loads(document)
for link in doc["links"]:
if link["rel"] == "http://microformats.org/profile/hcard":
webfinger["hcard_url"] = link["href"]
break
else:
logger.warning("parse_diaspora_webfinger: found JSON webfinger but it has no hcard href")
raise ValueError
except Exception:
try:
xrd = XRD.parse_xrd(document)
webfinger["hcard_url"] = xrd.find_link(rels="http://microformats.org/profile/hcard").href
except xml.parsers.expat.ExpatError:
logger.warning("parse_diaspora_webfinger: found XML webfinger but it fails to parse (ExpatError)")
pass
return webfinger | python | def parse_diaspora_webfinger(document):
"""
Parse Diaspora webfinger which is either in JSON format (new) or XRD (old).
https://diaspora.github.io/diaspora_federation/discovery/webfinger.html
"""
webfinger = {
"hcard_url": None,
}
try:
doc = json.loads(document)
for link in doc["links"]:
if link["rel"] == "http://microformats.org/profile/hcard":
webfinger["hcard_url"] = link["href"]
break
else:
logger.warning("parse_diaspora_webfinger: found JSON webfinger but it has no hcard href")
raise ValueError
except Exception:
try:
xrd = XRD.parse_xrd(document)
webfinger["hcard_url"] = xrd.find_link(rels="http://microformats.org/profile/hcard").href
except xml.parsers.expat.ExpatError:
logger.warning("parse_diaspora_webfinger: found XML webfinger but it fails to parse (ExpatError)")
pass
return webfinger | Parse Diaspora webfinger which is either in JSON format (new) or XRD (old).
https://diaspora.github.io/diaspora_federation/discovery/webfinger.html | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/utils/diaspora.py#L28-L53 |
jaywink/federation | federation/utils/diaspora.py | retrieve_diaspora_hcard | def retrieve_diaspora_hcard(handle):
"""
Retrieve a remote Diaspora hCard document.
:arg handle: Remote handle to retrieve
:return: str (HTML document)
"""
webfinger = retrieve_and_parse_diaspora_webfinger(handle)
document, code, exception = fetch_document(webfinger.get("hcard_url"))
if exception:
return None
return document | python | def retrieve_diaspora_hcard(handle):
"""
Retrieve a remote Diaspora hCard document.
:arg handle: Remote handle to retrieve
:return: str (HTML document)
"""
webfinger = retrieve_and_parse_diaspora_webfinger(handle)
document, code, exception = fetch_document(webfinger.get("hcard_url"))
if exception:
return None
return document | Retrieve a remote Diaspora hCard document.
:arg handle: Remote handle to retrieve
:return: str (HTML document) | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/utils/diaspora.py#L56-L67 |
jaywink/federation | federation/utils/diaspora.py | retrieve_and_parse_diaspora_webfinger | def retrieve_and_parse_diaspora_webfinger(handle):
"""
Retrieve a and parse a remote Diaspora webfinger document.
:arg handle: Remote handle to retrieve
:returns: dict
"""
try:
host = handle.split("@")[1]
except AttributeError:
logger.warning("retrieve_and_parse_diaspora_webfinger: invalid handle given: %s", handle)
return None
document, code, exception = fetch_document(
host=host, path="/.well-known/webfinger?resource=acct:%s" % quote(handle),
)
if document:
return parse_diaspora_webfinger(document)
hostmeta = retrieve_diaspora_host_meta(host)
if not hostmeta:
return None
url = hostmeta.find_link(rels="lrdd").template.replace("{uri}", quote(handle))
document, code, exception = fetch_document(url)
if exception:
return None
return parse_diaspora_webfinger(document) | python | def retrieve_and_parse_diaspora_webfinger(handle):
"""
Retrieve a and parse a remote Diaspora webfinger document.
:arg handle: Remote handle to retrieve
:returns: dict
"""
try:
host = handle.split("@")[1]
except AttributeError:
logger.warning("retrieve_and_parse_diaspora_webfinger: invalid handle given: %s", handle)
return None
document, code, exception = fetch_document(
host=host, path="/.well-known/webfinger?resource=acct:%s" % quote(handle),
)
if document:
return parse_diaspora_webfinger(document)
hostmeta = retrieve_diaspora_host_meta(host)
if not hostmeta:
return None
url = hostmeta.find_link(rels="lrdd").template.replace("{uri}", quote(handle))
document, code, exception = fetch_document(url)
if exception:
return None
return parse_diaspora_webfinger(document) | Retrieve a and parse a remote Diaspora webfinger document.
:arg handle: Remote handle to retrieve
:returns: dict | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/utils/diaspora.py#L70-L94 |
jaywink/federation | federation/utils/diaspora.py | retrieve_diaspora_host_meta | def retrieve_diaspora_host_meta(host):
"""
Retrieve a remote Diaspora host-meta document.
:arg host: Host to retrieve from
:returns: ``XRD`` instance
"""
document, code, exception = fetch_document(host=host, path="/.well-known/host-meta")
if exception:
return None
xrd = XRD.parse_xrd(document)
return xrd | python | def retrieve_diaspora_host_meta(host):
"""
Retrieve a remote Diaspora host-meta document.
:arg host: Host to retrieve from
:returns: ``XRD`` instance
"""
document, code, exception = fetch_document(host=host, path="/.well-known/host-meta")
if exception:
return None
xrd = XRD.parse_xrd(document)
return xrd | Retrieve a remote Diaspora host-meta document.
:arg host: Host to retrieve from
:returns: ``XRD`` instance | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/utils/diaspora.py#L97-L108 |
jaywink/federation | federation/utils/diaspora.py | _get_element_text_or_none | def _get_element_text_or_none(document, selector):
"""
Using a CSS selector, get the element and return the text, or None if no element.
:arg document: ``HTMLElement`` document
:arg selector: CSS selector
:returns: str or None
"""
element = document.cssselect(selector)
if element:
return element[0].text
return None | python | def _get_element_text_or_none(document, selector):
"""
Using a CSS selector, get the element and return the text, or None if no element.
:arg document: ``HTMLElement`` document
:arg selector: CSS selector
:returns: str or None
"""
element = document.cssselect(selector)
if element:
return element[0].text
return None | Using a CSS selector, get the element and return the text, or None if no element.
:arg document: ``HTMLElement`` document
:arg selector: CSS selector
:returns: str or None | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/utils/diaspora.py#L111-L122 |
jaywink/federation | federation/utils/diaspora.py | _get_element_attr_or_none | def _get_element_attr_or_none(document, selector, attribute):
"""
Using a CSS selector, get the element and return the given attribute value, or None if no element.
Args:
document (HTMLElement) - HTMLElement document
selector (str) - CSS selector
attribute (str) - The attribute to get from the element
"""
element = document.cssselect(selector)
if element:
return element[0].get(attribute)
return None | python | def _get_element_attr_or_none(document, selector, attribute):
"""
Using a CSS selector, get the element and return the given attribute value, or None if no element.
Args:
document (HTMLElement) - HTMLElement document
selector (str) - CSS selector
attribute (str) - The attribute to get from the element
"""
element = document.cssselect(selector)
if element:
return element[0].get(attribute)
return None | Using a CSS selector, get the element and return the given attribute value, or None if no element.
Args:
document (HTMLElement) - HTMLElement document
selector (str) - CSS selector
attribute (str) - The attribute to get from the element | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/utils/diaspora.py#L125-L137 |
jaywink/federation | federation/utils/diaspora.py | parse_profile_from_hcard | def parse_profile_from_hcard(hcard: str, handle: str):
"""
Parse all the fields we can from a hCard document to get a Profile.
:arg hcard: HTML hcard document (str)
:arg handle: User handle in [email protected] format
:returns: ``federation.entities.diaspora.entities.DiasporaProfile`` instance
"""
from federation.entities.diaspora.entities import DiasporaProfile # Circulars
doc = html.fromstring(hcard)
profile = DiasporaProfile(
name=_get_element_text_or_none(doc, ".fn"),
image_urls={
"small": _get_element_attr_or_none(doc, ".entity_photo_small .photo", "src"),
"medium": _get_element_attr_or_none(doc, ".entity_photo_medium .photo", "src"),
"large": _get_element_attr_or_none(doc, ".entity_photo .photo", "src"),
},
public=True if _get_element_text_or_none(doc, ".searchable") == "true" else False,
id=handle,
handle=handle,
guid=_get_element_text_or_none(doc, ".uid"),
public_key=_get_element_text_or_none(doc, ".key"),
)
return profile | python | def parse_profile_from_hcard(hcard: str, handle: str):
"""
Parse all the fields we can from a hCard document to get a Profile.
:arg hcard: HTML hcard document (str)
:arg handle: User handle in [email protected] format
:returns: ``federation.entities.diaspora.entities.DiasporaProfile`` instance
"""
from federation.entities.diaspora.entities import DiasporaProfile # Circulars
doc = html.fromstring(hcard)
profile = DiasporaProfile(
name=_get_element_text_or_none(doc, ".fn"),
image_urls={
"small": _get_element_attr_or_none(doc, ".entity_photo_small .photo", "src"),
"medium": _get_element_attr_or_none(doc, ".entity_photo_medium .photo", "src"),
"large": _get_element_attr_or_none(doc, ".entity_photo .photo", "src"),
},
public=True if _get_element_text_or_none(doc, ".searchable") == "true" else False,
id=handle,
handle=handle,
guid=_get_element_text_or_none(doc, ".uid"),
public_key=_get_element_text_or_none(doc, ".key"),
)
return profile | Parse all the fields we can from a hCard document to get a Profile.
:arg hcard: HTML hcard document (str)
:arg handle: User handle in [email protected] format
:returns: ``federation.entities.diaspora.entities.DiasporaProfile`` instance | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/utils/diaspora.py#L140-L163 |
jaywink/federation | federation/utils/diaspora.py | retrieve_and_parse_content | def retrieve_and_parse_content(
guid: str, handle: str, entity_type: str, sender_key_fetcher: Callable[[str], str]=None,
):
"""Retrieve remote content and return an Entity class instance.
This is basically the inverse of receiving an entity. Instead, we fetch it, then call "handle_receive".
:param sender_key_fetcher: Function to use to fetch sender public key. If not given, network will be used
to fetch the profile and the key. Function must take handle as only parameter and return a public key.
:returns: Entity object instance or ``None``
"""
if not validate_handle(handle):
return
_username, domain = handle.split("@")
url = get_fetch_content_endpoint(domain, entity_type.lower(), guid)
document, status_code, error = fetch_document(url)
if status_code == 200:
request = RequestType(body=document)
_sender, _protocol, entities = handle_receive(request, sender_key_fetcher=sender_key_fetcher)
if len(entities) > 1:
logger.warning("retrieve_and_parse_content - more than one entity parsed from remote even though we"
"expected only one! ID %s", guid)
if entities:
return entities[0]
return
elif status_code == 404:
logger.warning("retrieve_and_parse_content - remote content %s not found", guid)
return
if error:
raise error
raise Exception("retrieve_and_parse_content - unknown problem when fetching document: %s, %s, %s" % (
document, status_code, error,
)) | python | def retrieve_and_parse_content(
guid: str, handle: str, entity_type: str, sender_key_fetcher: Callable[[str], str]=None,
):
"""Retrieve remote content and return an Entity class instance.
This is basically the inverse of receiving an entity. Instead, we fetch it, then call "handle_receive".
:param sender_key_fetcher: Function to use to fetch sender public key. If not given, network will be used
to fetch the profile and the key. Function must take handle as only parameter and return a public key.
:returns: Entity object instance or ``None``
"""
if not validate_handle(handle):
return
_username, domain = handle.split("@")
url = get_fetch_content_endpoint(domain, entity_type.lower(), guid)
document, status_code, error = fetch_document(url)
if status_code == 200:
request = RequestType(body=document)
_sender, _protocol, entities = handle_receive(request, sender_key_fetcher=sender_key_fetcher)
if len(entities) > 1:
logger.warning("retrieve_and_parse_content - more than one entity parsed from remote even though we"
"expected only one! ID %s", guid)
if entities:
return entities[0]
return
elif status_code == 404:
logger.warning("retrieve_and_parse_content - remote content %s not found", guid)
return
if error:
raise error
raise Exception("retrieve_and_parse_content - unknown problem when fetching document: %s, %s, %s" % (
document, status_code, error,
)) | Retrieve remote content and return an Entity class instance.
This is basically the inverse of receiving an entity. Instead, we fetch it, then call "handle_receive".
:param sender_key_fetcher: Function to use to fetch sender public key. If not given, network will be used
to fetch the profile and the key. Function must take handle as only parameter and return a public key.
:returns: Entity object instance or ``None`` | https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/utils/diaspora.py#L166-L198 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.