repository_name: string (lengths 5-67)
func_path_in_repository: string (lengths 4-234)
func_name: string (lengths 0-314)
whole_func_string: string (lengths 52-3.87M)
language: string (6 distinct values)
func_code_string: string (lengths 52-3.87M)
func_documentation_string: string (lengths 1-47.2k)
func_code_url: string (lengths 85-339)
gregmuellegger/django-autofixture
autofixture/__init__.py
get
def get(model, *args, **kwargs):
    '''
    Get an autofixture instance for the passed in *model*, using either an
    appropriate autofixture that was registered (see :ref:`registry
    <registry>`) or falling back to the default :class:`AutoFixture` class.
    *model* can be a model class or its string representation (e.g.
    ``"app.ModelClass"``).

    All positional and keyword arguments are passed to the autofixture
    constructor.
    '''
    from .compat import get_model

    if isinstance(model, string_types):
        model = get_model(*model.split('.', 1))
    if model in REGISTRY:
        return REGISTRY[model](model, *args, **kwargs)
    else:
        return AutoFixture(model, *args, **kwargs)
python
Get an autofixture instance for the passed in *model*, using either an appropriate autofixture that was registered (see :ref:`registry <registry>`) or falling back to the default :class:`AutoFixture` class. *model* can be a model class or its string representation (e.g. ``"app.ModelClass"``). All positional and keyword arguments are passed to the autofixture constructor.
https://github.com/gregmuellegger/django-autofixture/blob/0b696fd3a06747459981e4269aff427676f84ae0/autofixture/__init__.py#L81-L98
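For context, a minimal usage sketch (the 'app.Entry' model label is hypothetical): get returns the registered autofixture for the model if one exists, otherwise a plain AutoFixture.

import autofixture

# Returns the registered autofixture class for the model if one exists,
# otherwise a default AutoFixture; 'app.Entry' is an illustrative label.
fixture = autofixture.get('app.Entry', field_values={'title': 'Hello'})
entry = fixture.create_one()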
gregmuellegger/django-autofixture
autofixture/__init__.py
create
def create(model, count, *args, **kwargs):
    '''
    Create *count* instances of *model*, using either an appropriate
    autofixture that was registered (see :ref:`registry <registry>`) or
    falling back to the default :class:`AutoFixture` class. *model* can be a
    model class or its string representation (e.g. ``"app.ModelClass"``).

    All positional and keyword arguments are passed to the autofixture
    constructor. The example below will create ten superusers::

        import autofixture
        admins = autofixture.create('auth.User', 10, field_values={'is_superuser': True})

    .. note:: See :ref:`AutoFixture` for more information.

    :func:`create` will return a list of the created objects.
    '''
    from .compat import get_model

    if isinstance(model, string_types):
        model = get_model(*model.split('.', 1))
    if model in REGISTRY:
        autofixture_class = REGISTRY[model]
    else:
        autofixture_class = AutoFixture
    # Get keyword arguments that the create_one method accepts and pass them
    # into create_one instead of AutoFixture.__init__
    argnames = set(getargnames(autofixture_class.create_one))
    argnames -= set(['self'])
    create_kwargs = {}
    for argname in argnames:
        if argname in kwargs:
            create_kwargs[argname] = kwargs.pop(argname)
    autofixture = autofixture_class(model, *args, **kwargs)
    return autofixture.create(count, **create_kwargs)
python
Create *count* instances of *model*, using either an appropriate autofixture that was registered (see :ref:`registry <registry>`) or falling back to the default :class:`AutoFixture` class. *model* can be a model class or its string representation (e.g. ``"app.ModelClass"``). All positional and keyword arguments are passed to the autofixture constructor. The example below will create ten superusers:: import autofixture admins = autofixture.create('auth.User', 10, field_values={'is_superuser': True}) .. note:: See :ref:`AutoFixture` for more information. :func:`create` will return a list of the created objects.
https://github.com/gregmuellegger/django-autofixture/blob/0b696fd3a06747459981e4269aff427676f84ae0/autofixture/__init__.py#L101-L136
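A small sketch of the keyword-splitting behaviour described above: arguments that appear in create_one's signature (such as commit) are forwarded there rather than to the AutoFixture constructor.

import autofixture

# 'commit' is an argument of AutoFixture.create_one, so create() routes it
# there; field_values goes to the AutoFixture constructor instead.
users = autofixture.create('auth.User', 3,
                           field_values={'is_staff': True},
                           commit=False)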
gregmuellegger/django-autofixture
autofixture/__init__.py
autodiscover
def autodiscover():
    '''
    Auto-discover INSTALLED_APPS autofixtures.py and tests.py modules and fail
    silently when not present. This forces an import on them to register any
    autofixture bits they may want.
    '''
    from .compat import importlib

    # Bail out if autodiscover didn't finish loading from a previous call so
    # that we avoid running autodiscover again when the URLconf is loaded by
    # the exception handler to resolve the handler500 view. This prevents an
    # autofixtures.py module with errors from re-registering models and
    # raising a spurious AlreadyRegistered exception (see #8245).
    global LOADING
    if LOADING:
        return
    LOADING = True

    app_paths = {}

    # For each app, we need to look for an autofixtures.py inside that app's
    # package. We can't use os.path here -- recall that modules may be
    # imported different ways (think zip files) -- so we need to get
    # the app's __path__ and look for autofixtures.py on that path.

    # Step 1: find out the app's __path__. Import errors here will (and
    # should) bubble up, but a missing __path__ (which is legal, but weird)
    # fails silently -- apps that do weird things with __path__ might
    # need to roll their own autofixture registration.
    import imp
    try:
        from django.apps import apps

        for app_config in apps.get_app_configs():
            app_paths[app_config.name] = [app_config.path]
    except ImportError:
        # Django < 1.7
        from django.conf import settings

        for app in settings.INSTALLED_APPS:
            mod = importlib.import_module(app)
            try:
                app_paths[app] = mod.__path__
            except AttributeError:
                continue

    for app, app_path in app_paths.items():
        # Step 2: use imp.find_module to find the app's autofixtures.py. For
        # some reason imp.find_module raises ImportError if the app can't be
        # found but doesn't actually try to import the module. So skip this
        # app if its autofixtures.py doesn't exist.
        try:
            file, _, _ = imp.find_module('autofixtures', app_path)
        except ImportError:
            continue
        else:
            if file:
                file.close()

        # Step 3: import the app's autofixtures file. If this has errors we
        # want them to bubble up.
        try:
            importlib.import_module("%s.autofixtures" % app)
        except Exception as e:
            warnings.warn(u'Error while importing %s.autofixtures: %r' %
                          (app, e))

    for app, app_path in app_paths.items():
        try:
            file, _, _ = imp.find_module('tests', app_path)
        except ImportError:
            continue
        else:
            if file:
                file.close()

        try:
            importlib.import_module("%s.tests" % app)
        except Exception as e:
            warnings.warn(u'Error while importing %s.tests: %r' % (app, e))

    # autodiscover was successful, reset loading flag.
    LOADING = False
python
Auto-discover INSTALLED_APPS autofixtures.py and tests.py modules and fail silently when not present. This forces an import on them to register any autofixture bits they may want.
https://github.com/gregmuellegger/django-autofixture/blob/0b696fd3a06747459981e4269aff427676f84ae0/autofixture/__init__.py#L151-L235
gregmuellegger/django-autofixture
autofixture/placeholder.py
get_placeholder_image
def get_placeholder_image(width, height, name=None, fg_color=get_color('black'),
                          bg_color=get_color('grey'), text=None,
                          font=u'Verdana.ttf', fontsize=42,
                          encoding=u'unic', mode='RGBA', fmt=u'PNG'):
    """Little spin-off from https://github.com/Visgean/python-placeholder
    that does not save the image to disk but returns it instead."""
    size = (width, height)
    text = text if text else '{0}x{1}'.format(width, height)

    try:
        font = ImageFont.truetype(font, size=fontsize, encoding=encoding)
    except IOError:
        font = ImageFont.load_default()

    result_img = Image.new(mode, size, bg_color)

    text_size = font.getsize(text)
    text_img = Image.new("RGBA", size, bg_color)

    # position for the text:
    left = size[0] / 2 - text_size[0] / 2
    top = size[1] / 2 - text_size[1] / 2

    drawing = ImageDraw.Draw(text_img)
    drawing.text((left, top), text, font=font, fill=fg_color)

    txt_img = ImageOps.fit(text_img, size, method=Image.BICUBIC,
                           centering=(0.5, 0.5))

    result_img.paste(txt_img)
    file_obj = io.BytesIO()
    txt_img.save(file_obj, fmt)

    return file_obj.getvalue()
python
Little spin-off from https://github.com/Visgean/python-placeholder that does not save the image to disk but returns it instead.
https://github.com/gregmuellegger/django-autofixture/blob/0b696fd3a06747459981e4269aff427676f84ae0/autofixture/placeholder.py#L13-L47
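A brief usage sketch, assuming Pillow is available; the function returns the encoded image as bytes, so the caller decides where to store them.

from autofixture.placeholder import get_placeholder_image

# Raw PNG bytes for a 200x100 placeholder image.
data = get_placeholder_image(200, 100)
with open('placeholder.png', 'wb') as fh:
    fh.write(data)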
gregmuellegger/django-autofixture
autofixture/base.py
AutoFixtureBase.is_inheritance_parent
def is_inheritance_parent(self, field):
    '''
    Checks if the field is the automatically created OneToOneField used by
    Django multi-table inheritance.
    '''
    return (
        isinstance(field, related.OneToOneField)
        and field.primary_key
        and issubclass(field.model, get_remote_field_to(field))
    )
python
Checks if the field is the automatically created OneToOneField used by Django multi-table inheritance.
https://github.com/gregmuellegger/django-autofixture/blob/0b696fd3a06747459981e4269aff427676f84ae0/autofixture/base.py#L249-L258
gregmuellegger/django-autofixture
autofixture/base.py
AutoFixtureBase.get_generator
def get_generator(self, field):
    '''
    Return a value generator based on the field instance that is passed to
    this method.

    This function may return ``None`` which means that the specified field
    will be ignored (e.g. if no matching generator was found).
    '''
    if isinstance(field, fields.AutoField):
        return None
    if self.is_inheritance_parent(field):
        return None
    if (
            field.default is not fields.NOT_PROVIDED and
            not self.overwrite_defaults and
            field.name not in self.field_values):
        return None

    kwargs = {}

    if field.name in self.field_values:
        value = self.field_values[field.name]
        if isinstance(value, generators.Generator):
            return value
        elif isinstance(value, AutoFixture):
            return generators.InstanceGenerator(autofixture=value)
        elif callable(value):
            return generators.CallableGenerator(value=value)
        return generators.StaticGenerator(value=value)

    if field.null:
        kwargs['empty_p'] = self.none_p

    if field.choices:
        return generators.ChoicesGenerator(choices=field.choices, **kwargs)

    if isinstance(field, related.ForeignKey):
        # if generate_fk is set, follow_fk is ignored.
        is_self_fk = (get_remote_field_to(field)().__class__ == self.model)
        if field.name in self.generate_fk and not is_self_fk:
            return generators.InstanceGenerator(
                autofixture.get(
                    get_remote_field_to(field),
                    follow_fk=self.follow_fk.get_deep_links(field.name),
                    generate_fk=self.generate_fk.get_deep_links(field.name)),
                limit_choices_to=get_remote_field(field).limit_choices_to)
        if field.name in self.follow_fk:
            selected = generators.InstanceSelector(
                get_remote_field_to(field),
                limit_choices_to=get_remote_field(field).limit_choices_to)
            if selected.get_value() is not None:
                return selected
        if field.blank or field.null:
            return generators.NoneGenerator()
        if is_self_fk and not field.null:
            raise CreateInstanceError(
                u'Cannot resolve self referencing field "%s" to "%s" without null=True' % (
                    field.name,
                    '%s.%s' % (
                        get_remote_field_to(field)._meta.app_label,
                        get_remote_field_to(field)._meta.object_name,
                    )
                ))
        raise CreateInstanceError(
            u'Cannot resolve ForeignKey "%s" to "%s". Provide either '
            u'"follow_fk" or "generate_fk" parameters.' % (
                field.name,
                '%s.%s' % (
                    get_remote_field_to(field)._meta.app_label,
                    get_remote_field_to(field)._meta.object_name,
                )
            ))

    if isinstance(field, related.ManyToManyField):
        if field.name in self.generate_m2m:
            min_count, max_count = self.generate_m2m[field.name]
            return generators.MultipleInstanceGenerator(
                autofixture.get(get_remote_field_to(field)),
                limit_choices_to=get_remote_field(field).limit_choices_to,
                min_count=min_count,
                max_count=max_count,
                **kwargs)
        if field.name in self.follow_m2m:
            min_count, max_count = self.follow_m2m[field.name]
            return generators.InstanceSelector(
                get_remote_field_to(field),
                limit_choices_to=get_remote_field(field).limit_choices_to,
                min_count=min_count,
                max_count=max_count,
                **kwargs)
        if field.blank or field.null:
            return generators.StaticGenerator([])
        raise CreateInstanceError(
            u'Cannot assign instances of "%s" to ManyToManyField "%s". '
            u'Provide either "follow_m2m" or "generate_m2m" argument.' % (
                '%s.%s' % (
                    get_remote_field_to(field)._meta.app_label,
                    get_remote_field_to(field)._meta.object_name,
                ),
                field.name,
            ))

    if isinstance(field, fields.FilePathField):
        return generators.FilePathGenerator(
            path=field.path, match=field.match,
            recursive=field.recursive,
            max_length=field.max_length, **kwargs)

    if isinstance(field, fields.CharField):
        if isinstance(field, fields.SlugField):
            generator = generators.SlugGenerator
        elif isinstance(field, fields.EmailField):
            return generators.EmailGenerator(
                max_length=min(field.max_length, 30))
        elif isinstance(field, fields.URLField):
            return generators.URLGenerator(
                max_length=min(field.max_length, 25))
        elif field.max_length > 15:
            return generators.LoremSentenceGenerator(
                common=False,
                max_length=field.max_length)
        else:
            generator = generators.StringGenerator
        return generator(max_length=field.max_length)

    if isinstance(field, fields.DecimalField):
        return generators.DecimalGenerator(
            decimal_places=field.decimal_places,
            max_digits=field.max_digits)

    if hasattr(fields, 'BigIntegerField'):
        if isinstance(field, fields.BigIntegerField):
            return generators.IntegerGenerator(
                min_value=-field.MAX_BIGINT - 1,
                max_value=field.MAX_BIGINT,
                **kwargs)

    if isinstance(field, ImageField):
        return generators.ImageGenerator(storage=field.storage, **kwargs)

    for field_class, generator in self.field_to_generator.items():
        if isinstance(field, field_class):
            return generator(**kwargs)

    return None
python
Return a value generator based on the field instance that is passed to this method. This function may return ``None`` which means that the specified field will be ignored (e.g. if no matching generator was found).
https://github.com/gregmuellegger/django-autofixture/blob/0b696fd3a06747459981e4269aff427676f84ae0/autofixture/base.py#L260-L391
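The field_values branch above accepts a plain value, a callable, or a Generator instance; a short sketch of those options, with Post standing in as a hypothetical model.

from autofixture import AutoFixture, generators

# 'Post' is a hypothetical model used only for illustration.
fixture = AutoFixture(Post, field_values={
    'status': 'draft',                                          # wrapped in a StaticGenerator
    'published': lambda: False,                                 # wrapped in a CallableGenerator
    'title': generators.LoremSentenceGenerator(max_length=50),  # Generator instances are used as-is
})
post = fixture.create_one()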
gregmuellegger/django-autofixture
autofixture/base.py
AutoFixtureBase.get_value
def get_value(self, field):
    '''
    Return a random value that can be assigned to the passed *field*
    instance.
    '''
    if field not in self._field_generators:
        self._field_generators[field] = self.get_generator(field)
    generator = self._field_generators[field]
    if generator is None:
        return self.IGNORE_FIELD
    value = generator()
    return value
python
Return a random value that can be assigned to the passed *field* instance.
https://github.com/gregmuellegger/django-autofixture/blob/0b696fd3a06747459981e4269aff427676f84ae0/autofixture/base.py#L393-L404
gregmuellegger/django-autofixture
autofixture/base.py
AutoFixtureBase.check_constraints
def check_constraints(self, instance):
    '''
    Return fieldnames which need recalculation.
    '''
    recalc_fields = []
    for constraint in self.constraints:
        try:
            constraint(self.model, instance)
        except constraints.InvalidConstraint as e:
            recalc_fields.extend(e.fields)
    return recalc_fields
python
Return fieldnames which need recalculation.
https://github.com/gregmuellegger/django-autofixture/blob/0b696fd3a06747459981e4269aff427676f84ae0/autofixture/base.py#L458-L468
gregmuellegger/django-autofixture
autofixture/base.py
AutoFixtureBase.create_one
def create_one(self, commit=True):
    '''
    Create and return one model instance. If *commit* is ``False`` the
    instance will not be saved and many to many relations will not be
    processed.

    Subclasses that override ``create_one`` can specify arbitrary keyword
    arguments. They will be passed through by the
    :meth:`autofixture.base.AutoFixture.create` method and the helper
    functions :func:`autofixture.create` and :func:`autofixture.create_one`.

    May raise :exc:`CreateInstanceError` if constraints are not satisfied.
    '''
    tries = self.tries
    instance = self.model()
    process = instance._meta.fields
    while process and tries > 0:
        for field in process:
            self.process_field(instance, field)
        process = self.check_constraints(instance)
        tries -= 1
    if tries == 0:
        raise CreateInstanceError(
            u'Cannot solve constraints for "%s", tried %d times. '
            u'Please check value generators or model constraints. '
            u'At least the following fields are involved: %s' % (
                '%s.%s' % (
                    self.model._meta.app_label,
                    self.model._meta.object_name),
                self.tries,
                ', '.join([field.name for field in process]),
            ))

    instance = self.pre_process_instance(instance)

    if commit:
        instance.save()

        # to handle particular case of GenericRelation
        # in Django pre 1.6 it appears in .many_to_many
        many_to_many = [f for f in instance._meta.many_to_many
                        if not isinstance(f, get_GenericRelation())]
        for field in many_to_many:
            self.process_m2m(instance, field)
    signals.instance_created.send(
        sender=self,
        model=self.model,
        instance=instance,
        committed=commit)

    post_process_kwargs = {}
    if 'commit' in getargnames(self.post_process_instance):
        post_process_kwargs['commit'] = commit
    else:
        warnings.warn(
            "Subclasses of AutoFixture need to provide a `commit` "
            "argument for post_process_instance methods",
            DeprecationWarning)
    return self.post_process_instance(instance, **post_process_kwargs)
python
Create and return one model instance. If *commit* is ``False`` the instance will not be saved and many to many relations will not be processed. Subclasses that override ``create_one`` can specify arbitrary keyword arguments. They will be passed through by the :meth:`autofixture.base.AutoFixture.create` method and the helper functions :func:`autofixture.create` and :func:`autofixture.create_one`. May raise :exc:`CreateInstanceError` if constraints are not satisfied.
https://github.com/gregmuellegger/django-autofixture/blob/0b696fd3a06747459981e4269aff427676f84ae0/autofixture/base.py#L489-L547
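A sketch of the subclassing pattern the docstring describes; Entry, EntryFixture and as_draft are illustrative names, not part of the library. Because as_draft appears in the overridden create_one signature, autofixture.create() routes it through to this method.

import autofixture
from autofixture import AutoFixture

class EntryFixture(AutoFixture):
    def create_one(self, commit=True, as_draft=False):
        # 'as_draft' is a custom keyword argument; the helper functions
        # forward it here because it is part of create_one's signature.
        instance = super(EntryFixture, self).create_one(commit=commit)
        if as_draft:
            instance.status = 'draft'
            if commit:
                instance.save()
        return instance

autofixture.register(Entry, EntryFixture)
entries = autofixture.create(Entry, 5, as_draft=True)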
gregmuellegger/django-autofixture
autofixture/base.py
AutoFixtureBase.create
def create(self, count=1, commit=True, **kwargs):
    '''
    Create and return ``count`` model instances. If *commit* is ``False``
    the instances will not be saved and many to many relations will not be
    processed.

    May raise ``CreateInstanceError`` if constraints are not satisfied.

    The method internally calls :meth:`create_one` to generate instances.
    '''
    object_list = []
    for i in range(count):
        instance = self.create_one(commit=commit, **kwargs)
        object_list.append(instance)
    return object_list
python
Create and return ``count`` model instances. If *commit* is ``False`` the instances will not be saved and many to many relations will not be processed. May raise ``CreateInstanceError`` if constraints are not satisfied. The method internally calls :meth:`create_one` to generate instances.
https://github.com/gregmuellegger/django-autofixture/blob/0b696fd3a06747459981e4269aff427676f84ae0/autofixture/base.py#L549-L563
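Calling the method directly looks like this (again with a hypothetical Entry model); with commit=False nothing is written to the database and many-to-many relations are skipped.

from autofixture import AutoFixture

# 'Entry' is a hypothetical model used only for illustration.
unsaved = AutoFixture(Entry).create(5, commit=False)  # instances are not saved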
gregmuellegger/django-autofixture
docs/conf.py
get_release
def get_release(package):
    """
    Return package version as listed in `__version__` in `__init__.py`.
    """
    init_path = os.path.join(PROJECT_PATH, package, '__init__.py')
    init_py = open(init_path).read()
    return re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
python
Return package version as listed in `__version__` in `__init__.py`.
https://github.com/gregmuellegger/django-autofixture/blob/0b696fd3a06747459981e4269aff427676f84ae0/docs/conf.py#L59-L65
gregmuellegger/django-autofixture
fabfile.py
opendocs
def opendocs(where='index', how='default'):
    '''
    Rebuilds the documentation and opens it in your browser.

    Use the first argument to specify how it should be opened:

        `d` or `default`: Open in new tab or new window, using the default
        method of your browser.

        `t` or `tab`: Open documentation in new tab.

        `n`, `w` or `window`: Open documentation in new window.
    '''
    import webbrowser
    docs_dir = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        'docs')
    index = os.path.join(docs_dir, '_build/html/%s.html' % where)
    builddocs('html')
    url = 'file://%s' % os.path.abspath(index)
    if how in ('d', 'default'):
        webbrowser.open(url)
    elif how in ('t', 'tab'):
        webbrowser.open_new_tab(url)
    elif how in ('n', 'w', 'window'):
        webbrowser.open_new(url)
python
Rebuilds the documentation and opens it in your browser. Use the first argument to specify how it should be opened: `d` or `default`: Open in new tab or new window, using the default method of your browser. `t` or `tab`: Open documentation in new tab. `n`, `w` or `window`: Open documentation in new window.
https://github.com/gregmuellegger/django-autofixture/blob/0b696fd3a06747459981e4269aff427676f84ae0/fabfile.py#L24-L49
plumdog/flask_table
examples/column_html_attrs.py
main
def main():
    items = [Item('Name1', 'Description1'),
             Item('Name2', 'Description2'),
             Item('Name3', 'Description3')]

    table = ItemTable(items)

    # or {{ table }} in jinja
    print(table.__html__())

    """Outputs:

    <table>
    <thead>
    <tr>
    <th class="my-name-class">Name</th>
    <th class="my-description-class" data-something="my-data" data-something-else="my-description-th-class">
    Description
    </th>
    </tr>
    </thead>
    <tbody>
    <tr>
    <td class="my-name-class">Name1</td>
    <td class="my-description-class" data-something="my-td-only-data">
    Description1
    </td>
    </tr>
    <tr>
    <td class="my-name-class">Name2</td>
    <td class="my-description-class" data-something="my-td-only-data">
    Description2
    </td>
    </tr>
    <tr>
    <td class="my-name-class">Name3</td>
    <td class="my-description-class" data-something="my-td-only-data">
    Description3
    </td>
    </tr>
    </tbody>
    </table>

    Except it doesn't bother to prettify the output.
    """
python
Outputs: <table> <thead> <tr> <th class="my-name-class">Name</th> <th class="my-description-class" data-something="my-data" data-something-else="my-description-th-class"> Description </th> </tr> </thead> <tbody> <tr> <td class="my-name-class">Name1</td> <td class="my-description-class" data-something="my-td-only-data"> Description1 </td> </tr> <tr> <td class="my-name-class">Name2</td> <td class="my-description-class" data-something="my-td-only-data"> Description2 </td> </tr> <tr> <td class="my-name-class">Name3</td> <td class="my-description-class" data-something="my-td-only-data"> Description3 </td> </tr> </tbody> </table> Except it doesn't bother to prettify the output.
https://github.com/plumdog/flask_table/blob/1eae252c6b26037a6aa19fcd787a981ddb3a9191/examples/column_html_attrs.py#L44-L93
plumdog/flask_table
examples/simple_nested.py
main
def main():
    items = [Item('Name1', 'Description1',
                  [SubItem('r1sr1c1', 'r1sr1c2'),
                   SubItem('r1sr2c1', 'r1sr2c2')]),
             Item('Name2', 'Description2',
                  [SubItem('r2sr1c1', 'r2sr1c2'),
                   SubItem('r2sr2c1', 'r2sr2c2')]),
             ]

    table = ItemTable(items)

    # or {{ table }} in jinja
    print(table.__html__())

    """Outputs:

    <table>
    <thead>
    <tr><th>Name</th><th>Description</th><th>Subtable</th></tr>
    </thead>
    <tbody>
    <tr><td>Name1</td><td>Description1</td><td><table>
    <thead>
    <tr><th>Sub-column 1</th><th>Sub-column 2</th></tr>
    </thead>
    <tbody>
    <tr><td>r1sr1c1</td><td>r1sr1c2</td></tr>
    <tr><td>r1sr2c1</td><td>r1sr2c2</td></tr>
    </tbody>
    </table></td></tr>
    <tr><td>Name2</td><td>Description2</td><td><table>
    <thead>
    <tr><th>Sub-column 1</th><th>Sub-column 2</th></tr>
    </thead>
    <tbody>
    <tr><td>r2sr1c1</td><td>r2sr1c2</td></tr>
    <tr><td>r2sr2c1</td><td>r2sr2c2</td></tr>
    </tbody>
    </table></td></tr>
    </tbody>
    </table>

    Except it doesn't bother to prettify the output.
    """
python
Outputs: <table> <thead> <tr><th>Name</th><th>Description</th><th>Subtable</th></tr> </thead> <tbody> <tr><td>Name1</td><td>Description1</td><td><table> <thead> <tr><th>Sub-column 1</th><th>Sub-column 2</th></tr> </thead> <tbody> <tr><td>r1sr1c1</td><td>r1sr1c2</td></tr> <tr><td>r1sr2c1</td><td>r1sr2c2</td></tr> </tbody> </table></td></tr> <tr><td>Name2</td><td>Description2</td><td><table> <thead> <tr><th>Sub-column 1</th><th>Sub-column 2</th></tr> </thead> <tbody> <tr><td>r2sr1c1</td><td>r2sr1c2</td></tr> <tr><td>r2sr2c1</td><td>r2sr2c2</td></tr> </tbody> </table></td></tr> </tbody> </table> Except it doesn't bother to prettify the output.
https://github.com/plumdog/flask_table/blob/1eae252c6b26037a6aa19fcd787a981ddb3a9191/examples/simple_nested.py#L37-L78
plumdog/flask_table
examples/dynamic.py
main
def main():
    TableCls = create_table()\
        .add_column('name', Col('Name'))\
        .add_column('description', Col('Description'))

    items = [dict(name='Name1', description='Description1'),
             dict(name='Name2', description='Description2'),
             dict(name='Name3', description='Description3')]

    table = TableCls(items)

    print(table.__html__())

    """Outputs:

    <table>
    <thead>
    <tr>
    <th>Name</th>
    <th>Description</th>
    </tr>
    </thead>
    <tbody>
    <tr>
    <td>Name1</td>
    <td>Description1</td>
    </tr>
    <tr>
    <td>Name2</td>
    <td>Description2</td>
    </tr>
    <tr>
    <td>Name3</td>
    <td>Description3</td>
    </tr>
    </tbody>
    </table>
    """
python
Outputs: <table> <thead> <tr> <th>Name</th> <th>Description</th> </tr> </thead> <tbody> <tr> <td>Name1</td> <td>Description1</td> </tr> <tr> <td>Name2</td> <td>Description2</td> </tr> <tr> <td>Name3</td> <td>Description3</td> </tr> </tbody> </table>
https://github.com/plumdog/flask_table/blob/1eae252c6b26037a6aa19fcd787a981ddb3a9191/examples/dynamic.py#L4-L41
plumdog/flask_table
flask_table/table.py
create_table
def create_table(name=str('_Table'), base=Table, options=None):
    """Creates and returns a new table class. You can specify a name for
    your class if you wish. You can also set the base class (or classes)
    that should be used when creating the class.
    """
    try:
        base = tuple(base)
    except TypeError:
        # Then assume that what we have is a single class, so make it
        # into a 1-tuple.
        base = (base,)

    return TableMeta(name, base, options or {})
python
Creates and returns a new table class. You can specify a name for your class if you wish. You can also set the base class (or classes) that should be used when creating the class.
https://github.com/plumdog/flask_table/blob/1eae252c6b26037a6aa19fcd787a981ddb3a9191/flask_table/table.py#L182-L195
plumdog/flask_table
flask_table/columns.py
Col.td_contents
def td_contents(self, item, attr_list):
    """Given an item and an attr, return the contents of the td.

    This method is a likely candidate to override when extending the
    Col class, which is done in LinkCol and ButtonCol. Override this
    method if you need to get some extra data from the item.

    Note that the output of this function is NOT escaped.
    """
    return self.td_format(self.from_attr_list(item, attr_list))
python
Given an item and an attr, return the contents of the td. This method is a likely candidate to override when extending the Col class, which is done in LinkCol and ButtonCol. Override this method if you need to get some extra data from the item. Note that the output of this function is NOT escaped.
https://github.com/plumdog/flask_table/blob/1eae252c6b26037a6aa19fcd787a981ddb3a9191/flask_table/columns.py#L102-L113
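A sketch of the kind of override the docstring suggests; BoldCol is a made-up name, and because td_contents output is not escaped, the override escapes the raw value itself before wrapping it in markup.

from markupsafe import escape
from flask_table import Col

class BoldCol(Col):
    """Hypothetical column that wraps each cell's value in <strong> tags."""

    def td_contents(self, item, attr_list):
        # td_contents output is not escaped, so escape the raw value here.
        value = self.from_attr_list(item, attr_list)
        return '<strong>{0}</strong>'.format(escape(value))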
peterjc/backports.lzma
backports/lzma/__init__.py
open
def open(filename, mode="rb", format=None, check=-1, preset=None, filters=None, encoding=None, errors=None, newline=None): """Open an LZMA-compressed file in binary or text mode. filename can be either an actual file name (given as a str or bytes object), in which case the named file is opened, or it can be an existing file object to read from or write to. The mode argument can be "r", "rb" (default), "w", "wb", "a", or "ab" for binary mode, or "rt", "wt" or "at" for text mode. The format, check, preset and filters arguments specify the compression settings, as for LZMACompressor, LZMADecompressor and LZMAFile. For binary mode, this function is equivalent to the LZMAFile constructor: LZMAFile(filename, mode, ...). In this case, the encoding, errors and newline arguments must not be provided. For text mode, a LZMAFile object is created, and wrapped in an io.TextIOWrapper instance with the specified encoding, error handling behavior, and line ending(s). """ if "t" in mode: if "b" in mode: raise ValueError("Invalid mode: %r" % (mode,)) else: if encoding is not None: raise ValueError("Argument 'encoding' not supported in binary mode") if errors is not None: raise ValueError("Argument 'errors' not supported in binary mode") if newline is not None: raise ValueError("Argument 'newline' not supported in binary mode") lz_mode = mode.replace("t", "") binary_file = LZMAFile(filename, lz_mode, format=format, check=check, preset=preset, filters=filters) if "t" in mode: return io.TextIOWrapper(binary_file, encoding, errors, newline) else: return binary_file
python
def open(filename, mode="rb", format=None, check=-1, preset=None, filters=None, encoding=None, errors=None, newline=None): """Open an LZMA-compressed file in binary or text mode. filename can be either an actual file name (given as a str or bytes object), in which case the named file is opened, or it can be an existing file object to read from or write to. The mode argument can be "r", "rb" (default), "w", "wb", "a", or "ab" for binary mode, or "rt", "wt" or "at" for text mode. The format, check, preset and filters arguments specify the compression settings, as for LZMACompressor, LZMADecompressor and LZMAFile. For binary mode, this function is equivalent to the LZMAFile constructor: LZMAFile(filename, mode, ...). In this case, the encoding, errors and newline arguments must not be provided. For text mode, a LZMAFile object is created, and wrapped in an io.TextIOWrapper instance with the specified encoding, error handling behavior, and line ending(s). """ if "t" in mode: if "b" in mode: raise ValueError("Invalid mode: %r" % (mode,)) else: if encoding is not None: raise ValueError("Argument 'encoding' not supported in binary mode") if errors is not None: raise ValueError("Argument 'errors' not supported in binary mode") if newline is not None: raise ValueError("Argument 'newline' not supported in binary mode") lz_mode = mode.replace("t", "") binary_file = LZMAFile(filename, lz_mode, format=format, check=check, preset=preset, filters=filters) if "t" in mode: return io.TextIOWrapper(binary_file, encoding, errors, newline) else: return binary_file
Open an LZMA-compressed file in binary or text mode. filename can be either an actual file name (given as a str or bytes object), in which case the named file is opened, or it can be an existing file object to read from or write to. The mode argument can be "r", "rb" (default), "w", "wb", "a", or "ab" for binary mode, or "rt", "wt" or "at" for text mode. The format, check, preset and filters arguments specify the compression settings, as for LZMACompressor, LZMADecompressor and LZMAFile. For binary mode, this function is equivalent to the LZMAFile constructor: LZMAFile(filename, mode, ...). In this case, the encoding, errors and newline arguments must not be provided. For text mode, a LZMAFile object is created, and wrapped in an io.TextIOWrapper instance with the specified encoding, error handling behavior, and line ending(s).
https://github.com/peterjc/backports.lzma/blob/6555d8b8e493a35159025b4cfc204dfb54c33d3e/backports/lzma/__init__.py#L396-L438
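A short usage sketch of the backport, mirroring the stdlib lzma module; the file name is illustrative.

from backports import lzma

with lzma.open('data.xz', 'wb') as fh:        # binary mode, same as LZMAFile(...)
    fh.write(b'hello world')

with lzma.open('data.xz', 'rt', encoding='utf-8') as fh:   # text mode via TextIOWrapper
    print(fh.read())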
peterjc/backports.lzma
backports/lzma/__init__.py
compress
def compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None):
    """Compress a block of data.

    Refer to LZMACompressor's docstring for a description of the
    optional arguments *format*, *check*, *preset* and *filters*.

    For incremental compression, use an LZMACompressor object instead.
    """
    comp = LZMACompressor(format, check, preset, filters)
    return comp.compress(data) + comp.flush()
python
Compress a block of data. Refer to LZMACompressor's docstring for a description of the optional arguments *format*, *check*, *preset* and *filters*. For incremental compression, use an LZMACompressor object instead.
https://github.com/peterjc/backports.lzma/blob/6555d8b8e493a35159025b4cfc204dfb54c33d3e/backports/lzma/__init__.py#L441-L450
peterjc/backports.lzma
backports/lzma/__init__.py
decompress
def decompress(data, format=FORMAT_AUTO, memlimit=None, filters=None):
    """Decompress a block of data.

    Refer to LZMADecompressor's docstring for a description of the
    optional arguments *format*, *check* and *filters*.

    For incremental decompression, use a LZMADecompressor object instead.
    """
    results = []
    while True:
        decomp = LZMADecompressor(format, memlimit, filters)
        try:
            res = decomp.decompress(data)
        except LZMAError:
            if results:
                break  # Leftover data is not a valid LZMA/XZ stream; ignore it.
            else:
                raise  # Error on the first iteration; bail out.
        results.append(res)
        if not decomp.eof:
            raise LZMAError("Compressed data ended before the "
                            "end-of-stream marker was reached")
        data = decomp.unused_data
        if not data:
            break
    return b"".join(results)
python
Decompress a block of data. Refer to LZMADecompressor's docstring for a description of the optional arguments *format*, *check* and *filters*. For incremental decompression, use a LZMADecompressor object instead.
https://github.com/peterjc/backports.lzma/blob/6555d8b8e493a35159025b4cfc204dfb54c33d3e/backports/lzma/__init__.py#L453-L478
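A round-trip sketch using the two one-shot helpers.

from backports import lzma

payload = b'a block of data worth compressing' * 100
blob = lzma.compress(payload)             # one-shot compression, XZ format by default
assert lzma.decompress(blob) == payload   # one-shot decompression of the whole block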
peterjc/backports.lzma
backports/lzma/__init__.py
LZMAFile.close
def close(self):
    """Flush and close the file.

    May be called more than once without error. Once the file is
    closed, any other operation on it will raise a ValueError.
    """
    if self._mode == _MODE_CLOSED:
        return
    try:
        if self._mode in (_MODE_READ, _MODE_READ_EOF):
            self._decompressor = None
            self._buffer = None
        elif self._mode == _MODE_WRITE:
            self._fp.write(self._compressor.flush())
            self._compressor = None
    finally:
        try:
            if self._closefp:
                self._fp.close()
        finally:
            self._fp = None
            self._closefp = False
            self._mode = _MODE_CLOSED
python
Flush and close the file. May be called more than once without error. Once the file is closed, any other operation on it will raise a ValueError.
https://github.com/peterjc/backports.lzma/blob/6555d8b8e493a35159025b4cfc204dfb54c33d3e/backports/lzma/__init__.py#L134-L156
peterjc/backports.lzma
backports/lzma/__init__.py
LZMAFile.peek
def peek(self, size=-1):
    """Return buffered data without advancing the file position.

    Always returns at least one byte of data, unless at EOF.
    The exact number of bytes returned is unspecified.
    """
    self._check_can_read()
    if self._mode == _MODE_READ_EOF or not self._fill_buffer():
        return b""
    return self._buffer
python
Return buffered data without advancing the file position. Always returns at least one byte of data, unless at EOF. The exact number of bytes returned is unspecified.
https://github.com/peterjc/backports.lzma/blob/6555d8b8e493a35159025b4cfc204dfb54c33d3e/backports/lzma/__init__.py#L268-L277
peterjc/backports.lzma
backports/lzma/__init__.py
LZMAFile.read
def read(self, size=-1):
    """Read up to size uncompressed bytes from the file.

    If size is negative or omitted, read until EOF is reached.
    Returns b"" if the file is already at EOF.
    """
    self._check_can_read()
    if size is None:
        # This is not needed on Python 3 where the comparison to zero
        # will fail with a TypeError.
        raise TypeError("Read size should be an integer, not None")
    if self._mode == _MODE_READ_EOF or size == 0:
        return b""
    elif size < 0:
        return self._read_all()
    else:
        return self._read_block(size)
python
Read up to size uncompressed bytes from the file. If size is negative or omitted, read until EOF is reached. Returns b"" if the file is already at EOF.
https://github.com/peterjc/backports.lzma/blob/6555d8b8e493a35159025b4cfc204dfb54c33d3e/backports/lzma/__init__.py#L279-L295
peterjc/backports.lzma
backports/lzma/__init__.py
LZMAFile.read1
def read1(self, size=-1):
    """Read up to size uncompressed bytes, while trying to avoid
    making multiple reads from the underlying stream.

    Returns b"" if the file is at EOF.
    """
    # Usually, read1() calls _fp.read() at most once. However, sometimes
    # this does not give enough data for the decompressor to make progress.
    # In this case we make multiple reads, to avoid returning b"".
    self._check_can_read()
    if size is None:
        # This is not needed on Python 3 where the comparison to zero
        # will fail with a TypeError.
        raise TypeError("Read size should be an integer, not None")
    if (size == 0 or self._mode == _MODE_READ_EOF or
            not self._fill_buffer()):
        return b""
    if 0 < size < len(self._buffer):
        data = self._buffer[:size]
        self._buffer = self._buffer[size:]
    else:
        data = self._buffer
        self._buffer = None
    self._pos += len(data)
    return data
python
Read up to size uncompressed bytes, while trying to avoid making multiple reads from the underlying stream. Returns b"" if the file is at EOF.
https://github.com/peterjc/backports.lzma/blob/6555d8b8e493a35159025b4cfc204dfb54c33d3e/backports/lzma/__init__.py#L297-L321
peterjc/backports.lzma
backports/lzma/__init__.py
LZMAFile.write
def write(self, data):
    """Write a bytes object to the file.

    Returns the number of uncompressed bytes written, which is
    always len(data). Note that due to buffering, the file on disk
    may not reflect the data written until close() is called.
    """
    self._check_can_write()
    compressed = self._compressor.compress(data)
    self._fp.write(compressed)
    self._pos += len(data)
    return len(data)
python
Write a bytes object to the file. Returns the number of uncompressed bytes written, which is always len(data). Note that due to buffering, the file on disk may not reflect the data written until close() is called.
https://github.com/peterjc/backports.lzma/blob/6555d8b8e493a35159025b4cfc204dfb54c33d3e/backports/lzma/__init__.py#L323-L334
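write() returns the uncompressed byte count and buffers inside the compressor, so nothing is guaranteed to reach the underlying stream until close() flushes it. A small sketch writing into an in-memory buffer (same import assumption as above):

try:
    import lzma
except ImportError:
    from backports import lzma

import io

buf = io.BytesIO()
fh = lzma.LZMAFile(buf, mode="wb")
written = fh.write(b"payload-1\n") + fh.write(b"payload-2\n")
fh.close()                           # flushes the compressed stream into buf
assert written == 20                 # uncompressed bytes, per the docstring
assert lzma.decompress(buf.getvalue()) == b"payload-1\npayload-2\n"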
peterjc/backports.lzma
backports/lzma/__init__.py
LZMAFile.seek
def seek(self, offset, whence=0): """Change the file position. The new position is specified by offset, relative to the position indicated by whence. Possible values for whence are: 0: start of stream (default): offset must not be negative 1: current stream position 2: end of stream; offset must not be positive Returns the new file position. Note that seeking is emulated, so depending on the parameters, this operation may be extremely slow. """ self._check_can_seek() # Recalculate offset as an absolute file position. if whence == 0: pass elif whence == 1: offset = self._pos + offset elif whence == 2: # Seeking relative to EOF - we need to know the file's size. if self._size < 0: self._read_all(return_data=False) offset = self._size + offset else: raise ValueError("Invalid value for whence: {}".format(whence)) # Make it so that offset is the number of bytes to skip forward. if offset is None: #This is not needed on Python 3 where the comparison to self._pos #will fail with a TypeError. raise TypeError("Seek offset should be an integer, not None") if offset < self._pos: self._rewind() else: offset -= self._pos # Read and discard data until we reach the desired position. if self._mode != _MODE_READ_EOF: self._read_block(offset, return_data=False) return self._pos
python
def seek(self, offset, whence=0): """Change the file position. The new position is specified by offset, relative to the position indicated by whence. Possible values for whence are: 0: start of stream (default): offset must not be negative 1: current stream position 2: end of stream; offset must not be positive Returns the new file position. Note that seeking is emulated, so depending on the parameters, this operation may be extremely slow. """ self._check_can_seek() # Recalculate offset as an absolute file position. if whence == 0: pass elif whence == 1: offset = self._pos + offset elif whence == 2: # Seeking relative to EOF - we need to know the file's size. if self._size < 0: self._read_all(return_data=False) offset = self._size + offset else: raise ValueError("Invalid value for whence: {}".format(whence)) # Make it so that offset is the number of bytes to skip forward. if offset is None: #This is not needed on Python 3 where the comparison to self._pos #will fail with a TypeError. raise TypeError("Seek offset should be an integer, not None") if offset < self._pos: self._rewind() else: offset -= self._pos # Read and discard data until we reach the desired position. if self._mode != _MODE_READ_EOF: self._read_block(offset, return_data=False) return self._pos
Change the file position. The new position is specified by offset, relative to the position indicated by whence. Possible values for whence are: 0: start of stream (default): offset must not be negative 1: current stream position 2: end of stream; offset must not be positive Returns the new file position. Note that seeking is emulated, so depending on the parameters, this operation may be extremely slow.
https://github.com/peterjc/backports.lzma/blob/6555d8b8e493a35159025b4cfc204dfb54c33d3e/backports/lzma/__init__.py#L344-L388
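Because seeking is emulated by rewinding and re-decompressing, backward seeks and seeks relative to EOF can be expensive; the whence values behave as documented above. A short sketch (same import assumption as above):

try:
    import lzma
except ImportError:
    from backports import lzma

import io

data = bytes(bytearray(range(100))) * 10     # 1000 known bytes
with lzma.LZMAFile(io.BytesIO(lzma.compress(data))) as fh:
    fh.seek(10)                              # whence=0: absolute position
    fh.seek(5, 1)                            # whence=1: relative to current -> 15
    assert fh.tell() == 15
    fh.seek(-10, 2)                          # whence=2: relative to EOF (reads ahead)
    assert fh.read() == data[-10:]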
inveniosoftware/invenio-github
invenio_github/models.py
Repository.create
def create(cls, user_id, github_id=None, name=None, **kwargs): """Create the repository.""" with db.session.begin_nested(): obj = cls(user_id=user_id, github_id=github_id, name=name, **kwargs) db.session.add(obj) return obj
python
def create(cls, user_id, github_id=None, name=None, **kwargs): """Create the repository.""" with db.session.begin_nested(): obj = cls(user_id=user_id, github_id=github_id, name=name, **kwargs) db.session.add(obj) return obj
Create the repository.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/models.py#L172-L178
inveniosoftware/invenio-github
invenio_github/models.py
Repository.get
def get(cls, user_id, github_id=None, name=None, check_owner=True): """Return a repository. :param integer user_id: User identifier. :param integer github_id: GitHub repository identifier. :param str name: GitHub repository full name. :returns: The repository object. :raises: :py:exc:`~sqlalchemy.orm.exc.NoResultFound`: if the repository doesn't exist. :raises: :py:exc:`~sqlalchemy.orm.exc.MultipleResultsFound`: if multiple repositories with the specified GitHub id and/or name exist. :raises: :py:exc:`RepositoryAccessError`: if the user is not the owner of the repository. """ repo = cls.query.filter((Repository.github_id == github_id) | (Repository.name == name)).one() if (check_owner and repo and repo.user_id and repo.user_id != int(user_id)): raise RepositoryAccessError( u'User {user} cannot access repository {repo}({repo_id}).' .format(user=user_id, repo=name, repo_id=github_id) ) return repo
python
def get(cls, user_id, github_id=None, name=None, check_owner=True): """Return a repository. :param integer user_id: User identifier. :param integer github_id: GitHub repository identifier. :param str name: GitHub repository full name. :returns: The repository object. :raises: :py:exc:`~sqlalchemy.orm.exc.NoResultFound`: if the repository doesn't exist. :raises: :py:exc:`~sqlalchemy.orm.exc.MultipleResultsFound`: if multiple repositories with the specified GitHub id and/or name exist. :raises: :py:exc:`RepositoryAccessError`: if the user is not the owner of the repository. """ repo = cls.query.filter((Repository.github_id == github_id) | (Repository.name == name)).one() if (check_owner and repo and repo.user_id and repo.user_id != int(user_id)): raise RepositoryAccessError( u'User {user} cannot access repository {repo}({repo_id}).' .format(user=user_id, repo=name, repo_id=github_id) ) return repo
Return a repository. :param integer user_id: User identifier. :param integer github_id: GitHub repository identifier. :param str name: GitHub repository full name. :returns: The repository object. :raises: :py:exc:`~sqlalchemy.orm.exc.NoResultFound`: if the repository doesn't exist. :raises: :py:exc:`~sqlalchemy.orm.exc.MultipleResultsFound`: if multiple repositories with the specified GitHub id and/or name exist. :raises: :py:exc:`RepositoryAccessError`: if the user is not the owner of the repository.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/models.py#L181-L204
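Callers of Repository.get() have to be ready for all three documented exceptions. A hedged sketch of that handling; the helper name is made up, and the error import path is an assumption based on the module layout shown here:

from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound

from invenio_github.errors import RepositoryAccessError   # assumed import path
from invenio_github.models import Repository


def find_repo(user_id, github_id):
    """Illustrative helper: return the Repository row or None."""
    try:
        return Repository.get(user_id, github_id=github_id)
    except NoResultFound:
        return None                    # repository not known locally yet
    except RepositoryAccessError:
        return None                    # owned by a different user
    except MultipleResultsFound:
        raise                          # ambiguous github_id/name, let it surface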
inveniosoftware/invenio-github
invenio_github/models.py
Repository.enable
def enable(cls, user_id, github_id, name, hook): """Enable webhooks for a repository. If the repository does not exist it will create one. :param user_id: User identifier. :param github_id: GitHub repository identifier. :param name: Fully qualified name of the repository. :param hook: GitHub hook identifier. """ try: repo = cls.get(user_id, github_id=github_id, name=name) except NoResultFound: repo = cls.create(user_id=user_id, github_id=github_id, name=name) repo.hook = hook repo.user_id = user_id return repo
python
def enable(cls, user_id, github_id, name, hook): """Enable webhooks for a repository. If the repository does not exist it will create one. :param user_id: User identifier. :param github_id: GitHub repository identifier. :param name: Fully qualified name of the repository. :param hook: GitHub hook identifier. """ try: repo = cls.get(user_id, github_id=github_id, name=name) except NoResultFound: repo = cls.create(user_id=user_id, github_id=github_id, name=name) repo.hook = hook repo.user_id = user_id return repo
Enable webhooks for a repository. If the repository does not exist it will create one. :param user_id: User identifier. :param github_id: GitHub repository identifier. :param name: Fully qualified name of the repository. :param hook: GitHub hook identifier.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/models.py#L207-L223
inveniosoftware/invenio-github
invenio_github/models.py
Repository.disable
def disable(cls, user_id, github_id, name): """Disable webhooks for a repository. Disables the webhook from a repository if it exists in the DB. :param user_id: User identifier. :param github_id: GitHub id of the repository. :param name: Fully qualified name of the repository. """ repo = cls.get(user_id, github_id=github_id, name=name) repo.hook = None repo.user_id = None return repo
python
def disable(cls, user_id, github_id, name): """Disable webhooks for a repository. Disables the webhook from a repository if it exists in the DB. :param user_id: User identifier. :param github_id: GitHub id of the repository. :param name: Fully qualified name of the repository. """ repo = cls.get(user_id, github_id=github_id, name=name) repo.hook = None repo.user_id = None return repo
Disable webhooks for a repository. Disables the webhook from a repository if it exists in the DB. :param user_id: User identifier. :param github_id: GitHub id of the repository. :param name: Fully qualified name of the repository.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/models.py#L226-L238
inveniosoftware/invenio-github
invenio_github/models.py
Release.create
def create(cls, event): """Create a new Release model.""" # Check if the release has already been received release_id = event.payload['release']['id'] existing_release = Release.query.filter_by( release_id=release_id, ).first() if existing_release: raise ReleaseAlreadyReceivedError( u'{release} has already been received.' .format(release=existing_release) ) # Create the Release repo_id = event.payload['repository']['id'] repo = Repository.get(user_id=event.user_id, github_id=repo_id) if repo.enabled: with db.session.begin_nested(): release = cls( release_id=release_id, tag=event.payload['release']['tag_name'], repository=repo, event=event, status=ReleaseStatus.RECEIVED, ) db.session.add(release) return release else: current_app.logger.warning( u'Release creation attempt on disabled {repo}.' .format(repo=repo) ) raise RepositoryDisabledError( u'{repo} is not enabled for webhooks.'.format(repo=repo) )
python
def create(cls, event): """Create a new Release model.""" # Check if the release has already been received release_id = event.payload['release']['id'] existing_release = Release.query.filter_by( release_id=release_id, ).first() if existing_release: raise ReleaseAlreadyReceivedError( u'{release} has already been received.' .format(release=existing_release) ) # Create the Release repo_id = event.payload['repository']['id'] repo = Repository.get(user_id=event.user_id, github_id=repo_id) if repo.enabled: with db.session.begin_nested(): release = cls( release_id=release_id, tag=event.payload['release']['tag_name'], repository=repo, event=event, status=ReleaseStatus.RECEIVED, ) db.session.add(release) return release else: current_app.logger.warning( u'Release creation attempt on disabled {repo}.' .format(repo=repo) ) raise RepositoryDisabledError( u'{repo} is not enabled for webhooks.'.format(repo=repo) )
Create a new Release model.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/models.py#L314-L348
inveniosoftware/invenio-github
invenio_github/models.py
Release.record
def record(self): """Get Record object.""" if self.recordmetadata: return Record(self.recordmetadata.json, model=self.recordmetadata) else: return None
python
def record(self): """Get Record object.""" if self.recordmetadata: return Record(self.recordmetadata.json, model=self.recordmetadata) else: return None
Get Record object.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/models.py#L351-L356
inveniosoftware/invenio-github
invenio_github/views/badge.py
get_badge_image_url
def get_badge_image_url(pid, ext='svg'): """Return the badge for a DOI.""" return url_for('invenio_formatter_badges.badge', title=pid.pid_type, value=pid.pid_value, ext=ext)
python
def get_badge_image_url(pid, ext='svg'): """Return the badge for a DOI.""" return url_for('invenio_formatter_badges.badge', title=pid.pid_type, value=pid.pid_value, ext=ext)
Return the badge for a DOI.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/views/badge.py#L51-L54
inveniosoftware/invenio-github
invenio_github/views/badge.py
index_old
def index_old(user_id, repo_name): """Generate a badge for a specific GitHub repository.""" pid = get_pid_of_latest_release_or_404(name=repo_name) return redirect(get_badge_image_url(pid))
python
def index_old(user_id, repo_name): """Generate a badge for a specific GitHub repository.""" pid = get_pid_of_latest_release_or_404(name=repo_name) return redirect(get_badge_image_url(pid))
Generate a badge for a specific GitHub repository.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/views/badge.py#L73-L76
inveniosoftware/invenio-github
invenio_github/tasks.py
disconnect_github
def disconnect_github(access_token, repo_hooks): """Uninstall webhooks.""" # Note at this point the remote account and all associated data have # already been deleted. The celery task is passed the access_token to make # some last cleanup and afterwards delete itself remotely. import github3 from .api import GitHubAPI try: gh = github3.login(token=access_token) for repo_id, repo_hook in repo_hooks: ghrepo = gh.repository_with_id(repo_id) if ghrepo: hook = ghrepo.hook(repo_hook) if hook and hook.delete(): info_msg = u'Deleted hook {hook} from {repo}'.format( hook=hook.id, repo=ghrepo.full_name) current_app.logger.info(info_msg) # If we finished our clean-up successfully, we can revoke the token GitHubAPI.revoke_token(access_token) except Exception as exc: # Retry in case GitHub may be down... disconnect_github.retry(exc=exc)
python
def disconnect_github(access_token, repo_hooks): """Uninstall webhooks.""" # Note at this point the remote account and all associated data have # already been deleted. The celery task is passed the access_token to make # some last cleanup and afterwards delete itself remotely. import github3 from .api import GitHubAPI try: gh = github3.login(token=access_token) for repo_id, repo_hook in repo_hooks: ghrepo = gh.repository_with_id(repo_id) if ghrepo: hook = ghrepo.hook(repo_hook) if hook and hook.delete(): info_msg = u'Deleted hook {hook} from {repo}'.format( hook=hook.id, repo=ghrepo.full_name) current_app.logger.info(info_msg) # If we finished our clean-up successfully, we can revoke the token GitHubAPI.revoke_token(access_token) except Exception as exc: # Retry in case GitHub may be down... disconnect_github.retry(exc=exc)
Uninstall webhooks.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/tasks.py#L38-L60
inveniosoftware/invenio-github
invenio_github/tasks.py
sync_hooks
def sync_hooks(user_id, repositories): """Sync repository hooks for a user.""" from .api import GitHubAPI try: # Sync hooks gh = GitHubAPI(user_id=user_id) for repo_id in repositories: try: with db.session.begin_nested(): gh.sync_repo_hook(repo_id) # We commit per repository, because while the task is running # the user might enable/disable a hook. db.session.commit() except RepositoryAccessError as e: current_app.logger.warning(e.message, exc_info=True) except NoResultFound: pass # Repository not in DB yet except Exception as exc: sync_hooks.retry(exc=exc)
python
def sync_hooks(user_id, repositories): """Sync repository hooks for a user.""" from .api import GitHubAPI try: # Sync hooks gh = GitHubAPI(user_id=user_id) for repo_id in repositories: try: with db.session.begin_nested(): gh.sync_repo_hook(repo_id) # We commit per repository, because while the task is running # the user might enable/disable a hook. db.session.commit() except RepositoryAccessError as e: current_app.logger.warning(e.message, exc_info=True) except NoResultFound: pass # Repository not in DB yet except Exception as exc: sync_hooks.retry(exc=exc)
Sync repository hooks for a user.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/tasks.py#L64-L83
inveniosoftware/invenio-github
invenio_github/tasks.py
process_release
def process_release(release_id, verify_sender=False): """Process a received Release.""" from invenio_db import db from invenio_rest.errors import RESTException from .errors import InvalidSenderError from .models import Release, ReleaseStatus from .proxies import current_github release_model = Release.query.filter( Release.release_id == release_id, Release.status.in_([ReleaseStatus.RECEIVED, ReleaseStatus.FAILED]), ).one() release_model.status = ReleaseStatus.PROCESSING db.session.commit() release = current_github.release_api_class(release_model) if verify_sender and not release.verify_sender(): raise InvalidSenderError( u'Invalid sender for event {event} for user {user}' .format(event=release.event.id, user=release.event.user_id) ) def _get_err_obj(msg): """Generate the error entry with a Sentry ID.""" err = {'errors': msg} if hasattr(g, 'sentry_event_id'): err['error_id'] = str(g.sentry_event_id) return err try: release.publish() release.model.status = ReleaseStatus.PUBLISHED except RESTException as rest_ex: release.model.errors = json.loads(rest_ex.get_body()) release.model.status = ReleaseStatus.FAILED current_app.logger.exception( u'Error while processing {release}'.format(release=release.model)) # TODO: We may want to handle GitHub errors differently in the future # except GitHubError as github_ex: # release.model.errors = {'error': str(e)} # release.model.status = ReleaseStatus.FAILED # current_app.logger.exception( # 'Error while processing {release}' # .format(release=release.model)) except CustomGitHubMetadataError as e: release.model.errors = _get_err_obj(str(e)) release.model.status = ReleaseStatus.FAILED current_app.logger.exception( u'Error while processing {release}'.format(release=release.model)) except Exception: release.model.errors = _get_err_obj('Unknown error occurred.') release.model.status = ReleaseStatus.FAILED current_app.logger.exception( u'Error while processing {release}'.format(release=release.model)) finally: db.session.commit()
python
def process_release(release_id, verify_sender=False): """Process a received Release.""" from invenio_db import db from invenio_rest.errors import RESTException from .errors import InvalidSenderError from .models import Release, ReleaseStatus from .proxies import current_github release_model = Release.query.filter( Release.release_id == release_id, Release.status.in_([ReleaseStatus.RECEIVED, ReleaseStatus.FAILED]), ).one() release_model.status = ReleaseStatus.PROCESSING db.session.commit() release = current_github.release_api_class(release_model) if verify_sender and not release.verify_sender(): raise InvalidSenderError( u'Invalid sender for event {event} for user {user}' .format(event=release.event.id, user=release.event.user_id) ) def _get_err_obj(msg): """Generate the error entry with a Sentry ID.""" err = {'errors': msg} if hasattr(g, 'sentry_event_id'): err['error_id'] = str(g.sentry_event_id) return err try: release.publish() release.model.status = ReleaseStatus.PUBLISHED except RESTException as rest_ex: release.model.errors = json.loads(rest_ex.get_body()) release.model.status = ReleaseStatus.FAILED current_app.logger.exception( u'Error while processing {release}'.format(release=release.model)) # TODO: We may want to handle GitHub errors differently in the future # except GitHubError as github_ex: # release.model.errors = {'error': str(e)} # release.model.status = ReleaseStatus.FAILED # current_app.logger.exception( # 'Error while processing {release}' # .format(release=release.model)) except CustomGitHubMetadataError as e: release.model.errors = _get_err_obj(str(e)) release.model.status = ReleaseStatus.FAILED current_app.logger.exception( u'Error while processing {release}'.format(release=release.model)) except Exception: release.model.errors = _get_err_obj('Unknown error occurred.') release.model.status = ReleaseStatus.FAILED current_app.logger.exception( u'Error while processing {release}'.format(release=release.model)) finally: db.session.commit()
Process a received Release.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/tasks.py#L87-L143
inveniosoftware/invenio-github
invenio_github/views/github.py
naturaltime
def naturaltime(val): """Get humanized version of time.""" val = val.replace(tzinfo=pytz.utc) \ if isinstance(val, datetime) else parse(val) now = datetime.utcnow().replace(tzinfo=pytz.utc) return humanize.naturaltime(now - val)
python
def naturaltime(val): """Get humanized version of time.""" val = val.replace(tzinfo=pytz.utc) \ if isinstance(val, datetime) else parse(val) now = datetime.utcnow().replace(tzinfo=pytz.utc) return humanize.naturaltime(now - val)
Get humanized version of time.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/views/github.py#L61-L67
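The filter accepts either a datetime (naive values are treated as UTC) or a parseable timestamp string. A standalone sketch that inlines the filter body shown above so it runs without a Flask app; humanize, pytz and dateutil are the dependencies it already uses:

from datetime import datetime, timedelta

import humanize
import pytz
from dateutil.parser import parse


def naturaltime(val):
    # Inlined copy of the template filter above.
    val = val.replace(tzinfo=pytz.utc) if isinstance(val, datetime) else parse(val)
    now = datetime.utcnow().replace(tzinfo=pytz.utc)
    return humanize.naturaltime(now - val)


print(naturaltime(datetime.utcnow() - timedelta(hours=3)))   # '3 hours ago'
print(naturaltime('2015-01-01T00:00:00+00:00'))              # e.g. 'N years ago'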
inveniosoftware/invenio-github
invenio_github/views/github.py
index
def index(): """Display list of the user's repositories.""" github = GitHubAPI(user_id=current_user.id) token = github.session_token ctx = dict(connected=False) if token: # The user is authenticated and the token we have is still valid. if github.account.extra_data.get('login') is None: github.init_account() db.session.commit() # Sync if needed if request.method == 'POST' or github.check_sync(): # When we're in an XHR request, we want to synchronously sync hooks github.sync(async_hooks=(not request.is_xhr)) db.session.commit() # Generate the repositories view object extra_data = github.account.extra_data repos = extra_data['repos'] if repos: # 'Enhance' our repos dict, from our database model db_repos = Repository.query.filter( Repository.github_id.in_([int(k) for k in repos.keys()]), ).all() for repo in db_repos: repos[str(repo.github_id)]['instance'] = repo repos[str(repo.github_id)]['latest'] = GitHubRelease( repo.latest_release()) last_sync = humanize.naturaltime( (utcnow() - parse_timestamp(extra_data['last_sync']))) ctx.update({ 'connected': True, 'repos': sorted(repos.items(), key=lambda x: x[1]['full_name']), 'last_sync': last_sync, }) return render_template(current_app.config['GITHUB_TEMPLATE_INDEX'], **ctx)
python
def index(): """Display list of the user's repositories.""" github = GitHubAPI(user_id=current_user.id) token = github.session_token ctx = dict(connected=False) if token: # The user is authenticated and the token we have is still valid. if github.account.extra_data.get('login') is None: github.init_account() db.session.commit() # Sync if needed if request.method == 'POST' or github.check_sync(): # When we're in an XHR request, we want to synchronously sync hooks github.sync(async_hooks=(not request.is_xhr)) db.session.commit() # Generate the repositories view object extra_data = github.account.extra_data repos = extra_data['repos'] if repos: # 'Enhance' our repos dict, from our database model db_repos = Repository.query.filter( Repository.github_id.in_([int(k) for k in repos.keys()]), ).all() for repo in db_repos: repos[str(repo.github_id)]['instance'] = repo repos[str(repo.github_id)]['latest'] = GitHubRelease( repo.latest_release()) last_sync = humanize.naturaltime( (utcnow() - parse_timestamp(extra_data['last_sync']))) ctx.update({ 'connected': True, 'repos': sorted(repos.items(), key=lambda x: x[1]['full_name']), 'last_sync': last_sync, }) return render_template(current_app.config['GITHUB_TEMPLATE_INDEX'], **ctx)
Display list of the user's repositories.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/views/github.py#L94-L134
inveniosoftware/invenio-github
invenio_github/views/github.py
repository
def repository(name): """Display selected repository.""" user_id = current_user.id github = GitHubAPI(user_id=user_id) token = github.session_token if token: repos = github.account.extra_data.get('repos', []) repo = next((repo for repo_id, repo in repos.items() if repo.get('full_name') == name), {}) if not repo: abort(403) try: # NOTE: Here we do not check for repository ownership, since it # might have changed even though the user might have made releases # in the past. repo_instance = Repository.get(user_id=user_id, github_id=repo['id'], check_owner=False) except RepositoryAccessError: abort(403) except NoResultFound: repo_instance = Repository(name=repo['full_name'], github_id=repo['id']) releases = [ current_github.release_api_class(r) for r in ( repo_instance.releases.order_by(db.desc(Release.created)).all() if repo_instance.id else [] ) ] return render_template( current_app.config['GITHUB_TEMPLATE_VIEW'], repo=repo_instance, releases=releases, serializer=current_github.record_serializer, ) abort(403)
python
def repository(name): """Display selected repository.""" user_id = current_user.id github = GitHubAPI(user_id=user_id) token = github.session_token if token: repos = github.account.extra_data.get('repos', []) repo = next((repo for repo_id, repo in repos.items() if repo.get('full_name') == name), {}) if not repo: abort(403) try: # NOTE: Here we do not check for repository ownership, since it # might have changed even though the user might have made releases # in the past. repo_instance = Repository.get(user_id=user_id, github_id=repo['id'], check_owner=False) except RepositoryAccessError: abort(403) except NoResultFound: repo_instance = Repository(name=repo['full_name'], github_id=repo['id']) releases = [ current_github.release_api_class(r) for r in ( repo_instance.releases.order_by(db.desc(Release.created)).all() if repo_instance.id else [] ) ] return render_template( current_app.config['GITHUB_TEMPLATE_VIEW'], repo=repo_instance, releases=releases, serializer=current_github.record_serializer, ) abort(403)
Display selected repository.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/views/github.py#L141-L180
inveniosoftware/invenio-github
invenio_github/views/github.py
hook
def hook(): """Install or remove GitHub webhook.""" repo_id = request.json['id'] github = GitHubAPI(user_id=current_user.id) repos = github.account.extra_data['repos'] if repo_id not in repos: abort(404) if request.method == 'DELETE': try: if github.remove_hook(repo_id, repos[repo_id]['full_name']): db.session.commit() return '', 204 else: abort(400) except Exception: abort(403) elif request.method == 'POST': try: if github.create_hook(repo_id, repos[repo_id]['full_name']): db.session.commit() return '', 201 else: abort(400) except Exception: abort(403) else: abort(400)
python
def hook(): """Install or remove GitHub webhook.""" repo_id = request.json['id'] github = GitHubAPI(user_id=current_user.id) repos = github.account.extra_data['repos'] if repo_id not in repos: abort(404) if request.method == 'DELETE': try: if github.remove_hook(repo_id, repos[repo_id]['full_name']): db.session.commit() return '', 204 else: abort(400) except Exception: abort(403) elif request.method == 'POST': try: if github.create_hook(repo_id, repos[repo_id]['full_name']): db.session.commit() return '', 201 else: abort(400) except Exception: abort(403) else: abort(400)
Install or remove GitHub webhook.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/views/github.py#L192-L221
inveniosoftware/invenio-github
invenio_github/views/github.py
hook_action
def hook_action(action, repo_id): """Display selected repository.""" github = GitHubAPI(user_id=current_user.id) repos = github.account.extra_data['repos'] if repo_id not in repos: abort(404) if action == 'disable': if github.remove_hook(repo_id, repos[repo_id]['full_name']): db.session.commit() return redirect(url_for('.index')) else: abort(400) elif action == 'enable': if github.create_hook(repo_id, repos[repo_id]['full_name']): db.session.commit() return redirect(url_for('.index')) else: abort(400) else: abort(400)
python
def hook_action(action, repo_id): """Display selected repository.""" github = GitHubAPI(user_id=current_user.id) repos = github.account.extra_data['repos'] if repo_id not in repos: abort(404) if action == 'disable': if github.remove_hook(repo_id, repos[repo_id]['full_name']): db.session.commit() return redirect(url_for('.index')) else: abort(400) elif action == 'enable': if github.create_hook(repo_id, repos[repo_id]['full_name']): db.session.commit() return redirect(url_for('.index')) else: abort(400) else: abort(400)
Display selected repository.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/views/github.py#L226-L247
inveniosoftware/invenio-github
invenio_github/ext.py
InvenioGitHub.release_api_class
def release_api_class(self): """Github Release API class.""" cls = current_app.config['GITHUB_RELEASE_CLASS'] if isinstance(cls, string_types): cls = import_string(cls) assert issubclass(cls, GitHubRelease) return cls
python
def release_api_class(self): """Github Release API class.""" cls = current_app.config['GITHUB_RELEASE_CLASS'] if isinstance(cls, string_types): cls = import_string(cls) assert issubclass(cls, GitHubRelease) return cls
Github Release API class.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/ext.py#L47-L53
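GITHUB_RELEASE_CLASS may therefore hold either the class object or a dotted import string, and whatever it resolves to must subclass GitHubRelease. A hedged configuration sketch; the overlay module and class names are made up for illustration:

# mysite/github.py -- illustrative module
from invenio_github.api import GitHubRelease


class MyGitHubRelease(GitHubRelease):
    """Site-specific release class (e.g. overriding metadata defaults)."""


# In the application config, either form works:
GITHUB_RELEASE_CLASS = MyGitHubRelease                  # the class itself
GITHUB_RELEASE_CLASS = 'mysite.github:MyGitHubRelease'  # or a lazy import string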
inveniosoftware/invenio-github
invenio_github/ext.py
InvenioGitHub.record_serializer
def record_serializer(self): """Github record serializer.""" imp = current_app.config['GITHUB_RECORD_SERIALIZER'] if isinstance(imp, string_types): return import_string(imp) return imp
python
def record_serializer(self): """Github record serializer.""" imp = current_app.config['GITHUB_RECORD_SERIALIZER'] if isinstance(imp, string_types): return import_string(imp) return imp
Github record serializer.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/ext.py#L56-L61
inveniosoftware/invenio-github
invenio_github/ext.py
InvenioGitHub.init_app
def init_app(self, app): """Flask application initialization.""" self.init_config(app) app.extensions['invenio-github'] = self @app.before_first_request def connect_signals(): """Connect OAuthClient signals.""" from invenio_oauthclient.models import RemoteAccount from invenio_oauthclient.signals import account_setup_committed from .api import GitHubAPI from .handlers import account_post_init account_setup_committed.connect( account_post_init, sender=GitHubAPI.remote._get_current_object() ) @event.listens_for(RemoteAccount, 'before_delete') def receive_before_delete(mapper, connection, target): """Listen for the 'before_delete' event."""
python
def init_app(self, app): """Flask application initialization.""" self.init_config(app) app.extensions['invenio-github'] = self @app.before_first_request def connect_signals(): """Connect OAuthClient signals.""" from invenio_oauthclient.models import RemoteAccount from invenio_oauthclient.signals import account_setup_committed from .api import GitHubAPI from .handlers import account_post_init account_setup_committed.connect( account_post_init, sender=GitHubAPI.remote._get_current_object() ) @event.listens_for(RemoteAccount, 'before_delete') def receive_before_delete(mapper, connection, target): """Listen for the 'before_delete' event."""
Flask application initialization.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/ext.py#L63-L84
inveniosoftware/invenio-github
invenio_github/api.py
GitHubAPI.access_token
def access_token(self): """Return OAuth access token.""" if self.user_id: return RemoteToken.get( self.user_id, self.remote.consumer_key ).access_token return self.remote.get_request_token()[0]
python
def access_token(self): """Return OAuth access token.""" if self.user_id: return RemoteToken.get( self.user_id, self.remote.consumer_key ).access_token return self.remote.get_request_token()[0]
Return OAuth access token.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L60-L66
inveniosoftware/invenio-github
invenio_github/api.py
GitHubAPI.session_token
def session_token(self): """Return OAuth session token.""" session_token = None if self.user_id is not None: session_token = token_getter(self.remote) if session_token: token = RemoteToken.get( self.user_id, self.remote.consumer_key, access_token=session_token[0] ) return token return None
python
def session_token(self): """Return OAuth session token.""" session_token = None if self.user_id is not None: session_token = token_getter(self.remote) if session_token: token = RemoteToken.get( self.user_id, self.remote.consumer_key, access_token=session_token[0] ) return token return None
Return OAuth session token.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L69-L80
inveniosoftware/invenio-github
invenio_github/api.py
GitHubAPI.webhook_url
def webhook_url(self): """Return the url to be used by a GitHub webhook.""" webhook_token = ProviderToken.query.filter_by( id=self.account.extra_data['tokens']['webhook'] ).first() if webhook_token: wh_url = current_app.config.get('GITHUB_WEBHOOK_RECEIVER_URL') if wh_url: return wh_url.format(token=webhook_token.access_token) else: raise RuntimeError('You must set GITHUB_WEBHOOK_RECEIVER_URL.')
python
def webhook_url(self): """Return the url to be used by a GitHub webhook.""" webhook_token = ProviderToken.query.filter_by( id=self.account.extra_data['tokens']['webhook'] ).first() if webhook_token: wh_url = current_app.config.get('GITHUB_WEBHOOK_RECEIVER_URL') if wh_url: return wh_url.format(token=webhook_token.access_token) else: raise RuntimeError('You must set GITHUB_WEBHOOK_RECEIVER_URL.')
Return the url to be used by a GitHub webhook.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L95-L105
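The property fills a {token} placeholder in GITHUB_WEBHOOK_RECEIVER_URL with the personal webhook token created in init_account() below, and raises RuntimeError when the setting is missing. A hedged sketch of such a setting; the hostname and path are illustrative, only the placeholder is required:

# Application config (illustrative value): must be reachable by GitHub and
# must contain the {token} placeholder that webhook_url() substitutes.
GITHUB_WEBHOOK_RECEIVER_URL = (
    'https://www.example.org/api/receivers/github/events/?access_token={token}'
)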
inveniosoftware/invenio-github
invenio_github/api.py
GitHubAPI.init_account
def init_account(self): """Setup a new GitHub account.""" ghuser = self.api.me() # Setup local access tokens to be used by the webhooks hook_token = ProviderToken.create_personal( 'github-webhook', self.user_id, scopes=['webhooks:event'], is_internal=True, ) # Initial structure of extra data self.account.extra_data = dict( id=ghuser.id, login=ghuser.login, name=ghuser.name, tokens=dict( webhook=hook_token.id, ), repos=dict(), last_sync=iso_utcnow(), ) db.session.add(self.account) # Sync data from GitHub, but don't check repository hooks yet. self.sync(hooks=False)
python
def init_account(self): """Setup a new GitHub account.""" ghuser = self.api.me() # Setup local access tokens to be used by the webhooks hook_token = ProviderToken.create_personal( 'github-webhook', self.user_id, scopes=['webhooks:event'], is_internal=True, ) # Initial structure of extra data self.account.extra_data = dict( id=ghuser.id, login=ghuser.login, name=ghuser.name, tokens=dict( webhook=hook_token.id, ), repos=dict(), last_sync=iso_utcnow(), ) db.session.add(self.account) # Sync data from GitHub, but don't check repository hooks yet. self.sync(hooks=False)
Setup a new GitHub account.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L107-L131
inveniosoftware/invenio-github
invenio_github/api.py
GitHubAPI.sync
def sync(self, hooks=True, async_hooks=True): """Synchronize user repositories. :param bool hooks: True for syncing hooks. :param bool async_hooks: True for sending of an asynchronous task to sync hooks. .. note:: Syncing happens from GitHub's direction only. This means that we consider the information on GitHub as valid, and we overwrite our own state based on this information. """ active_repos = {} github_repos = {repo.id: repo for repo in self.api.repositories() if repo.permissions['admin']} for gh_repo_id, gh_repo in github_repos.items(): active_repos[gh_repo_id] = { 'id': gh_repo_id, 'full_name': gh_repo.full_name, 'description': gh_repo.description, } if hooks: self._sync_hooks(list(active_repos.keys()), asynchronous=async_hooks) # Update changed names for repositories stored in DB db_repos = Repository.query.filter( Repository.user_id == self.user_id, Repository.github_id.in_(github_repos.keys()) ) for repo in db_repos: gh_repo = github_repos.get(repo.github_id) if gh_repo and repo.name != gh_repo.full_name: repo.name = gh_repo.full_name db.session.add(repo) # Remove ownership from repositories that the user has no longer # 'admin' permissions, or have been deleted. Repository.query.filter( Repository.user_id == self.user_id, ~Repository.github_id.in_(github_repos.keys()) ).update(dict(user_id=None, hook=None), synchronize_session=False) # Update repos and last sync self.account.extra_data.update(dict( repos=active_repos, last_sync=iso_utcnow(), )) self.account.extra_data.changed() db.session.add(self.account)
python
def sync(self, hooks=True, async_hooks=True): """Synchronize user repositories. :param bool hooks: True for syncing hooks. :param bool async_hooks: True for sending of an asynchronous task to sync hooks. .. note:: Syncing happens from GitHub's direction only. This means that we consider the information on GitHub as valid, and we overwrite our own state based on this information. """ active_repos = {} github_repos = {repo.id: repo for repo in self.api.repositories() if repo.permissions['admin']} for gh_repo_id, gh_repo in github_repos.items(): active_repos[gh_repo_id] = { 'id': gh_repo_id, 'full_name': gh_repo.full_name, 'description': gh_repo.description, } if hooks: self._sync_hooks(list(active_repos.keys()), asynchronous=async_hooks) # Update changed names for repositories stored in DB db_repos = Repository.query.filter( Repository.user_id == self.user_id, Repository.github_id.in_(github_repos.keys()) ) for repo in db_repos: gh_repo = github_repos.get(repo.github_id) if gh_repo and repo.name != gh_repo.full_name: repo.name = gh_repo.full_name db.session.add(repo) # Remove ownership from repositories that the user has no longer # 'admin' permissions, or have been deleted. Repository.query.filter( Repository.user_id == self.user_id, ~Repository.github_id.in_(github_repos.keys()) ).update(dict(user_id=None, hook=None), synchronize_session=False) # Update repos and last sync self.account.extra_data.update(dict( repos=active_repos, last_sync=iso_utcnow(), )) self.account.extra_data.changed() db.session.add(self.account)
Synchronize user repositories. :param bool hooks: True for syncing hooks. :param bool async_hooks: True for sending of an asynchronous task to sync hooks. .. note:: Syncing happens from GitHub's direction only. This means that we consider the information on GitHub as valid, and we overwrite our own state based on this information.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L133-L185
inveniosoftware/invenio-github
invenio_github/api.py
GitHubAPI._sync_hooks
def _sync_hooks(self, repos, asynchronous=True): """Check if a hooks sync task needs to be started.""" if not asynchronous: for repo_id in repos: try: with db.session.begin_nested(): self.sync_repo_hook(repo_id) db.session.commit() except RepositoryAccessError as e: current_app.logger.warning(e.message, exc_info=True) except NoResultFound: pass # Repository not in DB yet else: # FIXME: We have to commit, in order to have all necessary data? db.session.commit() sync_hooks.delay(self.user_id, repos)
python
def _sync_hooks(self, repos, asynchronous=True): """Check if a hooks sync task needs to be started.""" if not asynchronous: for repo_id in repos: try: with db.session.begin_nested(): self.sync_repo_hook(repo_id) db.session.commit() except RepositoryAccessError as e: current_app.logger.warning(e.message, exc_info=True) except NoResultFound: pass # Repository not in DB yet else: # FIXME: We have to commit, in order to have all necessary data? db.session.commit() sync_hooks.delay(self.user_id, repos)
Check if a hooks sync task needs to be started.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L187-L202
inveniosoftware/invenio-github
invenio_github/api.py
GitHubAPI.sync_repo_hook
def sync_repo_hook(self, repo_id): """Sync a GitHub repo's hook with the locally stored repo.""" # Get the hook that we may have set in the past gh_repo = self.api.repository_with_id(repo_id) hooks = (hook.id for hook in gh_repo.hooks() if hook.config.get('url', '') == self.webhook_url) hook_id = next(hooks, None) # If hook on GitHub exists, get or create corresponding db object and # enable the hook. Otherwise remove the old hook information. if hook_id: Repository.enable(user_id=self.user_id, github_id=gh_repo.id, name=gh_repo.full_name, hook=hook_id) else: Repository.disable(user_id=self.user_id, github_id=gh_repo.id, name=gh_repo.full_name)
python
def sync_repo_hook(self, repo_id): """Sync a GitHub repo's hook with the locally stored repo.""" # Get the hook that we may have set in the past gh_repo = self.api.repository_with_id(repo_id) hooks = (hook.id for hook in gh_repo.hooks() if hook.config.get('url', '') == self.webhook_url) hook_id = next(hooks, None) # If hook on GitHub exists, get or create corresponding db object and # enable the hook. Otherwise remove the old hook information. if hook_id: Repository.enable(user_id=self.user_id, github_id=gh_repo.id, name=gh_repo.full_name, hook=hook_id) else: Repository.disable(user_id=self.user_id, github_id=gh_repo.id, name=gh_repo.full_name)
Sync a GitHub repo's hook with the locally stored repo.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L204-L222
inveniosoftware/invenio-github
invenio_github/api.py
GitHubAPI.check_sync
def check_sync(self): """Check if sync is required based on last sync date.""" # If refresh interval is not specified, we should refresh every time. expiration = utcnow() refresh_td = current_app.config.get('GITHUB_REFRESH_TIMEDELTA') if refresh_td: expiration -= refresh_td last_sync = parse_timestamp(self.account.extra_data['last_sync']) return last_sync < expiration
python
def check_sync(self): """Check if sync is required based on last sync date.""" # If refresh interval is not specified, we should refresh every time. expiration = utcnow() refresh_td = current_app.config.get('GITHUB_REFRESH_TIMEDELTA') if refresh_td: expiration -= refresh_td last_sync = parse_timestamp(self.account.extra_data['last_sync']) return last_sync < expiration
Check if sync is required based on last sync date.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L224-L232
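check_sync() compares the stored last_sync timestamp against now minus GITHUB_REFRESH_TIMEDELTA, so the setting bounds how stale the cached repository list may become; leaving it unset makes every check request a fresh sync. A hedged sketch:

from datetime import timedelta

# Illustrative value: re-sync the repository list from GitHub at most hourly.
GITHUB_REFRESH_TIMEDELTA = timedelta(hours=1)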
inveniosoftware/invenio-github
invenio_github/api.py
GitHubAPI.create_hook
def create_hook(self, repo_id, repo_name): """Create repository hook.""" config = dict( url=self.webhook_url, content_type='json', secret=current_app.config['GITHUB_SHARED_SECRET'], insecure_ssl='1' if current_app.config['GITHUB_INSECURE_SSL'] else '0', ) ghrepo = self.api.repository_with_id(repo_id) if ghrepo: try: hook = ghrepo.create_hook( 'web', # GitHub identifier for webhook service config, events=['release'], ) except github3.GitHubError as e: # Check if hook is already installed hook_errors = (m for m in e.errors if m['code'] == 'custom' and m['resource'] == 'Hook') if next(hook_errors, None): hooks = (h for h in ghrepo.hooks() if h.config.get('url', '') == config['url']) hook = next(hooks, None) if hook: hook.edit(config=config, events=['release']) finally: if hook: Repository.enable(user_id=self.user_id, github_id=repo_id, name=repo_name, hook=hook.id) return True return False
python
def create_hook(self, repo_id, repo_name): """Create repository hook.""" config = dict( url=self.webhook_url, content_type='json', secret=current_app.config['GITHUB_SHARED_SECRET'], insecure_ssl='1' if current_app.config['GITHUB_INSECURE_SSL'] else '0', ) ghrepo = self.api.repository_with_id(repo_id) if ghrepo: try: hook = ghrepo.create_hook( 'web', # GitHub identifier for webhook service config, events=['release'], ) except github3.GitHubError as e: # Check if hook is already installed hook_errors = (m for m in e.errors if m['code'] == 'custom' and m['resource'] == 'Hook') if next(hook_errors, None): hooks = (h for h in ghrepo.hooks() if h.config.get('url', '') == config['url']) hook = next(hooks, None) if hook: hook.edit(config=config, events=['release']) finally: if hook: Repository.enable(user_id=self.user_id, github_id=repo_id, name=repo_name, hook=hook.id) return True return False
Create repository hook.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L234-L270
inveniosoftware/invenio-github
invenio_github/api.py
GitHubAPI.remove_hook
def remove_hook(self, repo_id, name): """Remove repository hook.""" ghrepo = self.api.repository_with_id(repo_id) if ghrepo: hooks = (h for h in ghrepo.hooks() if h.config.get('url', '') == self.webhook_url) hook = next(hooks, None) if not hook or hook.delete(): Repository.disable(user_id=self.user_id, github_id=repo_id, name=name) return True return False
python
def remove_hook(self, repo_id, name): """Remove repository hook.""" ghrepo = self.api.repository_with_id(repo_id) if ghrepo: hooks = (h for h in ghrepo.hooks() if h.config.get('url', '') == self.webhook_url) hook = next(hooks, None) if not hook or hook.delete(): Repository.disable(user_id=self.user_id, github_id=repo_id, name=name) return True return False
Remove repository hook.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L272-L284
inveniosoftware/invenio-github
invenio_github/api.py
GitHubAPI._dev_api
def _dev_api(cls): """Get a developer instance for GitHub API access.""" gh = github3.GitHub() gh.set_client_id(cls.remote.consumer_key, cls.remote.consumer_secret) return gh
python
def _dev_api(cls): """Get a developer instance for GitHub API access.""" gh = github3.GitHub() gh.set_client_id(cls.remote.consumer_key, cls.remote.consumer_secret) return gh
Get a developer instance for GitHub API access.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L287-L291
inveniosoftware/invenio-github
invenio_github/api.py
GitHubRelease.deposit_class
def deposit_class(self): """Return a class implementing `publish` method.""" cls = current_app.config['GITHUB_DEPOSIT_CLASS'] if isinstance(cls, string_types): cls = import_string(cls) assert isinstance(cls, type) return cls
python
def deposit_class(self): """Return a class implementing `publish` method.""" cls = current_app.config['GITHUB_DEPOSIT_CLASS'] if isinstance(cls, string_types): cls = import_string(cls) assert isinstance(cls, type) return cls
Return a class implementing `publish` method.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L317-L323
inveniosoftware/invenio-github
invenio_github/api.py
GitHubRelease.repo_model
def repo_model(self): """Return repository model from database.""" return Repository.query.filter_by( user_id=self.event.user_id, github_id=self.repository['id'], ).one()
python
def repo_model(self): """Return repository model from database.""" return Repository.query.filter_by( user_id=self.event.user_id, github_id=self.repository['id'], ).one()
Return repository model from database.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L346-L351
inveniosoftware/invenio-github
invenio_github/api.py
GitHubRelease.title
def title(self): """Extract title from a release.""" if self.event: if self.release['name']: return u'{0}: {1}'.format( self.repository['full_name'], self.release['name'] ) return u'{0} {1}'.format(self.repo_model.name, self.model.tag)
python
def title(self): """Extract title from a release.""" if self.event: if self.release['name']: return u'{0}: {1}'.format( self.repository['full_name'], self.release['name'] ) return u'{0} {1}'.format(self.repo_model.name, self.model.tag)
Extract title from a release.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L354-L361
inveniosoftware/invenio-github
invenio_github/api.py
GitHubRelease.description
def description(self): """Extract description from a release.""" if self.release.get('body'): return markdown(self.release['body']) elif self.repository.get('description'): return self.repository['description'] return 'No description provided.'
python
def description(self): """Extract description from a release.""" if self.release.get('body'): return markdown(self.release['body']) elif self.repository.get('description'): return self.repository['description'] return 'No description provided.'
Extract description from a release.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L364-L370
inveniosoftware/invenio-github
invenio_github/api.py
GitHubRelease.related_identifiers
def related_identifiers(self): """Yield related identifiers.""" yield dict( identifier=u'https://github.com/{0}/tree/{1}'.format( self.repository['full_name'], self.release['tag_name'] ), relation='isSupplementTo', )
python
def related_identifiers(self): """Yield related identifiers.""" yield dict( identifier=u'https://github.com/{0}/tree/{1}'.format( self.repository['full_name'], self.release['tag_name'] ), relation='isSupplementTo', )
Yield related identifiers.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L378-L385
inveniosoftware/invenio-github
invenio_github/api.py
GitHubRelease.defaults
def defaults(self): """Return default metadata.""" return dict( access_right='open', description=self.description, license='other-open', publication_date=self.release['published_at'][:10], related_identifiers=list(self.related_identifiers), version=self.version, title=self.title, upload_type='software', )
python
def defaults(self): """Return default metadata.""" return dict( access_right='open', description=self.description, license='other-open', publication_date=self.release['published_at'][:10], related_identifiers=list(self.related_identifiers), version=self.version, title=self.title, upload_type='software', )
Return default metadata.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L393-L404
inveniosoftware/invenio-github
invenio_github/api.py
GitHubRelease.extra_metadata
def extra_metadata(self): """Get extra metadata for file in repository.""" return get_extra_metadata( self.gh.api, self.repository['owner']['login'], self.repository['name'], self.release['tag_name'], )
python
def extra_metadata(self): """Get extra metadata for file in repository.""" return get_extra_metadata( self.gh.api, self.repository['owner']['login'], self.repository['name'], self.release['tag_name'], )
Get extra metadata for file in repository.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L407-L414
inveniosoftware/invenio-github
invenio_github/api.py
GitHubRelease.files
def files(self): """Extract files to download from GitHub payload.""" tag_name = self.release['tag_name'] repo_name = self.repository['full_name'] zipball_url = self.release['zipball_url'] filename = u'{name}-{tag}.zip'.format(name=repo_name, tag=tag_name) response = self.gh.api.session.head(zipball_url) assert response.status_code == 302, \ u'Could not retrieve archive from GitHub: {0}'.format(zipball_url) yield filename, zipball_url
python
def files(self): """Extract files to download from GitHub payload.""" tag_name = self.release['tag_name'] repo_name = self.repository['full_name'] zipball_url = self.release['zipball_url'] filename = u'{name}-{tag}.zip'.format(name=repo_name, tag=tag_name) response = self.gh.api.session.head(zipball_url) assert response.status_code == 302, \ u'Could not retrieve archive from GitHub: {0}'.format(zipball_url) yield filename, zipball_url
Extract files to download from GitHub payload.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L417-L429
inveniosoftware/invenio-github
invenio_github/api.py
GitHubRelease.metadata
def metadata(self): """Return extracted metadata.""" output = dict(self.defaults) output.update(self.extra_metadata) return output
python
def metadata(self): """Return extracted metadata.""" output = dict(self.defaults) output.update(self.extra_metadata) return output
Return extracted metadata.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L432-L436
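The merge order matters: keys from the repository's metadata file (extra_metadata) override the generated defaults, while untouched defaults survive. A tiny illustration of that precedence with plain dicts standing in for the two properties:

defaults = {'upload_type': 'software', 'license': 'other-open', 'title': 'repo: v1.0'}
extra = {'license': 'MIT', 'keywords': ['compression']}   # from the metadata file

merged = dict(defaults)
merged.update(extra)
assert merged['license'] == 'MIT'            # file value wins
assert merged['upload_type'] == 'software'   # default kept when not overridden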
inveniosoftware/invenio-github
invenio_github/api.py
GitHubRelease.pid
def pid(self): """Get PID object for the Release record.""" if self.model.status == ReleaseStatus.PUBLISHED and self.record: fetcher = current_pidstore.fetchers[ current_app.config.get('GITHUB_PID_FETCHER')] return fetcher(self.record.id, self.record)
python
def pid(self): """Get PID object for the Release record.""" if self.model.status == ReleaseStatus.PUBLISHED and self.record: fetcher = current_pidstore.fetchers[ current_app.config.get('GITHUB_PID_FETCHER')] return fetcher(self.record.id, self.record)
Get PID object for the Release record.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L449-L454
inveniosoftware/invenio-github
invenio_github/api.py
GitHubRelease.publish
def publish(self): """Publish GitHub release as record.""" with db.session.begin_nested(): deposit = self.deposit_class.create(self.metadata) deposit['_deposit']['created_by'] = self.event.user_id deposit['_deposit']['owners'] = [self.event.user_id] # Fetch the deposit files for key, url in self.files: deposit.files[key] = self.gh.api.session.get( url, stream=True).raw deposit.publish() recid, record = deposit.fetch_published() self.model.recordmetadata = record.model
python
def publish(self): """Publish GitHub release as record.""" with db.session.begin_nested(): deposit = self.deposit_class.create(self.metadata) deposit['_deposit']['created_by'] = self.event.user_id deposit['_deposit']['owners'] = [self.event.user_id] # Fetch the deposit files for key, url in self.files: deposit.files[key] = self.gh.api.session.get( url, stream=True).raw deposit.publish() recid, record = deposit.fetch_published() self.model.recordmetadata = record.model
Publish GitHub release as record.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/api.py#L461-L475
inveniosoftware/invenio-github
invenio_github/receivers.py
GitHubReceiver.run
def run(self, event): """Process an event. .. note:: We should only do basic server side operation here, since we send the rest of the processing to a Celery task which will be mainly accessing the GitHub API. """ repo_id = event.payload['repository']['id'] # Ping event - update the ping timestamp of the repository if 'hook_id' in event.payload and 'zen' in event.payload: repository = Repository.query.filter_by( github_id=repo_id ).one() repository.ping = datetime.utcnow() db.session.commit() return # Release event if 'release' in event.payload and \ event.payload.get('action') == 'published': try: release = Release.create(event) db.session.commit() # FIXME: If we want to skip the processing, we should do it # here (e.g. we're in the middle of a migration). # if current_app.config['GITHUB_PROCESS_RELEASES']: process_release.delay( release.release_id, verify_sender=self.verify_sender ) except (ReleaseAlreadyReceivedError, RepositoryDisabledError) as e: event.response_code = 409 event.response = dict(message=str(e), status=409) except RepositoryAccessError as e: event.response_code = 403 event.response = dict(message=str(e), status=403)
python
def run(self, event): """Process an event. .. note:: We should only do basic server side operation here, since we send the rest of the processing to a Celery task which will be mainly accessing the GitHub API. """ repo_id = event.payload['repository']['id'] # Ping event - update the ping timestamp of the repository if 'hook_id' in event.payload and 'zen' in event.payload: repository = Repository.query.filter_by( github_id=repo_id ).one() repository.ping = datetime.utcnow() db.session.commit() return # Release event if 'release' in event.payload and \ event.payload.get('action') == 'published': try: release = Release.create(event) db.session.commit() # FIXME: If we want to skip the processing, we should do it # here (e.g. we're in the middle of a migration). # if current_app.config['GITHUB_PROCESS_RELEASES']: process_release.delay( release.release_id, verify_sender=self.verify_sender ) except (ReleaseAlreadyReceivedError, RepositoryDisabledError) as e: event.response_code = 409 event.response = dict(message=str(e), status=409) except RepositoryAccessError as e: event.response_code = 403 event.response = dict(message=str(e), status=403)
Process an event. .. note:: We should only do basic server-side operations here, since we send the rest of the processing to a Celery task, which will mainly be accessing the GitHub API.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/receivers.py#L43-L82
inveniosoftware/invenio-github
invenio_github/utils.py
parse_timestamp
def parse_timestamp(x): """Parse ISO8601 formatted timestamp.""" dt = dateutil.parser.parse(x) if dt.tzinfo is None: dt = dt.replace(tzinfo=pytz.utc) return dt
python
def parse_timestamp(x): """Parse ISO8601 formatted timestamp.""" dt = dateutil.parser.parse(x) if dt.tzinfo is None: dt = dt.replace(tzinfo=pytz.utc) return dt
Parse ISO8601 formatted timestamp.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/utils.py#L44-L49
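A minimal usage sketch for parse_timestamp (assuming python-dateutil and pytz are installed, as the function requires): naive ISO8601 strings come back as UTC-aware datetimes, while explicit offsets are kept.

from invenio_github.utils import parse_timestamp

# Naive timestamp: tzinfo is filled in with UTC.
dt = parse_timestamp('2019-03-01T12:30:00')
print(dt.tzinfo)  # UTC

# Offset-aware timestamp: the original offset is preserved.
dt = parse_timestamp('2019-03-01T12:30:00+02:00')
print(dt.isoformat())  # 2019-03-01T12:30:00+02:00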
inveniosoftware/invenio-github
invenio_github/utils.py
get_extra_metadata
def get_extra_metadata(gh, owner, repo_name, ref): """Get the metadata file.""" try: content = gh.repository(owner, repo_name).file_contents( path=current_app.config['GITHUB_METADATA_FILE'], ref=ref ) if not content: # File does not exists in the given ref return {} return json.loads(content.decoded.decode('utf-8')) except ValueError: raise CustomGitHubMetadataError( u'Metadata file "{file}" is not valid JSON.' .format(file=current_app.config['GITHUB_METADATA_FILE']) )
python
def get_extra_metadata(gh, owner, repo_name, ref): """Get the metadata file.""" try: content = gh.repository(owner, repo_name).file_contents( path=current_app.config['GITHUB_METADATA_FILE'], ref=ref ) if not content: # File does not exists in the given ref return {} return json.loads(content.decoded.decode('utf-8')) except ValueError: raise CustomGitHubMetadataError( u'Metadata file "{file}" is not valid JSON.' .format(file=current_app.config['GITHUB_METADATA_FILE']) )
Get the metadata file.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/utils.py#L52-L66
inveniosoftware/invenio-github
invenio_github/utils.py
get_owner
def get_owner(gh, owner): """Get owner of repository as a creator.""" try: u = gh.user(owner) name = u.name or u.login company = u.company or '' return [dict(name=name, affiliation=company)] except Exception: return None
python
def get_owner(gh, owner): """Get owner of repository as a creator.""" try: u = gh.user(owner) name = u.name or u.login company = u.company or '' return [dict(name=name, affiliation=company)] except Exception: return None
Get owner of repository as a creator.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/utils.py#L69-L77
inveniosoftware/invenio-github
invenio_github/utils.py
get_contributors
def get_contributors(gh, repo_id): """Get list of contributors to a repository.""" try: # FIXME: Use `github3.Repository.contributors` to get this information contrib_url = gh.repository_with_id(repo_id).contributors_url r = requests.get(contrib_url) if r.status_code == 200: contributors = r.json() def get_author(contributor): r = requests.get(contributor['url']) if r.status_code == 200: data = r.json() return dict( name=(data['name'] if 'name' in data and data['name'] else data['login']), affiliation=data.get('company') or '', ) # Sort according to number of contributions contributors.sort(key=itemgetter('contributions')) contributors = [get_author(x) for x in reversed(contributors) if x['type'] == 'User'] contributors = filter(lambda x: x is not None, contributors) return contributors except Exception: return None
python
def get_contributors(gh, repo_id): """Get list of contributors to a repository.""" try: # FIXME: Use `github3.Repository.contributors` to get this information contrib_url = gh.repository_with_id(repo_id).contributors_url r = requests.get(contrib_url) if r.status_code == 200: contributors = r.json() def get_author(contributor): r = requests.get(contributor['url']) if r.status_code == 200: data = r.json() return dict( name=(data['name'] if 'name' in data and data['name'] else data['login']), affiliation=data.get('company') or '', ) # Sort according to number of contributions contributors.sort(key=itemgetter('contributions')) contributors = [get_author(x) for x in reversed(contributors) if x['type'] == 'User'] contributors = filter(lambda x: x is not None, contributors) return contributors except Exception: return None
Get list of contributors to a repository.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/utils.py#L80-L108
inveniosoftware/invenio-github
invenio_github/handlers.py
account_setup
def account_setup(remote, token=None, response=None, account_setup=None): """Setup user account.""" gh = GitHubAPI(user_id=token.remote_account.user_id) with db.session.begin_nested(): gh.init_account() # Create user <-> external id link. oauth_link_external_id( token.remote_account.user, dict(id=str(gh.account.extra_data['id']), method="github") )
python
def account_setup(remote, token=None, response=None, account_setup=None): """Setup user account.""" gh = GitHubAPI(user_id=token.remote_account.user_id) with db.session.begin_nested(): gh.init_account() # Create user <-> external id link. oauth_link_external_id( token.remote_account.user, dict(id=str(gh.account.extra_data['id']), method="github") )
Setup user account.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/handlers.py#L41-L52
inveniosoftware/invenio-github
invenio_github/handlers.py
account_post_init
def account_post_init(remote, token=None): """Perform post initialization.""" gh = GitHubAPI(user_id=token.remote_account.user_id) repos = [r.id for r in gh.api.repositories() if r.permissions['admin']] sync_hooks.delay(token.remote_account.user_id, repos)
python
def account_post_init(remote, token=None): """Perform post initialization.""" gh = GitHubAPI(user_id=token.remote_account.user_id) repos = [r.id for r in gh.api.repositories() if r.permissions['admin']] sync_hooks.delay(token.remote_account.user_id, repos)
Perform post initialization.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/handlers.py#L55-L59
inveniosoftware/invenio-github
invenio_github/handlers.py
disconnect
def disconnect(remote): """Disconnect callback handler for GitHub.""" # User must be authenticated if not current_user.is_authenticated: return current_app.login_manager.unauthorized() external_method = 'github' external_ids = [i.id for i in current_user.external_identifiers if i.method == external_method] if external_ids: oauth_unlink_external_id(dict(id=external_ids[0], method=external_method)) user_id = int(current_user.get_id()) token = RemoteToken.get(user_id, remote.consumer_key) if token: extra_data = token.remote_account.extra_data # Delete the token that we issued for GitHub to deliver webhooks webhook_token_id = extra_data.get('tokens', {}).get('webhook') ProviderToken.query.filter_by(id=webhook_token_id).delete() # Disable GitHub webhooks from our side db_repos = Repository.query.filter_by(user_id=user_id).all() # Keep repositories with hooks to pass to the celery task later on repos_with_hooks = [(r.github_id, r.hook) for r in db_repos if r.hook] for repo in db_repos: try: Repository.disable(user_id=user_id, github_id=repo.github_id, name=repo.name) except NoResultFound: # If the repository doesn't exist, no action is necessary pass db.session.commit() # Send Celery task for webhooks removal and token revocation disconnect_github.delay(token.access_token, repos_with_hooks) # Delete the RemoteAccount (along with the associated RemoteToken) token.remote_account.delete() return redirect(url_for('invenio_oauthclient_settings.index'))
python
def disconnect(remote): """Disconnect callback handler for GitHub.""" # User must be authenticated if not current_user.is_authenticated: return current_app.login_manager.unauthorized() external_method = 'github' external_ids = [i.id for i in current_user.external_identifiers if i.method == external_method] if external_ids: oauth_unlink_external_id(dict(id=external_ids[0], method=external_method)) user_id = int(current_user.get_id()) token = RemoteToken.get(user_id, remote.consumer_key) if token: extra_data = token.remote_account.extra_data # Delete the token that we issued for GitHub to deliver webhooks webhook_token_id = extra_data.get('tokens', {}).get('webhook') ProviderToken.query.filter_by(id=webhook_token_id).delete() # Disable GitHub webhooks from our side db_repos = Repository.query.filter_by(user_id=user_id).all() # Keep repositories with hooks to pass to the celery task later on repos_with_hooks = [(r.github_id, r.hook) for r in db_repos if r.hook] for repo in db_repos: try: Repository.disable(user_id=user_id, github_id=repo.github_id, name=repo.name) except NoResultFound: # If the repository doesn't exist, no action is necessary pass db.session.commit() # Send Celery task for webhooks removal and token revocation disconnect_github.delay(token.access_token, repos_with_hooks) # Delete the RemoteAccount (along with the associated RemoteToken) token.remote_account.delete() return redirect(url_for('invenio_oauthclient_settings.index'))
Disconnect callback handler for GitHub.
https://github.com/inveniosoftware/invenio-github/blob/ec42fd6a06079310dcbe2c46d9fd79d5197bbe26/invenio_github/handlers.py#L62-L103
django-fluent/django-fluent-comments
fluent_comments/moderation.py
moderate_model
def moderate_model(ParentModel, publication_date_field=None, enable_comments_field=None): """ Register a parent model (e.g. ``Blog`` or ``Article``) that should receive comment moderation. :param ParentModel: The parent model, e.g. a ``Blog`` or ``Article`` model. :param publication_date_field: The field name of a :class:`~django.db.models.DateTimeField` in the parent model which stores the publication date. :type publication_date_field: str :param enable_comments_field: The field name of a :class:`~django.db.models.BooleanField` in the parent model which stores the whether comments are enabled. :type enable_comments_field: str """ attrs = { 'auto_close_field': publication_date_field, 'auto_moderate_field': publication_date_field, 'enable_field': enable_comments_field, } ModerationClass = type(ParentModel.__name__ + 'Moderator', (FluentCommentsModerator,), attrs) moderator.register(ParentModel, ModerationClass)
python
def moderate_model(ParentModel, publication_date_field=None, enable_comments_field=None): """ Register a parent model (e.g. ``Blog`` or ``Article``) that should receive comment moderation. :param ParentModel: The parent model, e.g. a ``Blog`` or ``Article`` model. :param publication_date_field: The field name of a :class:`~django.db.models.DateTimeField` in the parent model which stores the publication date. :type publication_date_field: str :param enable_comments_field: The field name of a :class:`~django.db.models.BooleanField` in the parent model which stores the whether comments are enabled. :type enable_comments_field: str """ attrs = { 'auto_close_field': publication_date_field, 'auto_moderate_field': publication_date_field, 'enable_field': enable_comments_field, } ModerationClass = type(ParentModel.__name__ + 'Moderator', (FluentCommentsModerator,), attrs) moderator.register(ParentModel, ModerationClass)
Register a parent model (e.g. ``Blog`` or ``Article``) that should receive comment moderation. :param ParentModel: The parent model, e.g. a ``Blog`` or ``Article`` model. :param publication_date_field: The field name of a :class:`~django.db.models.DateTimeField` in the parent model which stores the publication date. :type publication_date_field: str :param enable_comments_field: The field name of a :class:`~django.db.models.BooleanField` in the parent model which stores whether comments are enabled. :type enable_comments_field: str
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/moderation.py#L154-L170
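A hedged registration sketch for moderate_model; the Article model and its field names below are illustrative assumptions, not part of the package.

from fluent_comments.moderation import moderate_model
from myapp.models import Article  # hypothetical parent model

moderate_model(
    Article,
    publication_date_field='publication_date',  # assumed DateTimeField on Article
    enable_comments_field='enable_comments',     # assumed BooleanField on Article
)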
django-fluent/django-fluent-comments
fluent_comments/moderation.py
comments_are_open
def comments_are_open(content_object): """ Return whether comments are still open for a given target object. """ moderator = get_model_moderator(content_object.__class__) if moderator is None: return True # Check the 'enable_field', 'auto_close_field' and 'close_after', # by reusing the basic Django policies. return CommentModerator.allow(moderator, None, content_object, None)
python
def comments_are_open(content_object): """ Return whether comments are still open for a given target object. """ moderator = get_model_moderator(content_object.__class__) if moderator is None: return True # Check the 'enable_field', 'auto_close_field' and 'close_after', # by reusing the basic Django policies. return CommentModerator.allow(moderator, None, content_object, None)
Return whether comments are still open for a given target object.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/moderation.py#L189-L199
django-fluent/django-fluent-comments
fluent_comments/moderation.py
comments_are_moderated
def comments_are_moderated(content_object): """ Return whether comments are moderated for a given target object. """ moderator = get_model_moderator(content_object.__class__) if moderator is None: return False # Check the 'auto_moderate_field', 'moderate_after', # by reusing the basic Django policies. return CommentModerator.moderate(moderator, None, content_object, None)
python
def comments_are_moderated(content_object): """ Return whether comments are moderated for a given target object. """ moderator = get_model_moderator(content_object.__class__) if moderator is None: return False # Check the 'auto_moderate_field', 'moderate_after', # by reusing the basic Django policies. return CommentModerator.moderate(moderator, None, content_object, None)
Return whether comments are moderated for a given target object.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/moderation.py#L202-L212
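The two helpers above pair naturally in view code; a small sketch in which `article` is a placeholder for any parent object with a registered moderator:

from fluent_comments.moderation import comments_are_open, comments_are_moderated

if comments_are_open(article):
    show_comment_form = True
    # Warn the user if their comment will be held for approval.
    needs_approval = comments_are_moderated(article)
else:
    show_comment_form = False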
django-fluent/django-fluent-comments
fluent_comments/moderation.py
FluentCommentsModerator.allow
def allow(self, comment, content_object, request): """ Determine whether a given comment is allowed to be posted on a given object. Returns ``True`` if the comment should be allowed, ``False`` otherwise. """ # Parent class check if not super(FluentCommentsModerator, self).allow(comment, content_object, request): return False # Akismet check if self.akismet_check: akismet_result = akismet_check(comment, content_object, request) if self.akismet_check_action == 'delete' and akismet_result in (SpamStatus.ProbableSpam, SpamStatus.DefiniteSpam): return False # Akismet marked the comment as spam. elif self.akismet_check_action == 'auto' and akismet_result == SpamStatus.DefiniteSpam: return False # Clearly spam return True
python
def allow(self, comment, content_object, request): """ Determine whether a given comment is allowed to be posted on a given object. Returns ``True`` if the comment should be allowed, ``False`` otherwise. """ # Parent class check if not super(FluentCommentsModerator, self).allow(comment, content_object, request): return False # Akismet check if self.akismet_check: akismet_result = akismet_check(comment, content_object, request) if self.akismet_check_action == 'delete' and akismet_result in (SpamStatus.ProbableSpam, SpamStatus.DefiniteSpam): return False # Akismet marked the comment as spam. elif self.akismet_check_action == 'auto' and akismet_result == SpamStatus.DefiniteSpam: return False # Clearly spam return True
Determine whether a given comment is allowed to be posted on a given object. Returns ``True`` if the comment should be allowed, ``False`` otherwise.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/moderation.py#L46-L64
django-fluent/django-fluent-comments
fluent_comments/moderation.py
FluentCommentsModerator.moderate
def moderate(self, comment, content_object, request): """ Determine whether a given comment on a given object should be allowed to show up immediately, or should be marked non-public and await approval. Returns ``True`` if the comment should be moderated (marked non-public), ``False`` otherwise. """ # Soft delete checks are done first, so these comments are not mistakenly "just moderated" # for expiring the `close_after` date, but correctly get marked as spam instead. # This helps staff to quickly see which comments need real moderation. if self.akismet_check: akismet_result = akismet_check(comment, content_object, request) if akismet_result: # Typically action=delete never gets here, unless the service was having problems. if akismet_result in (SpamStatus.ProbableSpam, SpamStatus.DefiniteSpam) and \ self.akismet_check_action in ('auto', 'soft_delete', 'delete'): comment.is_removed = True # Set extra marker # SpamStatus.Unknown or action=moderate will end up in the moderation queue return True # Parent class check if super(FluentCommentsModerator, self).moderate(comment, content_object, request): return True # Bad words check if self.moderate_bad_words: input_words = split_words(comment.comment) if self.moderate_bad_words.intersection(input_words): return True # Akismet check if self.akismet_check and self.akismet_check_action not in ('soft_delete', 'delete'): # Return True if akismet marks this comment as spam and we want to moderate it. if akismet_check(comment, content_object, request): return True return False
python
def moderate(self, comment, content_object, request): """ Determine whether a given comment on a given object should be allowed to show up immediately, or should be marked non-public and await approval. Returns ``True`` if the comment should be moderated (marked non-public), ``False`` otherwise. """ # Soft delete checks are done first, so these comments are not mistakenly "just moderated" # for expiring the `close_after` date, but correctly get marked as spam instead. # This helps staff to quickly see which comments need real moderation. if self.akismet_check: akismet_result = akismet_check(comment, content_object, request) if akismet_result: # Typically action=delete never gets here, unless the service was having problems. if akismet_result in (SpamStatus.ProbableSpam, SpamStatus.DefiniteSpam) and \ self.akismet_check_action in ('auto', 'soft_delete', 'delete'): comment.is_removed = True # Set extra marker # SpamStatus.Unknown or action=moderate will end up in the moderation queue return True # Parent class check if super(FluentCommentsModerator, self).moderate(comment, content_object, request): return True # Bad words check if self.moderate_bad_words: input_words = split_words(comment.comment) if self.moderate_bad_words.intersection(input_words): return True # Akismet check if self.akismet_check and self.akismet_check_action not in ('soft_delete', 'delete'): # Return True if akismet marks this comment as spam and we want to moderate it. if akismet_check(comment, content_object, request): return True return False
Determine whether a given comment on a given object should be allowed to show up immediately, or should be marked non-public and await approval. Returns ``True`` if the comment should be moderated (marked non-public), ``False`` otherwise.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/moderation.py#L66-L104
django-fluent/django-fluent-comments
fluent_comments/moderation.py
FluentCommentsModerator.email
def email(self, comment, content_object, request): """ Overwritten for a better email notification. """ if not self.email_notification: return send_comment_posted(comment, request)
python
def email(self, comment, content_object, request): """ Overwritten for a better email notification. """ if not self.email_notification: return send_comment_posted(comment, request)
Overwritten for a better email notification.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/moderation.py#L106-L113
django-fluent/django-fluent-comments
fluent_comments/email.py
send_comment_posted
def send_comment_posted(comment, request): """ Send the email to staff that an comment was posted. While the django_comments module has email support, it doesn't pass the 'request' to the context. This also changes the subject to show the page title. """ recipient_list = [manager_tuple[1] for manager_tuple in settings.MANAGERS] site = get_current_site(request) content_object = comment.content_object content_title = force_text(content_object) if comment.is_removed: subject = u'[{0}] Spam comment on "{1}"'.format(site.name, content_title) elif not comment.is_public: subject = u'[{0}] Moderated comment on "{1}"'.format(site.name, content_title) else: subject = u'[{0}] New comment posted on "{1}"'.format(site.name, content_title) context = { 'site': site, 'comment': comment, 'content_object': content_object } message = render_to_string("comments/comment_notification_email.txt", context, request=request) if appsettings.FLUENT_COMMENTS_MULTIPART_EMAILS: html_message = render_to_string("comments/comment_notification_email.html", context, request=request) else: html_message = None send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, recipient_list, fail_silently=True, html_message=html_message)
python
def send_comment_posted(comment, request): """ Send the email to staff that an comment was posted. While the django_comments module has email support, it doesn't pass the 'request' to the context. This also changes the subject to show the page title. """ recipient_list = [manager_tuple[1] for manager_tuple in settings.MANAGERS] site = get_current_site(request) content_object = comment.content_object content_title = force_text(content_object) if comment.is_removed: subject = u'[{0}] Spam comment on "{1}"'.format(site.name, content_title) elif not comment.is_public: subject = u'[{0}] Moderated comment on "{1}"'.format(site.name, content_title) else: subject = u'[{0}] New comment posted on "{1}"'.format(site.name, content_title) context = { 'site': site, 'comment': comment, 'content_object': content_object } message = render_to_string("comments/comment_notification_email.txt", context, request=request) if appsettings.FLUENT_COMMENTS_MULTIPART_EMAILS: html_message = render_to_string("comments/comment_notification_email.html", context, request=request) else: html_message = None send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, recipient_list, fail_silently=True, html_message=html_message)
Send the email to staff that a comment was posted. While the django_comments module has email support, it doesn't pass the 'request' to the context. This also changes the subject to show the page title.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/email.py#L9-L42
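The notification relies on standard Django mail settings plus one app setting; a hedged configuration sketch (the FLUENT_COMMENTS_MULTIPART_EMAILS name mirrors the appsettings attribute used above and should be verified against the app's settings module):

# settings.py (sketch)
MANAGERS = [('Site staff', 'staff@example.com')]  # notification recipients
DEFAULT_FROM_EMAIL = 'noreply@example.com'        # sender address
FLUENT_COMMENTS_MULTIPART_EMAILS = True           # also render the HTML email template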
django-fluent/django-fluent-comments
fluent_comments/templatetags/fluent_comments_tags.py
AjaxCommentTags.parse
def parse(cls, parser, token): """ Custom parsing for the ``{% ajax_comment_tags for ... %}`` tag. """ # Process the template line. tag_name, args, kwargs = parse_token_kwargs( parser, token, allowed_kwargs=cls.allowed_kwargs, compile_args=False, # Only overrule here, keep at render() phase. compile_kwargs=cls.compile_kwargs ) # remove "for" keyword, so all other args can be resolved in render(). if args[0] == 'for': args.pop(0) # And apply the compilation afterwards for i in range(len(args)): args[i] = parser.compile_filter(args[i]) cls.validate_args(tag_name, *args, **kwargs) return cls(tag_name, *args, **kwargs)
python
def parse(cls, parser, token): """ Custom parsing for the ``{% ajax_comment_tags for ... %}`` tag. """ # Process the template line. tag_name, args, kwargs = parse_token_kwargs( parser, token, allowed_kwargs=cls.allowed_kwargs, compile_args=False, # Only overrule here, keep at render() phase. compile_kwargs=cls.compile_kwargs ) # remove "for" keyword, so all other args can be resolved in render(). if args[0] == 'for': args.pop(0) # And apply the compilation afterwards for i in range(len(args)): args[i] = parser.compile_filter(args[i]) cls.validate_args(tag_name, *args, **kwargs) return cls(tag_name, *args, **kwargs)
Custom parsing for the ``{% ajax_comment_tags for ... %}`` tag.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/templatetags/fluent_comments_tags.py#L26-L47
django-fluent/django-fluent-comments
fluent_comments/templatetags/fluent_comments_tags.py
AjaxCommentTags.get_context_data
def get_context_data(self, parent_context, *tag_args, **tag_kwargs): """ The main logic for the inclusion node, analogous to ``@register.inclusion_node``. """ target_object = tag_args[0] # moved one spot due to .pop(0) new_context = { 'STATIC_URL': parent_context.get('STATIC_URL', None), 'USE_THREADEDCOMMENTS': appsettings.USE_THREADEDCOMMENTS, 'target_object': target_object, } # Be configuration independent: if new_context['STATIC_URL'] is None: try: request = parent_context['request'] except KeyError: new_context.update({'STATIC_URL': settings.STATIC_URL}) else: new_context.update(context_processors.static(request)) return new_context
python
def get_context_data(self, parent_context, *tag_args, **tag_kwargs): """ The main logic for the inclusion node, analogous to ``@register.inclusion_node``. """ target_object = tag_args[0] # moved one spot due to .pop(0) new_context = { 'STATIC_URL': parent_context.get('STATIC_URL', None), 'USE_THREADEDCOMMENTS': appsettings.USE_THREADEDCOMMENTS, 'target_object': target_object, } # Be configuration independent: if new_context['STATIC_URL'] is None: try: request = parent_context['request'] except KeyError: new_context.update({'STATIC_URL': settings.STATIC_URL}) else: new_context.update(context_processors.static(request)) return new_context
The main logic for the inclusion node, analogous to ``@register.inclusion_node``.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/templatetags/fluent_comments_tags.py#L49-L69
django-fluent/django-fluent-comments
fluent_comments/utils.py
get_comment_template_name
def get_comment_template_name(comment): """ Internal function for the rendering of comments. """ ctype = ContentType.objects.get_for_id(comment.content_type_id) return [ "comments/%s/%s/comment.html" % (ctype.app_label, ctype.model), "comments/%s/comment.html" % ctype.app_label, "comments/comment.html" ]
python
def get_comment_template_name(comment): """ Internal function for the rendering of comments. """ ctype = ContentType.objects.get_for_id(comment.content_type_id) return [ "comments/%s/%s/comment.html" % (ctype.app_label, ctype.model), "comments/%s/comment.html" % ctype.app_label, "comments/comment.html" ]
Internal function for the rendering of comments.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/utils.py#L14-L23
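Because the function returns a list of candidate template names, it slots straight into Django's template selection; a sketch assuming a `comment` instance is available:

from django.template.loader import select_template
from fluent_comments.utils import get_comment_template_name

template = select_template(get_comment_template_name(comment))
html = template.render({'comment': comment})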
django-fluent/django-fluent-comments
fluent_comments/forms/helper.py
CompactLabelsCommentFormHelper.render_layout
def render_layout(self, form, context, template_pack=TEMPLATE_PACK): """ Copy any field label to the ``placeholder`` attribute. Note, this method is called when :attr:`layout` is defined. """ # Writing the label values into the field placeholders. # This is done at rendering time, so the Form.__init__() could update any labels before. # Django 1.11 no longer lets EmailInput or URLInput inherit from TextInput, # so checking for `Input` instead while excluding `HiddenInput`. for field in form.fields.values(): if field.label and \ isinstance(field.widget, (Input, forms.Textarea)) and \ not isinstance(field.widget, forms.HiddenInput): field.widget.attrs['placeholder'] = u"{0}:".format(field.label) return super(CompactLabelsCommentFormHelper, self).render_layout(form, context, template_pack=template_pack)
python
def render_layout(self, form, context, template_pack=TEMPLATE_PACK): """ Copy any field label to the ``placeholder`` attribute. Note, this method is called when :attr:`layout` is defined. """ # Writing the label values into the field placeholders. # This is done at rendering time, so the Form.__init__() could update any labels before. # Django 1.11 no longer lets EmailInput or URLInput inherit from TextInput, # so checking for `Input` instead while excluding `HiddenInput`. for field in form.fields.values(): if field.label and \ isinstance(field.widget, (Input, forms.Textarea)) and \ not isinstance(field.widget, forms.HiddenInput): field.widget.attrs['placeholder'] = u"{0}:".format(field.label) return super(CompactLabelsCommentFormHelper, self).render_layout(form, context, template_pack=template_pack)
Copy any field label to the ``placeholder`` attribute. Note, this method is called when :attr:`layout` is defined.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/forms/helper.py#L67-L82
django-fluent/django-fluent-comments
fluent_comments/models.py
get_comments_for_model
def get_comments_for_model(content_object, include_moderated=False): """ Return the QuerySet with all comments for a given model. """ qs = get_comments_model().objects.for_model(content_object) if not include_moderated: qs = qs.filter(is_public=True, is_removed=False) return qs
python
def get_comments_for_model(content_object, include_moderated=False): """ Return the QuerySet with all comments for a given model. """ qs = get_comments_model().objects.for_model(content_object) if not include_moderated: qs = qs.filter(is_public=True, is_removed=False) return qs
Return the QuerySet with all comments for a given model.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/models.py#L36-L45
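A usage sketch; `article` again stands in for the object the comments are attached to:

from fluent_comments.models import get_comments_for_model

# Only public, non-removed comments are returned by default.
for comment in get_comments_for_model(article):
    print(comment.user_name, comment.comment)

# Include comments that are still awaiting moderation.
all_comments = get_comments_for_model(article, include_moderated=True)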
django-fluent/django-fluent-comments
fluent_comments/forms/_captcha.py
CaptchaFormMixin._reorder_fields
def _reorder_fields(self, ordering): """ Test that the 'captcha' field is really present. This could be broken by a bad FLUENT_COMMENTS_FIELD_ORDER configuration. """ if 'captcha' not in ordering: raise ImproperlyConfigured( "When using 'FLUENT_COMMENTS_FIELD_ORDER', " "make sure the 'captcha' field included too to use '{}' form. ".format( self.__class__.__name__ ) ) super(CaptchaFormMixin, self)._reorder_fields(ordering) # Avoid making captcha required for previews. if self.is_preview: self.fields.pop('captcha')
python
def _reorder_fields(self, ordering): """ Test that the 'captcha' field is really present. This could be broken by a bad FLUENT_COMMENTS_FIELD_ORDER configuration. """ if 'captcha' not in ordering: raise ImproperlyConfigured( "When using 'FLUENT_COMMENTS_FIELD_ORDER', " "make sure the 'captcha' field included too to use '{}' form. ".format( self.__class__.__name__ ) ) super(CaptchaFormMixin, self)._reorder_fields(ordering) # Avoid making captcha required for previews. if self.is_preview: self.fields.pop('captcha')
Test that the 'captcha' field is really present. This could be broken by a bad FLUENT_COMMENTS_FIELD_ORDER configuration.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/forms/_captcha.py#L5-L21
django-fluent/django-fluent-comments
fluent_comments/views.py
post_comment_ajax
def post_comment_ajax(request, using=None): """ Post a comment, via an Ajax call. """ if not request.is_ajax(): return HttpResponseBadRequest("Expecting Ajax call") # This is copied from django_comments. # Basically that view does too much, and doesn't offer a hook to change the rendering. # The request object is not passed to next_redirect for example. # # This is a separate view to integrate both features. Previously this used django-ajaxcomments # which is unfortunately not thread-safe (it it changes the comment view per request). # Fill out some initial data fields from an authenticated user, if present data = request.POST.copy() if request.user.is_authenticated: if not data.get('name', ''): data["name"] = request.user.get_full_name() or request.user.username if not data.get('email', ''): data["email"] = request.user.email # Look up the object we're trying to comment about ctype = data.get("content_type") object_pk = data.get("object_pk") if ctype is None or object_pk is None: return CommentPostBadRequest("Missing content_type or object_pk field.") try: model = apps.get_model(*ctype.split(".", 1)) target = model._default_manager.using(using).get(pk=object_pk) except ValueError: return CommentPostBadRequest("Invalid object_pk value: {0}".format(escape(object_pk))) except (TypeError, LookupError): return CommentPostBadRequest("Invalid content_type value: {0}".format(escape(ctype))) except AttributeError: return CommentPostBadRequest("The given content-type {0} does not resolve to a valid model.".format(escape(ctype))) except ObjectDoesNotExist: return CommentPostBadRequest("No object matching content-type {0} and object PK {1} exists.".format(escape(ctype), escape(object_pk))) except (ValueError, ValidationError) as e: return CommentPostBadRequest("Attempting go get content-type {0!r} and object PK {1!r} exists raised {2}".format(escape(ctype), escape(object_pk), e.__class__.__name__)) # Do we want to preview the comment? is_preview = "preview" in data # Construct the comment form form = django_comments.get_form()(target, data=data, is_preview=is_preview) # Check security information if form.security_errors(): return CommentPostBadRequest("The comment form failed security verification: {0}".format(form.security_errors())) # If there are errors or if we requested a preview show the comment if is_preview: comment = form.get_comment_object() if not form.errors else None return _ajax_result(request, form, "preview", comment, object_id=object_pk) if form.errors: return _ajax_result(request, form, "post", object_id=object_pk) # Otherwise create the comment comment = form.get_comment_object() comment.ip_address = request.META.get("REMOTE_ADDR", None) if request.user.is_authenticated: comment.user = request.user # Signal that the comment is about to be saved responses = signals.comment_will_be_posted.send( sender=comment.__class__, comment=comment, request=request ) for (receiver, response) in responses: if response is False: return CommentPostBadRequest("comment_will_be_posted receiver {0} killed the comment".format(receiver.__name__)) # Save the comment and signal that it was saved comment.save() signals.comment_was_posted.send( sender = comment.__class__, comment = comment, request = request ) return _ajax_result(request, form, "post", comment, object_id=object_pk)
python
def post_comment_ajax(request, using=None): """ Post a comment, via an Ajax call. """ if not request.is_ajax(): return HttpResponseBadRequest("Expecting Ajax call") # This is copied from django_comments. # Basically that view does too much, and doesn't offer a hook to change the rendering. # The request object is not passed to next_redirect for example. # # This is a separate view to integrate both features. Previously this used django-ajaxcomments # which is unfortunately not thread-safe (it it changes the comment view per request). # Fill out some initial data fields from an authenticated user, if present data = request.POST.copy() if request.user.is_authenticated: if not data.get('name', ''): data["name"] = request.user.get_full_name() or request.user.username if not data.get('email', ''): data["email"] = request.user.email # Look up the object we're trying to comment about ctype = data.get("content_type") object_pk = data.get("object_pk") if ctype is None or object_pk is None: return CommentPostBadRequest("Missing content_type or object_pk field.") try: model = apps.get_model(*ctype.split(".", 1)) target = model._default_manager.using(using).get(pk=object_pk) except ValueError: return CommentPostBadRequest("Invalid object_pk value: {0}".format(escape(object_pk))) except (TypeError, LookupError): return CommentPostBadRequest("Invalid content_type value: {0}".format(escape(ctype))) except AttributeError: return CommentPostBadRequest("The given content-type {0} does not resolve to a valid model.".format(escape(ctype))) except ObjectDoesNotExist: return CommentPostBadRequest("No object matching content-type {0} and object PK {1} exists.".format(escape(ctype), escape(object_pk))) except (ValueError, ValidationError) as e: return CommentPostBadRequest("Attempting go get content-type {0!r} and object PK {1!r} exists raised {2}".format(escape(ctype), escape(object_pk), e.__class__.__name__)) # Do we want to preview the comment? is_preview = "preview" in data # Construct the comment form form = django_comments.get_form()(target, data=data, is_preview=is_preview) # Check security information if form.security_errors(): return CommentPostBadRequest("The comment form failed security verification: {0}".format(form.security_errors())) # If there are errors or if we requested a preview show the comment if is_preview: comment = form.get_comment_object() if not form.errors else None return _ajax_result(request, form, "preview", comment, object_id=object_pk) if form.errors: return _ajax_result(request, form, "post", object_id=object_pk) # Otherwise create the comment comment = form.get_comment_object() comment.ip_address = request.META.get("REMOTE_ADDR", None) if request.user.is_authenticated: comment.user = request.user # Signal that the comment is about to be saved responses = signals.comment_will_be_posted.send( sender=comment.__class__, comment=comment, request=request ) for (receiver, response) in responses: if response is False: return CommentPostBadRequest("comment_will_be_posted receiver {0} killed the comment".format(receiver.__name__)) # Save the comment and signal that it was saved comment.save() signals.comment_was_posted.send( sender = comment.__class__, comment = comment, request = request ) return _ajax_result(request, form, "post", comment, object_id=object_pk)
Post a comment, via an Ajax call.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/views.py#L24-L107
django-fluent/django-fluent-comments
fluent_comments/views.py
_render_errors
def _render_errors(field): """ Render form errors in crispy-forms style. """ template = '{0}/layout/field_errors.html'.format(appsettings.CRISPY_TEMPLATE_PACK) return render_to_string(template, { 'field': field, 'form_show_errors': True, })
python
def _render_errors(field): """ Render form errors in crispy-forms style. """ template = '{0}/layout/field_errors.html'.format(appsettings.CRISPY_TEMPLATE_PACK) return render_to_string(template, { 'field': field, 'form_show_errors': True, })
Render form errors in crispy-forms style.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/views.py#L154-L162
django-fluent/django-fluent-comments
fluent_comments/__init__.py
get_form
def get_form(): """ Return the form to use for commenting. """ global form_class from fluent_comments import appsettings if form_class is None: if appsettings.FLUENT_COMMENTS_FORM_CLASS: from django.utils.module_loading import import_string form_class = import_string(appsettings.FLUENT_COMMENTS_FORM_CLASS) else: from fluent_comments.forms import FluentCommentForm form_class = FluentCommentForm return form_class
python
def get_form(): """ Return the form to use for commenting. """ global form_class from fluent_comments import appsettings if form_class is None: if appsettings.FLUENT_COMMENTS_FORM_CLASS: from django.utils.module_loading import import_string form_class = import_string(appsettings.FLUENT_COMMENTS_FORM_CLASS) else: from fluent_comments.forms import FluentCommentForm form_class = FluentCommentForm return form_class
Return the form to use for commenting.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/__init__.py#L26-L40
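A sketch of how the lazily resolved form class is typically used; django_comments comment forms take the target object as their first argument, and `article`/`request` are placeholders here:

import fluent_comments

CommentForm = fluent_comments.get_form()
form = CommentForm(article, data=request.POST)
if form.is_valid():
    comment = form.get_comment_object()
    comment.save()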
django-fluent/django-fluent-comments
fluent_comments/receivers.py
load_default_moderator
def load_default_moderator(): """ Find a moderator object """ if appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR == 'default': # Perform spam checks return moderation.FluentCommentsModerator(None) elif appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR == 'deny': # Deny all comments not from known registered models. return moderation.AlwaysDeny(None) elif str(appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR).lower() == 'none': # Disables default moderator return moderation.NullModerator(None) elif '.' in appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR: return import_string(appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR)(None) else: raise ImproperlyConfigured( "Bad FLUENT_COMMENTS_DEFAULT_MODERATOR value. Provide default/deny/none or a dotted path" )
python
def load_default_moderator(): """ Find a moderator object """ if appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR == 'default': # Perform spam checks return moderation.FluentCommentsModerator(None) elif appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR == 'deny': # Deny all comments not from known registered models. return moderation.AlwaysDeny(None) elif str(appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR).lower() == 'none': # Disables default moderator return moderation.NullModerator(None) elif '.' in appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR: return import_string(appsettings.FLUENT_COMMENTS_DEFAULT_MODERATOR)(None) else: raise ImproperlyConfigured( "Bad FLUENT_COMMENTS_DEFAULT_MODERATOR value. Provide default/deny/none or a dotted path" )
Find a moderator object
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/receivers.py#L24-L42
django-fluent/django-fluent-comments
fluent_comments/receivers.py
on_comment_will_be_posted
def on_comment_will_be_posted(sender, comment, request, **kwargs): """ Make sure both the Ajax and regular comments are checked for moderation. This signal is also used to link moderators to the comment posting. """ content_object = comment.content_object moderator = moderation.get_model_moderator(content_object.__class__) if moderator and comment.__class__ is not CommentModel: # Help with some hard to diagnose problems. The default Django moderator connects # to the configured comment model. When this model differs from the signal sender, # the the form stores a different model then COMMENTS_APP provides. moderator = None logger.warning( "Comment of type '%s' was not moderated by '%s', " "because the parent '%s' has a moderator installed for '%s' instead", comment.__class__.__name__, moderator.__class__.__name__, content_object.__class__.__name__, CommentModel.__name__ ) if moderator is None: logger.info( "Using default moderator for comment '%s' on parent '%s'", comment.__class__.__name__, content_object.__class__.__name__ ) _run_default_moderator(comment, content_object, request)
python
def on_comment_will_be_posted(sender, comment, request, **kwargs): """ Make sure both the Ajax and regular comments are checked for moderation. This signal is also used to link moderators to the comment posting. """ content_object = comment.content_object moderator = moderation.get_model_moderator(content_object.__class__) if moderator and comment.__class__ is not CommentModel: # Help with some hard to diagnose problems. The default Django moderator connects # to the configured comment model. When this model differs from the signal sender, # the the form stores a different model then COMMENTS_APP provides. moderator = None logger.warning( "Comment of type '%s' was not moderated by '%s', " "because the parent '%s' has a moderator installed for '%s' instead", comment.__class__.__name__, moderator.__class__.__name__, content_object.__class__.__name__, CommentModel.__name__ ) if moderator is None: logger.info( "Using default moderator for comment '%s' on parent '%s'", comment.__class__.__name__, content_object.__class__.__name__ ) _run_default_moderator(comment, content_object, request)
Make sure both the Ajax and regular comments are checked for moderation. This signal is also used to link moderators to the comment posting.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/receivers.py#L50-L74
django-fluent/django-fluent-comments
fluent_comments/receivers.py
_run_default_moderator
def _run_default_moderator(comment, content_object, request): """ Run the default moderator """ # The default moderator will likely not check things like "auto close". # It can still provide akismet and bad word checking. if not default_moderator.allow(comment, content_object, request): # Comment will be disallowed outright (HTTP 403 response) return False if default_moderator.moderate(comment, content_object, request): comment.is_public = False
python
def _run_default_moderator(comment, content_object, request): """ Run the default moderator """ # The default moderator will likely not check things like "auto close". # It can still provide akismet and bad word checking. if not default_moderator.allow(comment, content_object, request): # Comment will be disallowed outright (HTTP 403 response) return False if default_moderator.moderate(comment, content_object, request): comment.is_public = False
Run the default moderator
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/receivers.py#L77-L88
django-fluent/django-fluent-comments
fluent_comments/receivers.py
on_comment_posted
def on_comment_posted(sender, comment, request, **kwargs): """ Send email notification of a new comment to site staff when email notifications have been requested. """ content_object = comment.content_object moderator = moderation.get_model_moderator(content_object.__class__) if moderator is None or comment.__class__ is not CommentModel: # No custom moderator means no email would be sent. # This still pass the comment to the default moderator. default_moderator.email(comment, content_object, request)
python
def on_comment_posted(sender, comment, request, **kwargs): """ Send email notification of a new comment to site staff when email notifications have been requested. """ content_object = comment.content_object moderator = moderation.get_model_moderator(content_object.__class__) if moderator is None or comment.__class__ is not CommentModel: # No custom moderator means no email would be sent. # This still pass the comment to the default moderator. default_moderator.email(comment, content_object, request)
Send email notification of a new comment to site staff when email notifications have been requested.
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/receivers.py#L92-L102
django-fluent/django-fluent-comments
fluent_comments/akismet.py
akismet_check
def akismet_check(comment, content_object, request): """ Connects to Akismet and evaluates to True if Akismet marks this comment as spam. :rtype: akismet.SpamStatus """ # Return previously cached response akismet_result = getattr(comment, '_akismet_result_', None) if akismet_result is not None: return akismet_result # Get Akismet data AKISMET_API_KEY = appsettings.AKISMET_API_KEY if not AKISMET_API_KEY: raise ImproperlyConfigured('You must set AKISMET_API_KEY to use comment moderation with Akismet.') current_domain = get_current_site(request).domain auto_blog_url = '{0}://{1}/'.format(request.is_secure() and 'https' or 'http', current_domain) blog_url = appsettings.AKISMET_BLOG_URL or auto_blog_url akismet = Akismet( AKISMET_API_KEY, blog=blog_url, is_test=int(bool(appsettings.AKISMET_IS_TEST)), application_user_agent='django-fluent-comments/{0}'.format(fluent_comments.__version__), ) akismet_data = _get_akismet_data(blog_url, comment, content_object, request) akismet_result = akismet.check(**akismet_data) # raises AkismetServerError when key is invalid setattr(comment, "_akismet_result_", akismet_result) return akismet_result
python
def akismet_check(comment, content_object, request): """ Connects to Akismet and evaluates to True if Akismet marks this comment as spam. :rtype: akismet.SpamStatus """ # Return previously cached response akismet_result = getattr(comment, '_akismet_result_', None) if akismet_result is not None: return akismet_result # Get Akismet data AKISMET_API_KEY = appsettings.AKISMET_API_KEY if not AKISMET_API_KEY: raise ImproperlyConfigured('You must set AKISMET_API_KEY to use comment moderation with Akismet.') current_domain = get_current_site(request).domain auto_blog_url = '{0}://{1}/'.format(request.is_secure() and 'https' or 'http', current_domain) blog_url = appsettings.AKISMET_BLOG_URL or auto_blog_url akismet = Akismet( AKISMET_API_KEY, blog=blog_url, is_test=int(bool(appsettings.AKISMET_IS_TEST)), application_user_agent='django-fluent-comments/{0}'.format(fluent_comments.__version__), ) akismet_data = _get_akismet_data(blog_url, comment, content_object, request) akismet_result = akismet.check(**akismet_data) # raises AkismetServerError when key is invalid setattr(comment, "_akismet_result_", akismet_result) return akismet_result
Connects to Akismet and returns a spam status that evaluates to True if Akismet marks this comment as spam. :rtype: akismet.SpamStatus
https://github.com/django-fluent/django-fluent-comments/blob/bfe98d55b56fedd8ca2e2659eed53a6390e53adf/fluent_comments/akismet.py#L17-L47
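The check reads a few settings through appsettings; a hedged configuration sketch (setting names mirror the attributes referenced above and should be double-checked against the app's settings module):

# settings.py (sketch)
AKISMET_API_KEY = 'your-akismet-key'       # required; ImproperlyConfigured is raised without it
AKISMET_BLOG_URL = 'https://example.com/'  # optional; derived from the request when empty
AKISMET_IS_TEST = True                     # optional; mark the calls as test traffic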
DinoTools/python-overpy
overpy/__init__.py
Overpass._handle_remark_msg
def _handle_remark_msg(self, msg): """ Try to parse the message provided with the remark tag or element. :param str msg: The message :raises overpy.exception.OverpassRuntimeError: If message starts with 'runtime error:' :raises overpy.exception.OverpassRuntimeRemark: If message starts with 'runtime remark:' :raises overpy.exception.OverpassUnknownError: If we are unable to identify the error """ msg = msg.strip() if msg.startswith("runtime error:"): raise exception.OverpassRuntimeError(msg=msg) elif msg.startswith("runtime remark:"): raise exception.OverpassRuntimeRemark(msg=msg) raise exception.OverpassUnknownError(msg=msg)
python
def _handle_remark_msg(self, msg): """ Try to parse the message provided with the remark tag or element. :param str msg: The message :raises overpy.exception.OverpassRuntimeError: If message starts with 'runtime error:' :raises overpy.exception.OverpassRuntimeRemark: If message starts with 'runtime remark:' :raises overpy.exception.OverpassUnknownError: If we are unable to identify the error """ msg = msg.strip() if msg.startswith("runtime error:"): raise exception.OverpassRuntimeError(msg=msg) elif msg.startswith("runtime remark:"): raise exception.OverpassRuntimeRemark(msg=msg) raise exception.OverpassUnknownError(msg=msg)
Try to parse the message provided with the remark tag or element. :param str msg: The message :raises overpy.exception.OverpassRuntimeError: If message starts with 'runtime error:' :raises overpy.exception.OverpassRuntimeRemark: If message starts with 'runtime remark:' :raises overpy.exception.OverpassUnknownError: If we are unable to identify the error
https://github.com/DinoTools/python-overpy/blob/db8f80eeb1b4d1405816bd62c16ddb3364e0c46d/overpy/__init__.py#L97-L111
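A small sketch of how the remark prefixes map to exceptions (calling the private helper directly, purely for illustration):

import overpy
from overpy import exception

api = overpy.Overpass()
try:
    api._handle_remark_msg("runtime error: Query timed out in ...")
except exception.OverpassRuntimeError as err:
    print("query failed:", err)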
DinoTools/python-overpy
overpy/__init__.py
Overpass.query
def query(self, query): """ Query the Overpass API :param String|Bytes query: The query string in Overpass QL :return: The parsed result :rtype: overpy.Result """ if not isinstance(query, bytes): query = query.encode("utf-8") retry_num = 0 retry_exceptions = [] do_retry = True if self.max_retry_count > 0 else False while retry_num <= self.max_retry_count: if retry_num > 0: time.sleep(self.retry_timeout) retry_num += 1 try: f = urlopen(self.url, query) except HTTPError as e: f = e response = f.read(self.read_chunk_size) while True: data = f.read(self.read_chunk_size) if len(data) == 0: break response = response + data f.close() if f.code == 200: if PY2: http_info = f.info() content_type = http_info.getheader("content-type") else: content_type = f.getheader("Content-Type") if content_type == "application/json": return self.parse_json(response) if content_type == "application/osm3s+xml": return self.parse_xml(response) e = exception.OverpassUnknownContentType(content_type) if not do_retry: raise e retry_exceptions.append(e) continue if f.code == 400: msgs = [] for msg in self._regex_extract_error_msg.finditer(response): tmp = self._regex_remove_tag.sub(b"", msg.group("msg")) try: tmp = tmp.decode("utf-8") except UnicodeDecodeError: tmp = repr(tmp) msgs.append(tmp) e = exception.OverpassBadRequest( query, msgs=msgs ) if not do_retry: raise e retry_exceptions.append(e) continue if f.code == 429: e = exception.OverpassTooManyRequests if not do_retry: raise e retry_exceptions.append(e) continue if f.code == 504: e = exception.OverpassGatewayTimeout if not do_retry: raise e retry_exceptions.append(e) continue e = exception.OverpassUnknownHTTPStatusCode(f.code) if not do_retry: raise e retry_exceptions.append(e) continue raise exception.MaxRetriesReached(retry_count=retry_num, exceptions=retry_exceptions)
python
def query(self, query): """ Query the Overpass API :param String|Bytes query: The query string in Overpass QL :return: The parsed result :rtype: overpy.Result """ if not isinstance(query, bytes): query = query.encode("utf-8") retry_num = 0 retry_exceptions = [] do_retry = True if self.max_retry_count > 0 else False while retry_num <= self.max_retry_count: if retry_num > 0: time.sleep(self.retry_timeout) retry_num += 1 try: f = urlopen(self.url, query) except HTTPError as e: f = e response = f.read(self.read_chunk_size) while True: data = f.read(self.read_chunk_size) if len(data) == 0: break response = response + data f.close() if f.code == 200: if PY2: http_info = f.info() content_type = http_info.getheader("content-type") else: content_type = f.getheader("Content-Type") if content_type == "application/json": return self.parse_json(response) if content_type == "application/osm3s+xml": return self.parse_xml(response) e = exception.OverpassUnknownContentType(content_type) if not do_retry: raise e retry_exceptions.append(e) continue if f.code == 400: msgs = [] for msg in self._regex_extract_error_msg.finditer(response): tmp = self._regex_remove_tag.sub(b"", msg.group("msg")) try: tmp = tmp.decode("utf-8") except UnicodeDecodeError: tmp = repr(tmp) msgs.append(tmp) e = exception.OverpassBadRequest( query, msgs=msgs ) if not do_retry: raise e retry_exceptions.append(e) continue if f.code == 429: e = exception.OverpassTooManyRequests if not do_retry: raise e retry_exceptions.append(e) continue if f.code == 504: e = exception.OverpassGatewayTimeout if not do_retry: raise e retry_exceptions.append(e) continue e = exception.OverpassUnknownHTTPStatusCode(f.code) if not do_retry: raise e retry_exceptions.append(e) continue raise exception.MaxRetriesReached(retry_count=retry_num, exceptions=retry_exceptions)
Query the Overpass API :param String|Bytes query: The query string in Overpass QL :return: The parsed result :rtype: overpy.Result
https://github.com/DinoTools/python-overpy/blob/db8f80eeb1b4d1405816bd62c16ddb3364e0c46d/overpy/__init__.py#L113-L202
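A minimal end-to-end sketch of the query flow (the Overpass QL string and bounding box are arbitrary examples):

import overpy

api = overpy.Overpass()
# Ask the Overpass API for all nodes inside a small bounding box.
result = api.query("node(50.745, 7.17, 50.75, 7.18); out;")
for node in result.nodes:
    print(node.id, node.lat, node.lon)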
DinoTools/python-overpy
overpy/__init__.py
Overpass.parse_json
def parse_json(self, data, encoding="utf-8"): """ Parse raw response from Overpass service. :param data: Raw JSON Data :type data: String or Bytes :param encoding: Encoding to decode byte string :type encoding: String :return: Result object :rtype: overpy.Result """ if isinstance(data, bytes): data = data.decode(encoding) data = json.loads(data, parse_float=Decimal) if "remark" in data: self._handle_remark_msg(msg=data.get("remark")) return Result.from_json(data, api=self)
python
def parse_json(self, data, encoding="utf-8"): """ Parse raw response from Overpass service. :param data: Raw JSON Data :type data: String or Bytes :param encoding: Encoding to decode byte string :type encoding: String :return: Result object :rtype: overpy.Result """ if isinstance(data, bytes): data = data.decode(encoding) data = json.loads(data, parse_float=Decimal) if "remark" in data: self._handle_remark_msg(msg=data.get("remark")) return Result.from_json(data, api=self)
Parse raw response from Overpass service. :param data: Raw JSON Data :type data: String or Bytes :param encoding: Encoding to decode byte string :type encoding: String :return: Result object :rtype: overpy.Result
https://github.com/DinoTools/python-overpy/blob/db8f80eeb1b4d1405816bd62c16ddb3364e0c46d/overpy/__init__.py#L204-L220
DinoTools/python-overpy
overpy/__init__.py
Result.expand
def expand(self, other): """ Add all elements from an other result to the list of elements of this result object. It is used by the auto resolve feature. :param other: Expand the result with the elements from this result. :type other: overpy.Result :raises ValueError: If provided parameter is not instance of :class:`overpy.Result` """ if not isinstance(other, Result): raise ValueError("Provided argument has to be instance of overpy:Result()") other_collection_map = {Node: other.nodes, Way: other.ways, Relation: other.relations, Area: other.areas} for element_type, own_collection in self._class_collection_map.items(): for element in other_collection_map[element_type]: if is_valid_type(element, element_type) and element.id not in own_collection: own_collection[element.id] = element
python
def expand(self, other): """ Add all elements from an other result to the list of elements of this result object. It is used by the auto resolve feature. :param other: Expand the result with the elements from this result. :type other: overpy.Result :raises ValueError: If provided parameter is not instance of :class:`overpy.Result` """ if not isinstance(other, Result): raise ValueError("Provided argument has to be instance of overpy:Result()") other_collection_map = {Node: other.nodes, Way: other.ways, Relation: other.relations, Area: other.areas} for element_type, own_collection in self._class_collection_map.items(): for element in other_collection_map[element_type]: if is_valid_type(element, element_type) and element.id not in own_collection: own_collection[element.id] = element
Add all elements from another result to the list of elements of this result object. It is used by the auto-resolve feature. :param other: Expand the result with the elements from this result. :type other: overpy.Result :raises ValueError: If the provided parameter is not an instance of :class:`overpy.Result`
https://github.com/DinoTools/python-overpy/blob/db8f80eeb1b4d1405816bd62c16ddb3364e0c46d/overpy/__init__.py#L270-L287
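A sketch of merging two query results with expand; elements already present are skipped by id (the queries are arbitrary examples):

import overpy

api = overpy.Overpass()
nodes = api.query("node(50.745, 7.17, 50.75, 7.18); out;")
ways = api.query("way(50.745, 7.17, 50.75, 7.18); out;")

# Pull the elements from the second result into the first one.
nodes.expand(ways)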