Dataset schema: hash — string, length 64; content — string, length 0 to 1.51M.
cbcad3e4ed0820b93d274a0e0eefc28fe6cb726260acf2110683bba767c5c570
import getpass

from django.contrib.auth import get_user_model
from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS

UserModel = get_user_model()


class Command(BaseCommand):
    help = "Change a user's password for django.contrib.auth."
    requires_migrations_checks = True
    requires_system_checks = False

    def _get_pass(self, prompt="Password: "):
        p = getpass.getpass(prompt=prompt)
        if not p:
            raise CommandError("aborted")
        return p

    def add_arguments(self, parser):
        parser.add_argument(
            'username', nargs='?',
            help='Username to change password for; by default, it\'s the current username.',
        )
        parser.add_argument(
            '--database', default=DEFAULT_DB_ALIAS,
            help='Specifies the database to use. Default is "default".',
        )

    def handle(self, *args, **options):
        if options['username']:
            username = options['username']
        else:
            username = getpass.getuser()

        try:
            u = UserModel._default_manager.using(options['database']).get(**{
                UserModel.USERNAME_FIELD: username
            })
        except UserModel.DoesNotExist:
            raise CommandError("user '%s' does not exist" % username)

        self.stdout.write("Changing password for user '%s'\n" % u)

        MAX_TRIES = 3
        count = 0
        p1, p2 = 1, 2  # To make them initially mismatch.
        password_validated = False
        while (p1 != p2 or not password_validated) and count < MAX_TRIES:
            p1 = self._get_pass()
            p2 = self._get_pass("Password (again): ")
            if p1 != p2:
                self.stdout.write("Passwords do not match. Please try again.\n")
                count += 1
                # Don't validate passwords that don't match.
                continue
            try:
                validate_password(p2, u)
            except ValidationError as err:
                self.stderr.write('\n'.join(err.messages))
                count += 1
            else:
                password_validated = True

        if count == MAX_TRIES:
            raise CommandError("Aborting password change for user '%s' after %s attempts" % (u, count))

        u.set_password(p1)
        u.save()

        return "Password changed successfully for user '%s'" % u
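Usage sketch for the command above (a hedged example: the username 'alice' is hypothetical, and a configured Django project is assumed). Note the command is interactive by design, so even when driven through call_command() it still prompts for the password on the controlling terminal:

    from django.core.management import call_command

    # Prompts "Password:" / "Password (again):" via getpass.
    call_command('changepassword', 'alice', database='default')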
f081bc936c315190a439517fec2832da1a2ec1a2672f64a8deaf09a8c6a33446
""" Management utility to create superusers. """ import getpass import sys from django.contrib.auth import get_user_model from django.contrib.auth.management import get_default_username from django.contrib.auth.password_validation import validate_password from django.core import exceptions from django.core.management.base import BaseCommand, CommandError from django.db import DEFAULT_DB_ALIAS from django.utils.text import capfirst class NotRunningInTTYException(Exception): pass PASSWORD_FIELD = 'password' class Command(BaseCommand): help = 'Used to create a superuser.' requires_migrations_checks = True stealth_options = ('stdin',) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.UserModel = get_user_model() self.username_field = self.UserModel._meta.get_field(self.UserModel.USERNAME_FIELD) def add_arguments(self, parser): parser.add_argument( '--%s' % self.UserModel.USERNAME_FIELD, help='Specifies the login for the superuser.', ) parser.add_argument( '--noinput', '--no-input', action='store_false', dest='interactive', help=( 'Tells Django to NOT prompt the user for input of any kind. ' 'You must use --%s with --noinput, along with an option for ' 'any other required field. Superusers created with --noinput will ' 'not be able to log in until they\'re given a valid password.' % self.UserModel.USERNAME_FIELD ), ) parser.add_argument( '--database', default=DEFAULT_DB_ALIAS, help='Specifies the database to use. Default is "default".', ) for field in self.UserModel.REQUIRED_FIELDS: parser.add_argument( '--%s' % field, help='Specifies the %s for the superuser.' % field, ) def execute(self, *args, **options): self.stdin = options.get('stdin', sys.stdin) # Used for testing return super().execute(*args, **options) def handle(self, *args, **options): username = options[self.UserModel.USERNAME_FIELD] database = options['database'] user_data = {} verbose_field_name = self.username_field.verbose_name try: self.UserModel._meta.get_field(PASSWORD_FIELD) except exceptions.FieldDoesNotExist: pass else: # If not provided, create the user with an unusable password. user_data[PASSWORD_FIELD] = None try: if options['interactive']: # Same as user_data but with foreign keys as fake model # instances instead of raw IDs. fake_user_data = {} if hasattr(self.stdin, 'isatty') and not self.stdin.isatty(): raise NotRunningInTTYException default_username = get_default_username() if username: error_msg = self._validate_username(username, verbose_field_name, database) if error_msg: self.stderr.write(error_msg) username = None elif username == '': raise CommandError('%s cannot be blank.' % capfirst(verbose_field_name)) # Prompt for username. while username is None: message = self._get_input_message(self.username_field, default_username) username = self.get_input_data(self.username_field, message, default_username) if username: error_msg = self._validate_username(username, verbose_field_name, database) if error_msg: self.stderr.write(error_msg) username = None continue user_data[self.UserModel.USERNAME_FIELD] = username fake_user_data[self.UserModel.USERNAME_FIELD] = ( self.username_field.remote_field.model(username) if self.username_field.remote_field else username ) # Prompt for required fields. 
for field_name in self.UserModel.REQUIRED_FIELDS: field = self.UserModel._meta.get_field(field_name) user_data[field_name] = options[field_name] while user_data[field_name] is None: message = self._get_input_message(field) input_value = self.get_input_data(field, message) user_data[field_name] = input_value fake_user_data[field_name] = input_value # Wrap any foreign keys in fake model instances if field.remote_field: fake_user_data[field_name] = field.remote_field.model(input_value) # Prompt for a password if the model has one. while PASSWORD_FIELD in user_data and user_data[PASSWORD_FIELD] is None: password = getpass.getpass() password2 = getpass.getpass('Password (again): ') if password != password2: self.stderr.write("Error: Your passwords didn't match.") # Don't validate passwords that don't match. continue if password.strip() == '': self.stderr.write("Error: Blank passwords aren't allowed.") # Don't validate blank passwords. continue try: validate_password(password2, self.UserModel(**fake_user_data)) except exceptions.ValidationError as err: self.stderr.write('\n'.join(err.messages)) response = input('Bypass password validation and create user anyway? [y/N]: ') if response.lower() != 'y': continue user_data[PASSWORD_FIELD] = password else: # Non-interactive mode. if username is None: raise CommandError('You must use --%s with --noinput.' % self.UserModel.USERNAME_FIELD) else: error_msg = self._validate_username(username, verbose_field_name, database) if error_msg: raise CommandError(error_msg) user_data[self.UserModel.USERNAME_FIELD] = username for field_name in self.UserModel.REQUIRED_FIELDS: if options[field_name]: field = self.UserModel._meta.get_field(field_name) user_data[field_name] = field.clean(options[field_name], None) else: raise CommandError('You must use --%s with --noinput.' % field_name) self.UserModel._default_manager.db_manager(database).create_superuser(**user_data) if options['verbosity'] >= 1: self.stdout.write("Superuser created successfully.") except KeyboardInterrupt: self.stderr.write('\nOperation cancelled.') sys.exit(1) except exceptions.ValidationError as e: raise CommandError('; '.join(e.messages)) except NotRunningInTTYException: self.stdout.write( 'Superuser creation skipped due to not running in a TTY. ' 'You can run `manage.py createsuperuser` in your project ' 'to create one manually.' ) def get_input_data(self, field, message, default=None): """ Override this method if you want to customize data inputs or validation exceptions. """ raw_value = input(message) if default and raw_value == '': raw_value = default try: val = field.clean(raw_value, None) except exceptions.ValidationError as e: self.stderr.write("Error: %s" % '; '.join(e.messages)) val = None return val def _get_input_message(self, field, default=None): return '%s%s%s: ' % ( capfirst(field.verbose_name), " (leave blank to use '%s')" % default if default else '', ' (%s.%s)' % ( field.remote_field.model._meta.object_name, field.remote_field.field_name, ) if field.remote_field else '', ) def _validate_username(self, username, verbose_field_name, database): """Validate username. If invalid, return a string error message.""" if self.username_field.unique: try: self.UserModel._default_manager.db_manager(database).get_by_natural_key(username) except self.UserModel.DoesNotExist: pass else: return 'Error: That %s is already taken.' % verbose_field_name if not username: return '%s cannot be blank.' 
% capfirst(verbose_field_name) try: self.username_field.clean(username, None) except exceptions.ValidationError as e: return '; '.join(e.messages)
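Usage sketch for the non-interactive path above (a hedged example: it assumes the default user model, where USERNAME_FIELD is 'username' and 'email' is in REQUIRED_FIELDS; the account details are hypothetical). With interactive=False the new superuser gets an unusable password until one is set:

    from django.core.management import call_command

    call_command(
        'createsuperuser',
        interactive=False,
        username='admin',           # hypothetical
        email='admin@example.com',  # hypothetical
    )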
ca65f5dd19dd64788b584dd5eb9be00758b8a7e021ccaf2969963f421d4e15e9
#!/usr/bin/env python
import argparse
import subprocess
import sys
from pathlib import Path

try:
    import closure
except ImportError:
    closure_compiler = None
else:
    closure_compiler = closure.get_jar_filename()

js_path = Path(__file__).parent.parent / 'static' / 'admin' / 'js'


def main():
    description = """With no file paths given this script will automatically
compress all jQuery-based files of the admin app. Requires the Google Closure
Compiler library and Java version 6 or later."""
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('file', nargs='*')
    parser.add_argument(
        "-c", dest="compiler", default="~/bin/compiler.jar",
        help="path to Closure Compiler jar file",
    )
    parser.add_argument("-v", "--verbose", action="store_true", dest="verbose")
    parser.add_argument("-q", "--quiet", action="store_false", dest="verbose")
    options = parser.parse_args()

    compiler = Path(closure_compiler or options.compiler).expanduser()
    if not compiler.exists():
        sys.exit(
            "Google Closure compiler jar file %s not found. Please use the -c "
            "option to specify the path." % compiler
        )

    if not options.file:
        if options.verbose:
            sys.stdout.write("No filenames given; defaulting to admin scripts\n")
        files = [
            js_path / f
            for f in ["actions.js", "collapse.js", "inlines.js", "prepopulate.js"]
        ]
    else:
        files = [Path(f) for f in options.file]

    for file_path in files:
        to_compress = file_path.expanduser()
        if to_compress.exists():
            to_compress_min = to_compress.with_suffix('.min.js')
            cmd = "java -jar %s --rewrite_polyfills=false --js %s --js_output_file %s" % (
                compiler, to_compress, to_compress_min,
            )
            if options.verbose:
                sys.stdout.write("Running: %s\n" % cmd)
            subprocess.call(cmd.split())
        else:
            sys.stdout.write("File %s not found. Sure it exists?\n" % to_compress)


if __name__ == '__main__':
    main()
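Usage sketch for the script above (a hedged example: the script location scripts/compress.py and the jar path are hypothetical). With no file arguments it falls back to the four admin scripts listed in main():

    import subprocess

    # -v prints each "java -jar ..." command before it runs.
    subprocess.run(
        ['python', 'scripts/compress.py', '-v', '-c', '/opt/closure/compiler.jar'],
        check=True,
    )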
7a1052608d430cbab3983514636275cccc08246a1fa27b8028091210a4e62b21
from datetime import datetime, timedelta

from django.conf import settings
from django.contrib.admin import FieldListFilter
from django.contrib.admin.exceptions import (
    DisallowedModelAdminLookup, DisallowedModelAdminToField,
)
from django.contrib.admin.options import (
    IS_POPUP_VAR, TO_FIELD_VAR, IncorrectLookupParameters,
)
from django.contrib.admin.utils import (
    get_fields_from_path, lookup_needs_distinct, prepare_lookup_value, quote,
)
from django.core.exceptions import (
    FieldDoesNotExist, ImproperlyConfigured, SuspiciousOperation,
)
from django.core.paginator import InvalidPage
from django.db import models
from django.db.models.expressions import Combinable, F, OrderBy
from django.urls import reverse
from django.utils.http import urlencode
from django.utils.timezone import make_aware
from django.utils.translation import gettext

# Changelist settings
ALL_VAR = 'all'
ORDER_VAR = 'o'
ORDER_TYPE_VAR = 'ot'
PAGE_VAR = 'p'
SEARCH_VAR = 'q'
ERROR_FLAG = 'e'

IGNORED_PARAMS = (
    ALL_VAR, ORDER_VAR, ORDER_TYPE_VAR, SEARCH_VAR, IS_POPUP_VAR, TO_FIELD_VAR)


class ChangeList:
    def __init__(self, request, model, list_display, list_display_links,
                 list_filter, date_hierarchy, search_fields, list_select_related,
                 list_per_page, list_max_show_all, list_editable, model_admin, sortable_by):
        self.model = model
        self.opts = model._meta
        self.lookup_opts = self.opts
        self.root_queryset = model_admin.get_queryset(request)
        self.list_display = list_display
        self.list_display_links = list_display_links
        self.list_filter = list_filter
        self.has_filters = None
        self.date_hierarchy = date_hierarchy
        self.search_fields = search_fields
        self.list_select_related = list_select_related
        self.list_per_page = list_per_page
        self.list_max_show_all = list_max_show_all
        self.model_admin = model_admin
        self.preserved_filters = model_admin.get_preserved_filters(request)
        self.sortable_by = sortable_by

        # Get search parameters from the query string.
        try:
            self.page_num = int(request.GET.get(PAGE_VAR, 0))
        except ValueError:
            self.page_num = 0
        self.show_all = ALL_VAR in request.GET
        self.is_popup = IS_POPUP_VAR in request.GET
        to_field = request.GET.get(TO_FIELD_VAR)
        if to_field and not model_admin.to_field_allowed(request, to_field):
            raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)
        self.to_field = to_field
        self.params = dict(request.GET.items())
        if PAGE_VAR in self.params:
            del self.params[PAGE_VAR]
        if ERROR_FLAG in self.params:
            del self.params[ERROR_FLAG]

        if self.is_popup:
            self.list_editable = ()
        else:
            self.list_editable = list_editable
        self.query = request.GET.get(SEARCH_VAR, '')
        self.queryset = self.get_queryset(request)
        self.get_results(request)
        if self.is_popup:
            title = gettext('Select %s')
        elif self.model_admin.has_change_permission(request):
            title = gettext('Select %s to change')
        else:
            title = gettext('Select %s to view')
        self.title = title % self.opts.verbose_name
        self.pk_attname = self.lookup_opts.pk.attname

    def get_filters_params(self, params=None):
        """
        Return all params except IGNORED_PARAMS.
        """
        params = params or self.params
        lookup_params = params.copy()  # a dictionary of the query string
        # Remove all the parameters that are globally and systematically
        # ignored.
        for ignored in IGNORED_PARAMS:
            if ignored in lookup_params:
                del lookup_params[ignored]
        return lookup_params

    def get_filters(self, request):
        lookup_params = self.get_filters_params()
        use_distinct = False

        for key, value in lookup_params.items():
            if not self.model_admin.lookup_allowed(key, value):
                raise DisallowedModelAdminLookup("Filtering by %s not allowed" % key)

        filter_specs = []
        for list_filter in self.list_filter:
            if callable(list_filter):
                # This is simply a custom list filter class.
                spec = list_filter(request, lookup_params, self.model, self.model_admin)
            else:
                field_path = None
                if isinstance(list_filter, (tuple, list)):
                    # This is a custom FieldListFilter class for a given field.
                    field, field_list_filter_class = list_filter
                else:
                    # This is simply a field name, so use the default
                    # FieldListFilter class that has been registered for the
                    # type of the given field.
                    field, field_list_filter_class = list_filter, FieldListFilter.create
                if not isinstance(field, models.Field):
                    field_path = field
                    field = get_fields_from_path(self.model, field_path)[-1]

                lookup_params_count = len(lookup_params)
                spec = field_list_filter_class(
                    field, request, lookup_params,
                    self.model, self.model_admin, field_path=field_path,
                )
                # field_list_filter_class removes any lookup_params it
                # processes. If that happened, check if distinct() is needed to
                # remove duplicate results.
                if lookup_params_count > len(lookup_params):
                    use_distinct = use_distinct or lookup_needs_distinct(self.lookup_opts, field_path)
            if spec and spec.has_output():
                filter_specs.append(spec)

        if self.date_hierarchy:
            # Create bounded lookup parameters so that the query is more
            # efficient.
            year = lookup_params.pop('%s__year' % self.date_hierarchy, None)
            if year is not None:
                month = lookup_params.pop('%s__month' % self.date_hierarchy, None)
                day = lookup_params.pop('%s__day' % self.date_hierarchy, None)
                try:
                    from_date = datetime(
                        int(year),
                        int(month if month is not None else 1),
                        int(day if day is not None else 1),
                    )
                except ValueError as e:
                    raise IncorrectLookupParameters(e) from e
                if settings.USE_TZ:
                    from_date = make_aware(from_date)
                if day:
                    to_date = from_date + timedelta(days=1)
                elif month:
                    # In this branch, from_date will always be the first of a
                    # month, so advancing 32 days gives the next month.
                    to_date = (from_date + timedelta(days=32)).replace(day=1)
                else:
                    to_date = from_date.replace(year=from_date.year + 1)
                lookup_params.update({
                    '%s__gte' % self.date_hierarchy: from_date,
                    '%s__lt' % self.date_hierarchy: to_date,
                })

        # At this point, all the parameters used by the various ListFilters
        # have been removed from lookup_params, which now only contains other
        # parameters passed via the query string. We now loop through the
        # remaining parameters both to ensure that all the parameters are valid
        # fields and to determine if at least one of them needs distinct(). If
        # the lookup parameters aren't real fields, then bail out.
        try:
            for key, value in lookup_params.items():
                lookup_params[key] = prepare_lookup_value(key, value)
                use_distinct = use_distinct or lookup_needs_distinct(self.lookup_opts, key)
            return filter_specs, bool(filter_specs), lookup_params, use_distinct
        except FieldDoesNotExist as e:
            raise IncorrectLookupParameters(e) from e

    def get_query_string(self, new_params=None, remove=None):
        if new_params is None:
            new_params = {}
        if remove is None:
            remove = []
        p = self.params.copy()
        for r in remove:
            for k in list(p):
                if k.startswith(r):
                    del p[k]
        for k, v in new_params.items():
            if v is None:
                if k in p:
                    del p[k]
            else:
                p[k] = v
        return '?%s' % urlencode(sorted(p.items()))

    def get_results(self, request):
        paginator = self.model_admin.get_paginator(request, self.queryset, self.list_per_page)
        # Get the number of objects, with admin filters applied.
        result_count = paginator.count

        # Get the total number of objects, with no admin filters applied.
        if self.model_admin.show_full_result_count:
            full_result_count = self.root_queryset.count()
        else:
            full_result_count = None
        can_show_all = result_count <= self.list_max_show_all
        multi_page = result_count > self.list_per_page

        # Get the list of objects to display on this page.
        if (self.show_all and can_show_all) or not multi_page:
            result_list = self.queryset._clone()
        else:
            try:
                result_list = paginator.page(self.page_num + 1).object_list
            except InvalidPage:
                raise IncorrectLookupParameters

        self.result_count = result_count
        self.show_full_result_count = self.model_admin.show_full_result_count
        # Admin actions are shown if there is at least one entry
        # or if entries are not counted because show_full_result_count is disabled
        self.show_admin_actions = not self.show_full_result_count or bool(full_result_count)
        self.full_result_count = full_result_count
        self.result_list = result_list
        self.can_show_all = can_show_all
        self.multi_page = multi_page
        self.paginator = paginator

    def _get_default_ordering(self):
        ordering = []
        if self.model_admin.ordering:
            ordering = self.model_admin.ordering
        elif self.lookup_opts.ordering:
            ordering = self.lookup_opts.ordering
        return ordering

    def get_ordering_field(self, field_name):
        """
        Return the proper model field name corresponding to the given
        field_name to use for ordering. field_name may either be the name of a
        proper model field or the name of a method (on the admin or model) or a
        callable with the 'admin_order_field' attribute. Return None if no
        proper model field name can be matched.
        """
        try:
            field = self.lookup_opts.get_field(field_name)
            return field.name
        except FieldDoesNotExist:
            # See whether field_name is a name of a non-field
            # that allows sorting.
            if callable(field_name):
                attr = field_name
            elif hasattr(self.model_admin, field_name):
                attr = getattr(self.model_admin, field_name)
            else:
                attr = getattr(self.model, field_name)
            if isinstance(attr, property) and hasattr(attr, 'fget'):
                attr = attr.fget
            return getattr(attr, 'admin_order_field', None)

    def get_ordering(self, request, queryset):
        """
        Return the list of ordering fields for the change list. First check the
        get_ordering() method in model admin, then check the object's default
        ordering. Then, any manually-specified ordering from the query string
        overrides anything. Finally, a deterministic order is guaranteed by
        calling _get_deterministic_ordering() with the constructed ordering.
        """
        params = self.params
        ordering = list(self.model_admin.get_ordering(request) or self._get_default_ordering())
        if ORDER_VAR in params:
            # Clear ordering and used params
            ordering = []
            order_params = params[ORDER_VAR].split('.')
            for p in order_params:
                try:
                    none, pfx, idx = p.rpartition('-')
                    field_name = self.list_display[int(idx)]
                    order_field = self.get_ordering_field(field_name)
                    if not order_field:
                        continue  # No 'admin_order_field', skip it
                    if hasattr(order_field, 'as_sql'):
                        # order_field is an expression.
                        ordering.append(order_field.desc() if pfx == '-' else order_field.asc())
                    # reverse order if order_field has already "-" as prefix
                    elif order_field.startswith('-') and pfx == '-':
                        ordering.append(order_field[1:])
                    else:
                        ordering.append(pfx + order_field)
                except (IndexError, ValueError):
                    continue  # Invalid ordering specified, skip it.

        # Add the given query's ordering fields, if any.
        ordering.extend(queryset.query.order_by)

        return self._get_deterministic_ordering(ordering)

    def _get_deterministic_ordering(self, ordering):
        """
        Ensure a deterministic order across all database backends. Search for a
        single field or unique together set of fields providing a total
        ordering. If these are missing, augment the ordering with a descending
        primary key.
        """
        ordering = list(ordering)
        ordering_fields = set()
        total_ordering_fields = {'pk'} | {
            field.attname for field in self.lookup_opts.fields
            if field.unique and not field.null
        }
        for part in ordering:
            # Search for single field providing a total ordering.
            field_name = None
            if isinstance(part, str):
                field_name = part.lstrip('-')
            elif isinstance(part, F):
                field_name = part.name
            elif isinstance(part, OrderBy) and isinstance(part.expression, F):
                field_name = part.expression.name
            if field_name:
                # Normalize attname references by using get_field().
                try:
                    field = self.lookup_opts.get_field(field_name)
                except FieldDoesNotExist:
                    # Could be "?" for random ordering or a related field
                    # lookup. Skip this part of introspection for now.
                    continue
                # Ordering by a related field name orders by the referenced
                # model's ordering. Skip this part of introspection for now.
                if field.remote_field and field_name == field.name:
                    continue
                if field.attname in total_ordering_fields:
                    break
                ordering_fields.add(field.attname)
        else:
            # No single total ordering field, try unique_together.
            for field_names in self.lookup_opts.unique_together:
                # Normalize attname references by using get_field().
                fields = [self.lookup_opts.get_field(field_name) for field_name in field_names]
                # Composite unique constraints containing a nullable column
                # cannot ensure total ordering.
                if any(field.null for field in fields):
                    continue
                if ordering_fields.issuperset(field.attname for field in fields):
                    break
            else:
                # If no set of unique fields is present in the ordering, rely
                # on the primary key to provide total ordering.
                ordering.append('-pk')
        return ordering

    def get_ordering_field_columns(self):
        """
        Return a dictionary of ordering field column numbers and asc/desc.
        """
        # We must cope with more than one column having the same underlying sort
        # field, so we base things on column numbers.
        ordering = self._get_default_ordering()
        ordering_fields = {}
        if ORDER_VAR not in self.params:
            # for ordering specified on ModelAdmin or model Meta, we don't know
            # the right column numbers absolutely, because there might be more
            # than one column associated with that ordering, so we guess.
            for field in ordering:
                if isinstance(field, (Combinable, OrderBy)):
                    if not isinstance(field, OrderBy):
                        field = field.asc()
                    if isinstance(field.expression, F):
                        order_type = 'desc' if field.descending else 'asc'
                        field = field.expression.name
                    else:
                        continue
                elif field.startswith('-'):
                    field = field[1:]
                    order_type = 'desc'
                else:
                    order_type = 'asc'
                for index, attr in enumerate(self.list_display):
                    if self.get_ordering_field(attr) == field:
                        ordering_fields[index] = order_type
                        break
        else:
            for p in self.params[ORDER_VAR].split('.'):
                none, pfx, idx = p.rpartition('-')
                try:
                    idx = int(idx)
                except ValueError:
                    continue  # skip it
                ordering_fields[idx] = 'desc' if pfx == '-' else 'asc'
        return ordering_fields

    def get_queryset(self, request):
        # First, we collect all the declared list filters.
        (self.filter_specs, self.has_filters, remaining_lookup_params,
         filters_use_distinct) = self.get_filters(request)

        # Then, we let every list filter modify the queryset to its liking.
        qs = self.root_queryset
        for filter_spec in self.filter_specs:
            new_qs = filter_spec.queryset(request, qs)
            if new_qs is not None:
                qs = new_qs

        try:
            # Finally, we apply the remaining lookup parameters from the query
            # string (i.e. those that haven't already been processed by the
            # filters).
            qs = qs.filter(**remaining_lookup_params)
        except (SuspiciousOperation, ImproperlyConfigured):
            # Allow certain types of errors to be re-raised as-is so that the
            # caller can treat them in a special way.
            raise
        except Exception as e:
            # Every other error is caught with a naked except, because we don't
            # have any other way of validating lookup parameters. They might be
            # invalid if the keyword arguments are incorrect, or if the values
            # are not in the correct type, so we might get FieldError,
            # ValueError, ValidationError, or ?.
            raise IncorrectLookupParameters(e)

        if not qs.query.select_related:
            qs = self.apply_select_related(qs)

        # Set ordering.
        ordering = self.get_ordering(request, qs)
        qs = qs.order_by(*ordering)

        # Apply search results
        qs, search_use_distinct = self.model_admin.get_search_results(request, qs, self.query)

        # Remove duplicates from results, if necessary
        if filters_use_distinct | search_use_distinct:
            return qs.distinct()
        else:
            return qs

    def apply_select_related(self, qs):
        if self.list_select_related is True:
            return qs.select_related()

        if self.list_select_related is False:
            if self.has_related_field_in_list_display():
                return qs.select_related()

        if self.list_select_related:
            return qs.select_related(*self.list_select_related)
        return qs

    def has_related_field_in_list_display(self):
        for field_name in self.list_display:
            try:
                field = self.lookup_opts.get_field(field_name)
            except FieldDoesNotExist:
                pass
            else:
                if isinstance(field.remote_field, models.ManyToOneRel):
                    # <FK>_id field names don't require a join.
                    if field_name != field.get_attname():
                        return True
        return False

    def url_for_result(self, result):
        pk = getattr(result, self.pk_attname)
        return reverse('admin:%s_%s_change' % (self.opts.app_label,
                                               self.opts.model_name),
                       args=(quote(pk),),
                       current_app=self.model_admin.admin_site.name)
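A minimal sketch of how a project hooks into the class above (the Book model and admin are hypothetical): ModelAdmin.get_changelist() is the supported override point for substituting a ChangeList subclass, e.g. to customize url_for_result():

    from django.contrib import admin
    from django.contrib.admin.views.main import ChangeList

    from .models import Book  # hypothetical model


    class BookChangeList(ChangeList):
        def url_for_result(self, result):
            # The default reverses 'admin:<app>_<model>_change';
            # override here to route rows somewhere else.
            return super().url_for_result(result)


    @admin.register(Book)
    class BookAdmin(admin.ModelAdmin):
        def get_changelist(self, request, **kwargs):
            return BookChangeList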
8e51ca52f46dd3cc791a396e43eb7aef1dea5e87afacd56302cc7c6dac746f98
from django.http import Http404, JsonResponse
from django.views.generic.list import BaseListView


class AutocompleteJsonView(BaseListView):
    """Handle AutocompleteWidget's AJAX requests for data."""
    paginate_by = 20
    model_admin = None

    def get(self, request, *args, **kwargs):
        """
        Return a JsonResponse with search results of the form:
        {
            results: [{id: "123", text: "foo"}],
            pagination: {more: true}
        }
        """
        if not self.model_admin.get_search_fields(request):
            raise Http404(
                '%s must have search_fields for the autocomplete_view.' %
                type(self.model_admin).__name__
            )
        if not self.has_perm(request):
            return JsonResponse({'error': '403 Forbidden'}, status=403)

        self.term = request.GET.get('term', '')
        self.object_list = self.get_queryset()
        context = self.get_context_data()
        return JsonResponse({
            'results': [
                {'id': str(obj.pk), 'text': str(obj)}
                for obj in context['object_list']
            ],
            'pagination': {'more': context['page_obj'].has_next()},
        })

    def get_paginator(self, *args, **kwargs):
        """Use the ModelAdmin's paginator."""
        return self.model_admin.get_paginator(self.request, *args, **kwargs)

    def get_queryset(self):
        """Return queryset based on ModelAdmin.get_search_results()."""
        qs = self.model_admin.get_queryset(self.request)
        qs, search_use_distinct = self.model_admin.get_search_results(self.request, qs, self.term)
        if search_use_distinct:
            qs = qs.distinct()
        return qs

    def has_perm(self, request, obj=None):
        """Check if user has permission to access the related model."""
        return self.model_admin.has_view_permission(request, obj=obj)
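A minimal sketch of the admin configuration this view serves (Author and Book are hypothetical models): the admin wires up AutocompleteJsonView automatically for autocomplete_fields, and the referenced model's admin must declare search_fields or the view raises Http404:

    from django.contrib import admin

    from .models import Author, Book  # hypothetical models


    @admin.register(Author)
    class AuthorAdmin(admin.ModelAdmin):
        search_fields = ['name']  # required by AutocompleteJsonView


    @admin.register(Book)
    class BookAdmin(admin.ModelAdmin):
        autocomplete_fields = ['author']  # renders the AJAX-backed select widget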
920fa9ef371044a0d8fdc026a8584a501a09322b9505e52411dcfa5276fa1bb5
import datetime

from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.utils import (
    display_for_field, display_for_value, label_for_field, lookup_field,
)
from django.contrib.admin.views.main import (
    ALL_VAR, ORDER_VAR, PAGE_VAR, SEARCH_VAR,
)
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.template import Library
from django.template.loader import get_template
from django.templatetags.static import static
from django.urls import NoReverseMatch
from django.utils import formats
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import gettext as _

from .base import InclusionAdminNode

register = Library()

DOT = '.'


@register.simple_tag
def paginator_number(cl, i):
    """
    Generate an individual page index link in a paginated list.
    """
    if i == DOT:
        return '… '
    elif i == cl.page_num:
        return format_html('<span class="this-page">{}</span> ', i + 1)
    else:
        return format_html(
            '<a href="{}"{}>{}</a> ',
            cl.get_query_string({PAGE_VAR: i}),
            mark_safe(' class="end"' if i == cl.paginator.num_pages - 1 else ''),
            i + 1,
        )


def pagination(cl):
    """
    Generate the series of links to the pages in a paginated list.
    """
    paginator, page_num = cl.paginator, cl.page_num

    pagination_required = (not cl.show_all or not cl.can_show_all) and cl.multi_page
    if not pagination_required:
        page_range = []
    else:
        ON_EACH_SIDE = 3
        ON_ENDS = 2

        # If there are 10 or fewer pages, display links to every page.
        # Otherwise, do some fancy "smart" pagination.
        if paginator.num_pages <= 10:
            page_range = range(paginator.num_pages)
        else:
            # Insert "smart" pagination links, so that there are always ON_ENDS
            # links at either end of the list of pages, and there are always
            # ON_EACH_SIDE links at either end of the "current page" link.
            page_range = []
            if page_num > (ON_EACH_SIDE + ON_ENDS):
                page_range += [
                    *range(0, ON_ENDS), DOT,
                    *range(page_num - ON_EACH_SIDE, page_num + 1),
                ]
            else:
                page_range.extend(range(0, page_num + 1))
            if page_num < (paginator.num_pages - ON_EACH_SIDE - ON_ENDS - 1):
                page_range += [
                    *range(page_num + 1, page_num + ON_EACH_SIDE + 1), DOT,
                    *range(paginator.num_pages - ON_ENDS, paginator.num_pages)
                ]
            else:
                page_range.extend(range(page_num + 1, paginator.num_pages))

    need_show_all_link = cl.can_show_all and not cl.show_all and cl.multi_page
    return {
        'cl': cl,
        'pagination_required': pagination_required,
        'show_all_url': need_show_all_link and cl.get_query_string({ALL_VAR: ''}),
        'page_range': page_range,
        'ALL_VAR': ALL_VAR,
        '1': 1,
    }


@register.tag(name='pagination')
def pagination_tag(parser, token):
    return InclusionAdminNode(
        parser, token,
        func=pagination,
        template_name='pagination.html',
        takes_context=False,
    )


def result_headers(cl):
    """
    Generate the list column headers.
    """
    ordering_field_columns = cl.get_ordering_field_columns()
    for i, field_name in enumerate(cl.list_display):
        text, attr = label_for_field(
            field_name, cl.model,
            model_admin=cl.model_admin,
            return_attr=True
        )
        is_field_sortable = cl.sortable_by is None or field_name in cl.sortable_by
        if attr:
            field_name = _coerce_field_name(field_name, i)
            # Potentially not sortable

            # if the field is the action checkbox: no sorting and special class
            if field_name == 'action_checkbox':
                yield {
                    "text": text,
                    "class_attrib": mark_safe(' class="action-checkbox-column"'),
                    "sortable": False,
                }
                continue

            admin_order_field = getattr(attr, "admin_order_field", None)
            # Set ordering for attr that is a property, if defined.
            if isinstance(attr, property) and hasattr(attr, 'fget'):
                admin_order_field = getattr(attr.fget, 'admin_order_field', None)
            if not admin_order_field:
                is_field_sortable = False

        if not is_field_sortable:
            # Not sortable
            yield {
                'text': text,
                'class_attrib': format_html(' class="column-{}"', field_name),
                'sortable': False,
            }
            continue

        # OK, it is sortable if we got this far
        th_classes = ['sortable', 'column-{}'.format(field_name)]
        order_type = ''
        new_order_type = 'asc'
        sort_priority = 0
        # Is it currently being sorted on?
        is_sorted = i in ordering_field_columns
        if is_sorted:
            order_type = ordering_field_columns.get(i).lower()
            sort_priority = list(ordering_field_columns).index(i) + 1
            th_classes.append('sorted %sending' % order_type)
            new_order_type = {'asc': 'desc', 'desc': 'asc'}[order_type]

        # build new ordering param
        o_list_primary = []  # URL for making this field the primary sort
        o_list_remove = []  # URL for removing this field from sort
        o_list_toggle = []  # URL for toggling order type for this field

        def make_qs_param(t, n):
            return ('-' if t == 'desc' else '') + str(n)

        for j, ot in ordering_field_columns.items():
            if j == i:  # Same column
                param = make_qs_param(new_order_type, j)
                # We want clicking on this header to bring the ordering to the
                # front
                o_list_primary.insert(0, param)
                o_list_toggle.append(param)
                # o_list_remove - omit
            else:
                param = make_qs_param(ot, j)
                o_list_primary.append(param)
                o_list_toggle.append(param)
                o_list_remove.append(param)

        if i not in ordering_field_columns:
            o_list_primary.insert(0, make_qs_param(new_order_type, i))

        yield {
            "text": text,
            "sortable": True,
            "sorted": is_sorted,
            "ascending": order_type == "asc",
            "sort_priority": sort_priority,
            "url_primary": cl.get_query_string({ORDER_VAR: '.'.join(o_list_primary)}),
            "url_remove": cl.get_query_string({ORDER_VAR: '.'.join(o_list_remove)}),
            "url_toggle": cl.get_query_string({ORDER_VAR: '.'.join(o_list_toggle)}),
            "class_attrib": format_html(' class="{}"', ' '.join(th_classes)) if th_classes else '',
        }


def _boolean_icon(field_val):
    icon_url = static('admin/img/icon-%s.svg' % {True: 'yes', False: 'no', None: 'unknown'}[field_val])
    return format_html('<img src="{}" alt="{}">', icon_url, field_val)


def _coerce_field_name(field_name, field_index):
    """
    Coerce a field_name (which may be a callable) to a string.
    """
    if callable(field_name):
        if field_name.__name__ == '<lambda>':
            return 'lambda' + str(field_index)
        else:
            return field_name.__name__
    return field_name


def items_for_result(cl, result, form):
    """
    Generate the actual list of data.
    """
    def link_in_col(is_first, field_name, cl):
        if cl.list_display_links is None:
            return False
        if is_first and not cl.list_display_links:
            return True
        return field_name in cl.list_display_links

    first = True
    pk = cl.lookup_opts.pk.attname
    for field_index, field_name in enumerate(cl.list_display):
        empty_value_display = cl.model_admin.get_empty_value_display()
        row_classes = ['field-%s' % _coerce_field_name(field_name, field_index)]
        try:
            f, attr, value = lookup_field(field_name, result, cl.model_admin)
        except ObjectDoesNotExist:
            result_repr = empty_value_display
        else:
            empty_value_display = getattr(attr, 'empty_value_display', empty_value_display)
            if f is None or f.auto_created:
                if field_name == 'action_checkbox':
                    row_classes = ['action-checkbox']
                boolean = getattr(attr, 'boolean', False)
                result_repr = display_for_value(value, empty_value_display, boolean)
                if isinstance(value, (datetime.date, datetime.time)):
                    row_classes.append('nowrap')
            else:
                if isinstance(f.remote_field, models.ManyToOneRel):
                    field_val = getattr(result, f.name)
                    if field_val is None:
                        result_repr = empty_value_display
                    else:
                        result_repr = field_val
                else:
                    result_repr = display_for_field(value, f, empty_value_display)
                if isinstance(f, (models.DateField, models.TimeField, models.ForeignKey)):
                    row_classes.append('nowrap')
        if str(result_repr) == '':
            result_repr = mark_safe('&nbsp;')
        row_class = mark_safe(' class="%s"' % ' '.join(row_classes))
        # If list_display_links not defined, add the link tag to the first field
        if link_in_col(first, field_name, cl):
            table_tag = 'th' if first else 'td'
            first = False

            # Display link to the result's change_view if the url exists, else
            # display just the result's representation.
            try:
                url = cl.url_for_result(result)
            except NoReverseMatch:
                link_or_text = result_repr
            else:
                url = add_preserved_filters({'preserved_filters': cl.preserved_filters, 'opts': cl.opts}, url)
                # Convert the pk to something that can be used in Javascript.
                # Problem cases are non-ASCII strings.
                if cl.to_field:
                    attr = str(cl.to_field)
                else:
                    attr = pk
                value = result.serializable_value(attr)
                link_or_text = format_html(
                    '<a href="{}"{}>{}</a>',
                    url,
                    format_html(
                        ' data-popup-opener="{}"', value
                    ) if cl.is_popup else '',
                    result_repr)

            yield format_html('<{}{}>{}</{}>', table_tag, row_class, link_or_text, table_tag)
        else:
            # By default the fields come from ModelAdmin.list_editable, but if we pull
            # the fields out of the form instead of list_editable custom admins
            # can provide fields on a per request basis
            if (form and field_name in form.fields and not (
                    field_name == cl.model._meta.pk.name and
                    form[cl.model._meta.pk.name].is_hidden)):
                bf = form[field_name]
                result_repr = mark_safe(str(bf.errors) + str(bf))
            yield format_html('<td{}>{}</td>', row_class, result_repr)
    if form and not form[cl.model._meta.pk.name].is_hidden:
        yield format_html('<td>{}</td>', form[cl.model._meta.pk.name])


class ResultList(list):
    """
    Wrapper class used to return items in a list_editable changelist, annotated
    with the form object for error reporting purposes. Needed to maintain
    backwards compatibility with existing admin templates.
    """
    def __init__(self, form, *items):
        self.form = form
        super().__init__(*items)


def results(cl):
    if cl.formset:
        for res, form in zip(cl.result_list, cl.formset.forms):
            yield ResultList(form, items_for_result(cl, res, form))
    else:
        for res in cl.result_list:
            yield ResultList(None, items_for_result(cl, res, None))


def result_hidden_fields(cl):
    if cl.formset:
        for res, form in zip(cl.result_list, cl.formset.forms):
            if form[cl.model._meta.pk.name].is_hidden:
                yield mark_safe(form[cl.model._meta.pk.name])


def result_list(cl):
    """
    Display the headers and data list together.
    """
    headers = list(result_headers(cl))
    num_sorted_fields = 0
    for h in headers:
        if h['sortable'] and h['sorted']:
            num_sorted_fields += 1
    return {
        'cl': cl,
        'result_hidden_fields': list(result_hidden_fields(cl)),
        'result_headers': headers,
        'num_sorted_fields': num_sorted_fields,
        'results': list(results(cl)),
    }


@register.tag(name='result_list')
def result_list_tag(parser, token):
    return InclusionAdminNode(
        parser, token,
        func=result_list,
        template_name='change_list_results.html',
        takes_context=False,
    )


def date_hierarchy(cl):
    """
    Display the date hierarchy for date drill-down functionality.
    """
    if cl.date_hierarchy:
        field_name = cl.date_hierarchy
        year_field = '%s__year' % field_name
        month_field = '%s__month' % field_name
        day_field = '%s__day' % field_name
        field_generic = '%s__' % field_name
        year_lookup = cl.params.get(year_field)
        month_lookup = cl.params.get(month_field)
        day_lookup = cl.params.get(day_field)

        def link(filters):
            return cl.get_query_string(filters, [field_generic])

        if not (year_lookup or month_lookup or day_lookup):
            # select appropriate start level
            date_range = cl.queryset.aggregate(first=models.Min(field_name),
                                               last=models.Max(field_name))
            if date_range['first'] and date_range['last']:
                if date_range['first'].year == date_range['last'].year:
                    year_lookup = date_range['first'].year
                    if date_range['first'].month == date_range['last'].month:
                        month_lookup = date_range['first'].month

        if year_lookup and month_lookup and day_lookup:
            day = datetime.date(int(year_lookup), int(month_lookup), int(day_lookup))
            return {
                'show': True,
                'back': {
                    'link': link({year_field: year_lookup, month_field: month_lookup}),
                    'title': capfirst(formats.date_format(day, 'YEAR_MONTH_FORMAT'))
                },
                'choices': [{'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))}]
            }
        elif year_lookup and month_lookup:
            days = getattr(cl.queryset, 'dates')(field_name, 'day')
            return {
                'show': True,
                'back': {
                    'link': link({year_field: year_lookup}),
                    'title': str(year_lookup)
                },
                'choices': [{
                    'link': link({year_field: year_lookup, month_field: month_lookup, day_field: day.day}),
                    'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))
                } for day in days]
            }
        elif year_lookup:
            months = getattr(cl.queryset, 'dates')(field_name, 'month')
            return {
                'show': True,
                'back': {
                    'link': link({}),
                    'title': _('All dates')
                },
                'choices': [{
                    'link': link({year_field: year_lookup, month_field: month.month}),
                    'title': capfirst(formats.date_format(month, 'YEAR_MONTH_FORMAT'))
                } for month in months]
            }
        else:
            years = getattr(cl.queryset, 'dates')(field_name, 'year')
            return {
                'show': True,
                'back': None,
                'choices': [{
                    'link': link({year_field: str(year.year)}),
                    'title': str(year.year),
                } for year in years]
            }


@register.tag(name='date_hierarchy')
def date_hierarchy_tag(parser, token):
    return InclusionAdminNode(
        parser, token,
        func=date_hierarchy,
        template_name='date_hierarchy.html',
        takes_context=False,
    )


def search_form(cl):
    """
    Display a search form for searching the list.
    """
    return {
        'cl': cl,
        'show_result_count': cl.result_count != cl.full_result_count,
        'search_var': SEARCH_VAR
    }


@register.tag(name='search_form')
def search_form_tag(parser, token):
    return InclusionAdminNode(parser, token, func=search_form, template_name='search_form.html', takes_context=False)


@register.simple_tag
def admin_list_filter(cl, spec):
    tpl = get_template(spec.template)
    return tpl.render({
        'title': spec.title,
        'choices': list(spec.choices(cl)),
        'spec': spec,
    })


def admin_actions(context):
    """
    Track the number of times the action field has been rendered on the page,
    so we know which value to use.
    """
    context['action_index'] = context.get('action_index', -1) + 1
    return context


@register.tag(name='admin_actions')
def admin_actions_tag(parser, token):
    return InclusionAdminNode(parser, token, func=admin_actions, template_name='actions.html')


@register.tag(name='change_list_object_tools')
def change_list_object_tools_tag(parser, token):
    """Display the row of change list object tools."""
    return InclusionAdminNode(
        parser, token,
        func=lambda context: context,
        template_name='change_list_object_tools.html',
    )
46517a05203c8349b9668260a229277001a045c71913a165572f33494d587171
import re
from datetime import date, datetime
from decimal import Decimal

from django import template
from django.conf import settings
from django.template import defaultfilters
from django.utils.formats import number_format
from django.utils.safestring import mark_safe
from django.utils.timezone import is_aware, utc
from django.utils.translation import (
    gettext as _, gettext_lazy, ngettext, ngettext_lazy, npgettext_lazy,
    pgettext, round_away_from_one,
)

register = template.Library()


@register.filter(is_safe=True)
def ordinal(value):
    """
    Convert an integer to its ordinal as a string. 1 is '1st', 2 is '2nd',
    3 is '3rd', etc. Works for any integer.
    """
    try:
        value = int(value)
    except (TypeError, ValueError):
        return value
    if value % 100 in (11, 12, 13):
        # Translators: Ordinal format for 11 (11th), 12 (12th), and 13 (13th).
        value = pgettext('ordinal 11, 12, 13', '{}th').format(value)
    else:
        templates = (
            # Translators: Ordinal format when value ends with 0, e.g. 80th.
            pgettext('ordinal 0', '{}th'),
            # Translators: Ordinal format when value ends with 1, e.g. 81st, except 11.
            pgettext('ordinal 1', '{}st'),
            # Translators: Ordinal format when value ends with 2, e.g. 82nd, except 12.
            pgettext('ordinal 2', '{}nd'),
            # Translators: Ordinal format when value ends with 3, e.g. 83rd, except 13.
            pgettext('ordinal 3', '{}rd'),
            # Translators: Ordinal format when value ends with 4, e.g. 84th.
            pgettext('ordinal 4', '{}th'),
            # Translators: Ordinal format when value ends with 5, e.g. 85th.
            pgettext('ordinal 5', '{}th'),
            # Translators: Ordinal format when value ends with 6, e.g. 86th.
            pgettext('ordinal 6', '{}th'),
            # Translators: Ordinal format when value ends with 7, e.g. 87th.
            pgettext('ordinal 7', '{}th'),
            # Translators: Ordinal format when value ends with 8, e.g. 88th.
            pgettext('ordinal 8', '{}th'),
            # Translators: Ordinal format when value ends with 9, e.g. 89th.
            pgettext('ordinal 9', '{}th'),
        )
        value = templates[value % 10].format(value)
    # Mark value safe so i18n does not break with <sup> or <sub> see #19988
    return mark_safe(value)


@register.filter(is_safe=True)
def intcomma(value, use_l10n=True):
    """
    Convert an integer to a string containing commas every three digits.
    For example, 3000 becomes '3,000' and 45000 becomes '45,000'.
    """
    if settings.USE_L10N and use_l10n:
        try:
            if not isinstance(value, (float, Decimal)):
                value = int(value)
        except (TypeError, ValueError):
            return intcomma(value, False)
        else:
            return number_format(value, force_grouping=True)
    orig = str(value)
    new = re.sub(r"^(-?\d+)(\d{3})", r'\g<1>,\g<2>', orig)
    if orig == new:
        return new
    else:
        return intcomma(new, use_l10n)


# A tuple of standard large number to their converters
intword_converters = (
    (6, lambda number: (
        ngettext('%(value).1f million', '%(value).1f million', number),
        ngettext('%(value)s million', '%(value)s million', number),
    )),
    (9, lambda number: (
        ngettext('%(value).1f billion', '%(value).1f billion', number),
        ngettext('%(value)s billion', '%(value)s billion', number),
    )),
    (12, lambda number: (
        ngettext('%(value).1f trillion', '%(value).1f trillion', number),
        ngettext('%(value)s trillion', '%(value)s trillion', number),
    )),
    (15, lambda number: (
        ngettext('%(value).1f quadrillion', '%(value).1f quadrillion', number),
        ngettext('%(value)s quadrillion', '%(value)s quadrillion', number),
    )),
    (18, lambda number: (
        ngettext('%(value).1f quintillion', '%(value).1f quintillion', number),
        ngettext('%(value)s quintillion', '%(value)s quintillion', number),
    )),
    (21, lambda number: (
        ngettext('%(value).1f sextillion', '%(value).1f sextillion', number),
        ngettext('%(value)s sextillion', '%(value)s sextillion', number),
    )),
    (24, lambda number: (
        ngettext('%(value).1f septillion', '%(value).1f septillion', number),
        ngettext('%(value)s septillion', '%(value)s septillion', number),
    )),
    (27, lambda number: (
        ngettext('%(value).1f octillion', '%(value).1f octillion', number),
        ngettext('%(value)s octillion', '%(value)s octillion', number),
    )),
    (30, lambda number: (
        ngettext('%(value).1f nonillion', '%(value).1f nonillion', number),
        ngettext('%(value)s nonillion', '%(value)s nonillion', number),
    )),
    (33, lambda number: (
        ngettext('%(value).1f decillion', '%(value).1f decillion', number),
        ngettext('%(value)s decillion', '%(value)s decillion', number),
    )),
    (100, lambda number: (
        ngettext('%(value).1f googol', '%(value).1f googol', number),
        ngettext('%(value)s googol', '%(value)s googol', number),
    )),
)


@register.filter(is_safe=False)
def intword(value):
    """
    Convert a large integer to a friendly text representation. Works best
    for numbers over 1 million. For example, 1000000 becomes '1.0 million',
    1200000 becomes '1.2 million' and '1200000000' becomes '1.2 billion'.
    """
    try:
        value = int(value)
    except (TypeError, ValueError):
        return value

    if value < 1000000:
        return value

    def _check_for_i18n(value, float_formatted, string_formatted):
        """
        Use the i18n enabled defaultfilters.floatformat if possible
        """
        if settings.USE_L10N:
            value = defaultfilters.floatformat(value, 1)
            template = string_formatted
        else:
            template = float_formatted
        return template % {'value': value}

    for exponent, converters in intword_converters:
        large_number = 10 ** exponent
        if value < large_number * 1000:
            new_value = value / large_number
            rounded_value = round_away_from_one(new_value)
            return _check_for_i18n(new_value, *converters(rounded_value))
    return value


@register.filter(is_safe=True)
def apnumber(value):
    """
    For numbers 1-9, return the number spelled out. Otherwise, return the
    number. This follows Associated Press style.
    """
    try:
        value = int(value)
    except (TypeError, ValueError):
        return value
    if not 0 < value < 10:
        return value
    return (_('one'), _('two'), _('three'), _('four'), _('five'),
            _('six'), _('seven'), _('eight'), _('nine'))[value - 1]


# Perform the comparison in the default time zone when USE_TZ = True
# (unless a specific time zone has been applied with the |timezone filter).
@register.filter(expects_localtime=True)
def naturalday(value, arg=None):
    """
    For date values that are tomorrow, today, or yesterday compared to the
    present day, return a representative string. Otherwise, return a string
    formatted according to settings.DATE_FORMAT.
    """
    tzinfo = getattr(value, 'tzinfo', None)
    try:
        value = date(value.year, value.month, value.day)
    except AttributeError:
        # Passed value wasn't a date object
        return value
    today = datetime.now(tzinfo).date()
    delta = value - today
    if delta.days == 0:
        return _('today')
    elif delta.days == 1:
        return _('tomorrow')
    elif delta.days == -1:
        return _('yesterday')
    return defaultfilters.date(value, arg)


# This filter doesn't require expects_localtime=True because it deals properly
# with both naive and aware datetimes. Therefore avoid the cost of conversion.
@register.filter
def naturaltime(value):
    """
    For date and time values, return a representative string showing how many
    seconds, minutes, or hours ago they occurred compared to the current
    timestamp.
    """
    return NaturalTimeFormatter.string_for(value)


class NaturalTimeFormatter:
    time_strings = {
        # Translators: delta will contain a string like '2 months' or '1 month, 2 weeks'
        'past-day': gettext_lazy('%(delta)s ago'),
        # Translators: please keep a non-breaking space (U+00A0) between count
        # and time unit.
        'past-hour': ngettext_lazy('an hour ago', '%(count)s hours ago', 'count'),
        # Translators: please keep a non-breaking space (U+00A0) between count
        # and time unit.
        'past-minute': ngettext_lazy('a minute ago', '%(count)s minutes ago', 'count'),
        # Translators: please keep a non-breaking space (U+00A0) between count
        # and time unit.
        'past-second': ngettext_lazy('a second ago', '%(count)s seconds ago', 'count'),
        'now': gettext_lazy('now'),
        # Translators: please keep a non-breaking space (U+00A0) between count
        # and time unit.
        'future-second': ngettext_lazy('a second from now', '%(count)s seconds from now', 'count'),
        # Translators: please keep a non-breaking space (U+00A0) between count
        # and time unit.
        'future-minute': ngettext_lazy('a minute from now', '%(count)s minutes from now', 'count'),
        # Translators: please keep a non-breaking space (U+00A0) between count
        # and time unit.
        'future-hour': ngettext_lazy('an hour from now', '%(count)s hours from now', 'count'),
        # Translators: delta will contain a string like '2 months' or '1 month, 2 weeks'
        'future-day': gettext_lazy('%(delta)s from now'),
    }
    past_substrings = {
        # Translators: 'naturaltime-past' strings will be included in '%(delta)s ago'
        'year': npgettext_lazy('naturaltime-past', '%d year', '%d years'),
        'month': npgettext_lazy('naturaltime-past', '%d month', '%d months'),
        'week': npgettext_lazy('naturaltime-past', '%d week', '%d weeks'),
        'day': npgettext_lazy('naturaltime-past', '%d day', '%d days'),
        'hour': npgettext_lazy('naturaltime-past', '%d hour', '%d hours'),
        'minute': npgettext_lazy('naturaltime-past', '%d minute', '%d minutes'),
    }
    future_substrings = {
        # Translators: 'naturaltime-future' strings will be included in '%(delta)s from now'
        'year': npgettext_lazy('naturaltime-future', '%d year', '%d years'),
        'month': npgettext_lazy('naturaltime-future', '%d month', '%d months'),
        'week': npgettext_lazy('naturaltime-future', '%d week', '%d weeks'),
        'day': npgettext_lazy('naturaltime-future', '%d day', '%d days'),
        'hour': npgettext_lazy('naturaltime-future', '%d hour', '%d hours'),
        'minute': npgettext_lazy('naturaltime-future', '%d minute', '%d minutes'),
    }

    @classmethod
    def string_for(cls, value):
        if not isinstance(value, date):  # datetime is a subclass of date
            return value

        now = datetime.now(utc if is_aware(value) else None)
        if value < now:
            delta = now - value
            if delta.days != 0:
                return cls.time_strings['past-day'] % {
                    'delta': defaultfilters.timesince(value, now, time_strings=cls.past_substrings),
                }
            elif delta.seconds == 0:
                return cls.time_strings['now']
            elif delta.seconds < 60:
                return cls.time_strings['past-second'] % {'count': delta.seconds}
            elif delta.seconds // 60 < 60:
                count = delta.seconds // 60
                return cls.time_strings['past-minute'] % {'count': count}
            else:
                count = delta.seconds // 60 // 60
                return cls.time_strings['past-hour'] % {'count': count}
        else:
            delta = value - now
            if delta.days != 0:
                return cls.time_strings['future-day'] % {
                    'delta': defaultfilters.timeuntil(value, now, time_strings=cls.future_substrings),
                }
            elif delta.seconds == 0:
                return cls.time_strings['now']
            elif delta.seconds < 60:
                return cls.time_strings['future-second'] % {'count': delta.seconds}
            elif delta.seconds // 60 < 60:
                count = delta.seconds // 60
                return cls.time_strings['future-minute'] % {'count': count}
            else:
                count = delta.seconds // 60 // 60
                return cls.time_strings['future-hour'] % {'count': count}
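A minimal sketch of calling the filters above directly from Python (a hedged example: it assumes a configured project with USE_L10N disabled so the unlocalized code paths run; expected values follow the docstrings):

    from django.contrib.humanize.templatetags.humanize import (
        apnumber, intcomma, intword, ordinal,
    )

    print(ordinal(3))                       # '3rd'
    print(intcomma(45000, use_l10n=False))  # '45,000'
    print(intword(1200000))                 # '1.2 million'
    print(apnumber(7))                      # 'seven'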
f2e6e31e3ba8eccd3751b8b01a42853f07d80db4986dd7a3f429cffbee2eb71b
import os from django.apps import apps from django.contrib.staticfiles.finders import get_finders from django.contrib.staticfiles.storage import staticfiles_storage from django.core.files.storage import FileSystemStorage from django.core.management.base import BaseCommand, CommandError from django.core.management.color import no_style from django.utils.functional import cached_property class Command(BaseCommand): """ Copies or symlinks static files from different locations to the settings.STATIC_ROOT. """ help = "Collect static files in a single location." requires_system_checks = False def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.copied_files = [] self.symlinked_files = [] self.unmodified_files = [] self.post_processed_files = [] self.storage = staticfiles_storage self.style = no_style() @cached_property def local(self): try: self.storage.path('') except NotImplementedError: return False return True def add_arguments(self, parser): parser.add_argument( '--noinput', '--no-input', action='store_false', dest='interactive', help="Do NOT prompt the user for input of any kind.", ) parser.add_argument( '--no-post-process', action='store_false', dest='post_process', help="Do NOT post process collected files.", ) parser.add_argument( '-i', '--ignore', action='append', default=[], dest='ignore_patterns', metavar='PATTERN', help="Ignore files or directories matching this glob-style " "pattern. Use multiple times to ignore more.", ) parser.add_argument( '-n', '--dry-run', action='store_true', help="Do everything except modify the filesystem.", ) parser.add_argument( '-c', '--clear', action='store_true', help="Clear the existing files using the storage " "before trying to copy or link the original file.", ) parser.add_argument( '-l', '--link', action='store_true', help="Create a symbolic link to each file instead of copying.", ) parser.add_argument( '--no-default-ignore', action='store_false', dest='use_default_ignore_patterns', help="Don't ignore the common private glob-style patterns (defaults to 'CVS', '.*' and '*~').", ) def set_options(self, **options): """ Set instance variables based on an options dict """ self.interactive = options['interactive'] self.verbosity = options['verbosity'] self.symlink = options['link'] self.clear = options['clear'] self.dry_run = options['dry_run'] ignore_patterns = options['ignore_patterns'] if options['use_default_ignore_patterns']: ignore_patterns += apps.get_app_config('staticfiles').ignore_patterns self.ignore_patterns = list(set(os.path.normpath(p) for p in ignore_patterns)) self.post_process = options['post_process'] def collect(self): """ Perform the bulk of the work of collectstatic. Split off from handle() to facilitate testing. """ if self.symlink and not self.local: raise CommandError("Can't symlink to a remote destination.") if self.clear: self.clear_dir('') if self.symlink: handler = self.link_file else: handler = self.copy_file found_files = {} for finder in get_finders(): for path, storage in finder.list(self.ignore_patterns): # Prefix the relative path if the source storage contains it if getattr(storage, 'prefix', None): prefixed_path = os.path.join(storage.prefix, path) else: prefixed_path = path if prefixed_path not in found_files: found_files[prefixed_path] = (storage, path) handler(path, prefixed_path, storage) else: self.log( "Found another file with the destination path '%s'. It " "will be ignored since only the first encountered file " "is collected. 
If this is not what you want, make sure " "every static file has a unique path." % prefixed_path, level=1, ) # Storage backends may define a post_process() method. if self.post_process and hasattr(self.storage, 'post_process'): processor = self.storage.post_process(found_files, dry_run=self.dry_run) for original_path, processed_path, processed in processor: if isinstance(processed, Exception): self.stderr.write("Post-processing '%s' failed!" % original_path) # Add a blank line before the traceback, otherwise it's # too easy to miss the relevant part of the error message. self.stderr.write("") raise processed if processed: self.log("Post-processed '%s' as '%s'" % (original_path, processed_path), level=2) self.post_processed_files.append(original_path) else: self.log("Skipped post-processing '%s'" % original_path) return { 'modified': self.copied_files + self.symlinked_files, 'unmodified': self.unmodified_files, 'post_processed': self.post_processed_files, } def handle(self, **options): self.set_options(**options) message = ['\n'] if self.dry_run: message.append( 'You have activated the --dry-run option so no files will be modified.\n\n' ) message.append( 'You have requested to collect static files at the destination\n' 'location as specified in your settings' ) if self.is_local_storage() and self.storage.location: destination_path = self.storage.location message.append(':\n\n %s\n\n' % destination_path) should_warn_user = ( self.storage.exists(destination_path) and any(self.storage.listdir(destination_path)) ) else: destination_path = None message.append('.\n\n') # Destination files existence not checked; play it safe and warn. should_warn_user = True if self.interactive and should_warn_user: if self.clear: message.append('This will DELETE ALL FILES in this location!\n') else: message.append('This will overwrite existing files!\n') message.append( 'Are you sure you want to do this?\n\n' "Type 'yes' to continue, or 'no' to cancel: " ) if input(''.join(message)) != 'yes': raise CommandError("Collecting static files cancelled.") collected = self.collect() modified_count = len(collected['modified']) unmodified_count = len(collected['unmodified']) post_processed_count = len(collected['post_processed']) if self.verbosity >= 1: template = ("\n%(modified_count)s %(identifier)s %(action)s" "%(destination)s%(unmodified)s%(post_processed)s.\n") summary = template % { 'modified_count': modified_count, 'identifier': 'static file' + ('' if modified_count == 1 else 's'), 'action': 'symlinked' if self.symlink else 'copied', 'destination': (" to '%s'" % destination_path if destination_path else ''), 'unmodified': (', %s unmodified' % unmodified_count if collected['unmodified'] else ''), 'post_processed': (collected['post_processed'] and ', %s post-processed' % post_processed_count or ''), } return summary def log(self, msg, level=2): """ Small log helper """ if self.verbosity >= level: self.stdout.write(msg) def is_local_storage(self): return isinstance(self.storage, FileSystemStorage) def clear_dir(self, path): """ Delete the given relative path using the destination storage backend. 
""" if not self.storage.exists(path): return dirs, files = self.storage.listdir(path) for f in files: fpath = os.path.join(path, f) if self.dry_run: self.log("Pretending to delete '%s'" % fpath, level=1) else: self.log("Deleting '%s'" % fpath, level=1) try: full_path = self.storage.path(fpath) except NotImplementedError: self.storage.delete(fpath) else: if not os.path.exists(full_path) and os.path.lexists(full_path): # Delete broken symlinks os.unlink(full_path) else: self.storage.delete(fpath) for d in dirs: self.clear_dir(os.path.join(path, d)) def delete_file(self, path, prefixed_path, source_storage): """ Check if the target file should be deleted if it already exists. """ if self.storage.exists(prefixed_path): try: # When was the target file modified last time? target_last_modified = self.storage.get_modified_time(prefixed_path) except (OSError, NotImplementedError, AttributeError): # The storage doesn't support get_modified_time() or failed pass else: try: # When was the source file modified last time? source_last_modified = source_storage.get_modified_time(path) except (OSError, NotImplementedError, AttributeError): pass else: # The full path of the target file if self.local: full_path = self.storage.path(prefixed_path) # If it's --link mode and the path isn't a link (i.e. # the previous collectstatic wasn't with --link) or if # it's non-link mode and the path is a link (i.e. the # previous collectstatic was with --link), the old # links/files must be deleted so it's not safe to skip # unmodified files. can_skip_unmodified_files = not (self.symlink ^ os.path.islink(full_path)) else: # In remote storages, skipping is only based on the # modified times since symlinks aren't relevant. can_skip_unmodified_files = True # Avoid sub-second precision (see #14665, #19540) file_is_unmodified = ( target_last_modified.replace(microsecond=0) >= source_last_modified.replace(microsecond=0) ) if file_is_unmodified and can_skip_unmodified_files: if prefixed_path not in self.unmodified_files: self.unmodified_files.append(prefixed_path) self.log("Skipping '%s' (not modified)" % path) return False # Then delete the existing file if really needed if self.dry_run: self.log("Pretending to delete '%s'" % path) else: self.log("Deleting '%s'" % path) self.storage.delete(prefixed_path) return True def link_file(self, path, prefixed_path, source_storage): """ Attempt to link ``path`` """ # Skip this file if it was already copied earlier if prefixed_path in self.symlinked_files: return self.log("Skipping '%s' (already linked earlier)" % path) # Delete the target file if needed or break if not self.delete_file(path, prefixed_path, source_storage): return # The full path of the source file source_path = source_storage.path(path) # Finally link the file if self.dry_run: self.log("Pretending to link '%s'" % source_path, level=1) else: self.log("Linking '%s'" % source_path, level=2) full_path = self.storage.path(prefixed_path) os.makedirs(os.path.dirname(full_path), exist_ok=True) try: if os.path.lexists(full_path): os.unlink(full_path) os.symlink(source_path, full_path) except AttributeError: import platform raise CommandError("Symlinking is not supported by Python %s." % platform.python_version()) except NotImplementedError: import platform raise CommandError("Symlinking is not supported in this " "platform (%s)." 
% platform.platform()) except OSError as e: raise CommandError(e) if prefixed_path not in self.symlinked_files: self.symlinked_files.append(prefixed_path) def copy_file(self, path, prefixed_path, source_storage): """ Attempt to copy ``path`` with storage """ # Skip this file if it was already copied earlier if prefixed_path in self.copied_files: return self.log("Skipping '%s' (already copied earlier)" % path) # Delete the target file if needed or break if not self.delete_file(path, prefixed_path, source_storage): return # The full path of the source file source_path = source_storage.path(path) # Finally start copying if self.dry_run: self.log("Pretending to copy '%s'" % source_path, level=1) else: self.log("Copying '%s'" % source_path, level=2) with source_storage.open(path) as source_file: self.storage.save(prefixed_path, source_file) self.copied_files.append(prefixed_path)
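# Usage sketch (an addition, not part of the command above): collectstatic
# can also be run programmatically through call_command(); interactive=False
# is the option dest behind --noinput. Assumes a configured project with
# STATIC_ROOT set.
from django.core.management import call_command

call_command('collectstatic', interactive=False, clear=True, verbosity=1)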
from django.apps import apps as global_apps
from django.db import DEFAULT_DB_ALIAS, migrations, router, transaction
from django.db.utils import IntegrityError


class RenameContentType(migrations.RunPython):
    def __init__(self, app_label, old_model, new_model):
        self.app_label = app_label
        self.old_model = old_model
        self.new_model = new_model
        super().__init__(self.rename_forward, self.rename_backward)

    def _rename(self, apps, schema_editor, old_model, new_model):
        ContentType = apps.get_model('contenttypes', 'ContentType')
        db = schema_editor.connection.alias
        if not router.allow_migrate_model(db, ContentType):
            return

        try:
            content_type = ContentType.objects.db_manager(db).get_by_natural_key(self.app_label, old_model)
        except ContentType.DoesNotExist:
            pass
        else:
            content_type.model = new_model
            try:
                with transaction.atomic(using=db):
                    content_type.save(using=db, update_fields={'model'})
            except IntegrityError:
                # Gracefully fall back if a stale content type causes a
                # conflict as remove_stale_contenttypes will take care of
                # asking the user what should be done next.
                content_type.model = old_model
            else:
                # Clear the cache as the `get_by_natural_key()` call will cache
                # the renamed ContentType instance by its old model name.
                ContentType.objects.clear_cache()

    def rename_forward(self, apps, schema_editor):
        self._rename(apps, schema_editor, self.old_model, self.new_model)

    def rename_backward(self, apps, schema_editor):
        self._rename(apps, schema_editor, self.new_model, self.old_model)


def inject_rename_contenttypes_operations(plan=None, apps=global_apps, using=DEFAULT_DB_ALIAS, **kwargs):
    """
    Insert a `RenameContentType` operation after every planned `RenameModel`
    operation.
    """
    if plan is None:
        return

    # Determine whether or not the ContentType model is available.
    try:
        ContentType = apps.get_model('contenttypes', 'ContentType')
    except LookupError:
        available = False
    else:
        if not router.allow_migrate_model(using, ContentType):
            return
        available = True

    for migration, backward in plan:
        if (migration.app_label, migration.name) == ('contenttypes', '0001_initial'):
            # There's no point in going forward if the initial contenttypes
            # migration is unapplied as the ContentType model will be
            # unavailable from this point.
            if backward:
                break
            else:
                available = True
                continue
        # The ContentType model is not available yet.
        if not available:
            continue
        inserts = []
        for index, operation in enumerate(migration.operations):
            if isinstance(operation, migrations.RenameModel):
                operation = RenameContentType(
                    migration.app_label, operation.old_name_lower, operation.new_name_lower
                )
                inserts.append((index + 1, operation))
        for inserted, (index, operation) in enumerate(inserts):
            migration.operations.insert(inserted + index, operation)


def get_contenttypes_and_models(app_config, using, ContentType):
    if not router.allow_migrate_model(using, ContentType):
        return None, None

    ContentType.objects.clear_cache()

    content_types = {
        ct.model: ct
        for ct in ContentType.objects.using(using).filter(app_label=app_config.label)
    }
    app_models = {
        model._meta.model_name: model
        for model in app_config.get_models()
    }
    return content_types, app_models


def create_contenttypes(app_config, verbosity=2, interactive=True, using=DEFAULT_DB_ALIAS, apps=global_apps, **kwargs):
    """
    Create content types for models in the given app.
""" if not app_config.models_module: return app_label = app_config.label try: app_config = apps.get_app_config(app_label) ContentType = apps.get_model('contenttypes', 'ContentType') except LookupError: return content_types, app_models = get_contenttypes_and_models(app_config, using, ContentType) if not app_models: return cts = [ ContentType( app_label=app_label, model=model_name, ) for (model_name, model) in app_models.items() if model_name not in content_types ] ContentType.objects.using(using).bulk_create(cts) if verbosity >= 2: for ct in cts: print("Adding content type '%s | %s'" % (ct.app_label, ct.model))
from django.apps import apps from django.contrib.contenttypes.models import ContentType from django.core.management import BaseCommand from django.db import DEFAULT_DB_ALIAS, router from django.db.models.deletion import Collector from ...management import get_contenttypes_and_models class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '--noinput', '--no-input', action='store_false', dest='interactive', help='Tells Django to NOT prompt the user for input of any kind.', ) parser.add_argument( '--database', default=DEFAULT_DB_ALIAS, help='Nominates the database to use. Defaults to the "default" database.', ) def handle(self, **options): db = options['database'] interactive = options['interactive'] verbosity = options['verbosity'] for app_config in apps.get_app_configs(): content_types, app_models = get_contenttypes_and_models(app_config, db, ContentType) to_remove = [ ct for (model_name, ct) in content_types.items() if model_name not in app_models ] # Confirm that the content type is stale before deletion. using = router.db_for_write(ContentType) if to_remove: if interactive: ct_info = [] for ct in to_remove: ct_info.append(' - Content type for %s.%s' % (ct.app_label, ct.model)) collector = NoFastDeleteCollector(using=using) collector.collect([ct]) for obj_type, objs in collector.data.items(): if objs != {ct}: ct_info.append(' - %s %s object(s)' % ( len(objs), obj_type._meta.label, )) content_type_display = '\n'.join(ct_info) self.stdout.write("""Some content types in your database are stale and can be deleted. Any objects that depend on these content types will also be deleted. The content types and dependent objects that would be deleted are: %s This list doesn't include any cascade deletions to data outside of Django's models (uncommon). Are you sure you want to delete these content types? If you're unsure, answer 'no'.\n""" % content_type_display) ok_to_delete = input("Type 'yes' to continue, or 'no' to cancel: ") else: ok_to_delete = 'yes' if ok_to_delete == 'yes': for ct in to_remove: if verbosity >= 2: self.stdout.write("Deleting stale content type '%s | %s'" % (ct.app_label, ct.model)) ct.delete() else: if verbosity >= 2: self.stdout.write("Stale content types remain.") class NoFastDeleteCollector(Collector): def can_fast_delete(self, *args, **kwargs): """ Always load related objects to display them when showing confirmation. """ return False
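# Usage sketch: running the command above without prompts against a
# non-default database alias ('replica' is a hypothetical alias in DATABASES).
from django.core.management import call_command

call_command('remove_stale_contenttypes', interactive=False, database='replica')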
import datetime import logging import os import shutil import tempfile from django.conf import settings from django.contrib.sessions.backends.base import ( VALID_KEY_CHARS, CreateError, SessionBase, UpdateError, ) from django.contrib.sessions.exceptions import InvalidSessionKey from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation from django.utils import timezone class SessionStore(SessionBase): """ Implement a file based session store. """ def __init__(self, session_key=None): self.storage_path = self._get_storage_path() self.file_prefix = settings.SESSION_COOKIE_NAME super().__init__(session_key) @classmethod def _get_storage_path(cls): try: return cls._storage_path except AttributeError: storage_path = getattr(settings, 'SESSION_FILE_PATH', None) or tempfile.gettempdir() # Make sure the storage path is valid. if not os.path.isdir(storage_path): raise ImproperlyConfigured( "The session storage path %r doesn't exist. Please set your" " SESSION_FILE_PATH setting to an existing directory in which" " Django can store session data." % storage_path) cls._storage_path = storage_path return storage_path def _key_to_file(self, session_key=None): """ Get the file associated with this session key. """ if session_key is None: session_key = self._get_or_create_session_key() # Make sure we're not vulnerable to directory traversal. Session keys # should always be md5s, so they should never contain directory # components. if not set(session_key).issubset(VALID_KEY_CHARS): raise InvalidSessionKey( "Invalid characters in session key") return os.path.join(self.storage_path, self.file_prefix + session_key) def _last_modification(self): """ Return the modification time of the file storing the session's content. """ modification = os.stat(self._key_to_file()).st_mtime if settings.USE_TZ: modification = datetime.datetime.utcfromtimestamp(modification) return modification.replace(tzinfo=timezone.utc) return datetime.datetime.fromtimestamp(modification) def _expiry_date(self, session_data): """ Return the expiry time of the file storing the session's content. """ return session_data.get('_session_expiry') or ( self._last_modification() + datetime.timedelta(seconds=self.get_session_cookie_age()) ) def load(self): session_data = {} try: with open(self._key_to_file(), encoding='ascii') as session_file: file_data = session_file.read() # Don't fail if there is no data in the session file. # We may have opened the empty placeholder file. if file_data: try: session_data = self.decode(file_data) except (EOFError, SuspiciousOperation) as e: if isinstance(e, SuspiciousOperation): logger = logging.getLogger('django.security.%s' % e.__class__.__name__) logger.warning(str(e)) self.create() # Remove expired sessions. expiry_age = self.get_expiry_age(expiry=self._expiry_date(session_data)) if expiry_age <= 0: session_data = {} self.delete() self.create() except (OSError, SuspiciousOperation): self._session_key = None return session_data def create(self): while True: self._session_key = self._get_new_session_key() try: self.save(must_create=True) except CreateError: continue self.modified = True return def save(self, must_create=False): if self.session_key is None: return self.create() # Get the session data now, before we start messing # with the file it is stored within. session_data = self._get_session(no_load=must_create) session_file_name = self._key_to_file() try: # Make sure the file exists. If it does not already exist, an # empty placeholder file is created. 
flags = os.O_WRONLY | getattr(os, 'O_BINARY', 0) if must_create: flags |= os.O_EXCL | os.O_CREAT fd = os.open(session_file_name, flags) os.close(fd) except FileNotFoundError: if not must_create: raise UpdateError except FileExistsError: if must_create: raise CreateError # Write the session file without interfering with other threads # or processes. By writing to an atomically generated temporary # file and then using the atomic os.rename() to make the complete # file visible, we avoid having to lock the session file, while # still maintaining its integrity. # # Note: Locking the session file was explored, but rejected in part # because in order to be atomic and cross-platform, it required a # long-lived lock file for each session, doubling the number of # files in the session storage directory at any given time. This # rename solution is cleaner and avoids any additional overhead # when reading the session data, which is the more common case # unless SESSION_SAVE_EVERY_REQUEST = True. # # See ticket #8616. dir, prefix = os.path.split(session_file_name) try: output_file_fd, output_file_name = tempfile.mkstemp(dir=dir, prefix=prefix + '_out_') renamed = False try: try: os.write(output_file_fd, self.encode(session_data).encode()) finally: os.close(output_file_fd) # This will atomically rename the file (os.rename) if the OS # supports it. Otherwise this will result in a shutil.copy2 # and os.unlink (for example on Windows). See #9084. shutil.move(output_file_name, session_file_name) renamed = True finally: if not renamed: os.unlink(output_file_name) except (EOFError, OSError): pass def exists(self, session_key): return os.path.exists(self._key_to_file(session_key)) def delete(self, session_key=None): if session_key is None: if self.session_key is None: return session_key = self.session_key try: os.unlink(self._key_to_file(session_key)) except OSError: pass def clean(self): pass @classmethod def clear_expired(cls): storage_path = cls._get_storage_path() file_prefix = settings.SESSION_COOKIE_NAME for session_file in os.listdir(storage_path): if not session_file.startswith(file_prefix): continue session_key = session_file[len(file_prefix):] session = cls(session_key) # When an expired session is loaded, its file is removed, and a # new file is immediately created. Prevent this by disabling # the create() method. session.create = lambda: None session.load()
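# Usage sketch of the file backend's life cycle, assuming a configured
# project with a writable SESSION_FILE_PATH (or system temp directory).
from django.contrib.sessions.backends.file import SessionStore

session = SessionStore()
session['theme'] = 'dark'
session.save()                   # written atomically via the rename dance above
restored = SessionStore(session_key=session.session_key)
assert restored['theme'] == 'dark'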
import base64 import logging import string import warnings from datetime import datetime, timedelta from django.conf import settings from django.contrib.sessions.exceptions import SuspiciousSession from django.core.exceptions import SuspiciousOperation from django.utils import timezone from django.utils.crypto import ( constant_time_compare, get_random_string, salted_hmac, ) from django.utils.deprecation import RemovedInDjango40Warning from django.utils.module_loading import import_string from django.utils.translation import LANGUAGE_SESSION_KEY # session_key should not be case sensitive because some backends can store it # on case insensitive file systems. VALID_KEY_CHARS = string.ascii_lowercase + string.digits class CreateError(Exception): """ Used internally as a consistent exception type to catch from save (see the docstring for SessionBase.save() for details). """ pass class UpdateError(Exception): """ Occurs if Django tries to update a session that was deleted. """ pass class SessionBase: """ Base class for all Session classes. """ TEST_COOKIE_NAME = 'testcookie' TEST_COOKIE_VALUE = 'worked' __not_given = object() def __init__(self, session_key=None): self._session_key = session_key self.accessed = False self.modified = False self.serializer = import_string(settings.SESSION_SERIALIZER) def __contains__(self, key): return key in self._session def __getitem__(self, key): if key == LANGUAGE_SESSION_KEY: warnings.warn( 'The user language will no longer be stored in ' 'request.session in Django 4.0. Read it from ' 'request.COOKIES[settings.LANGUAGE_COOKIE_NAME] instead.', RemovedInDjango40Warning, stacklevel=2, ) return self._session[key] def __setitem__(self, key, value): self._session[key] = value self.modified = True def __delitem__(self, key): del self._session[key] self.modified = True def get(self, key, default=None): return self._session.get(key, default) def pop(self, key, default=__not_given): self.modified = self.modified or key in self._session args = () if default is self.__not_given else (default,) return self._session.pop(key, *args) def setdefault(self, key, value): if key in self._session: return self._session[key] else: self.modified = True self._session[key] = value return value def set_test_cookie(self): self[self.TEST_COOKIE_NAME] = self.TEST_COOKIE_VALUE def test_cookie_worked(self): return self.get(self.TEST_COOKIE_NAME) == self.TEST_COOKIE_VALUE def delete_test_cookie(self): del self[self.TEST_COOKIE_NAME] def _hash(self, value): key_salt = "django.contrib.sessions" + self.__class__.__name__ return salted_hmac(key_salt, value).hexdigest() def encode(self, session_dict): "Return the given session dictionary serialized and encoded as a string." serialized = self.serializer().dumps(session_dict) hash = self._hash(serialized) return base64.b64encode(hash.encode() + b":" + serialized).decode('ascii') def decode(self, session_data): encoded_data = base64.b64decode(session_data.encode('ascii')) try: # could produce ValueError if there is no ':' hash, serialized = encoded_data.split(b':', 1) expected_hash = self._hash(serialized) if not constant_time_compare(hash.decode(), expected_hash): raise SuspiciousSession("Session data corrupted") else: return self.serializer().loads(serialized) except Exception as e: # ValueError, SuspiciousOperation, unpickling exceptions. If any of # these happen, just return an empty dictionary (an empty session). 
                if isinstance(e, SuspiciousOperation):
                    logger = logging.getLogger('django.security.%s' % e.__class__.__name__)
                    logger.warning(str(e))
                return {}

    def update(self, dict_):
        self._session.update(dict_)
        self.modified = True

    def has_key(self, key):
        return key in self._session

    def keys(self):
        return self._session.keys()

    def values(self):
        return self._session.values()

    def items(self):
        return self._session.items()

    def clear(self):
        # To avoid unnecessary persistent storage accesses, we set up the
        # internals directly (loading data wastes time, since we are going to
        # set it to an empty dict anyway).
        self._session_cache = {}
        self.accessed = True
        self.modified = True

    def is_empty(self):
        "Return True when there is no session_key and the session is empty."
        try:
            return not self._session_key and not self._session_cache
        except AttributeError:
            return True

    def _get_new_session_key(self):
        "Return a session key that isn't being used."
        while True:
            session_key = get_random_string(32, VALID_KEY_CHARS)
            if not self.exists(session_key):
                return session_key

    def _get_or_create_session_key(self):
        if self._session_key is None:
            self._session_key = self._get_new_session_key()
        return self._session_key

    def _validate_session_key(self, key):
        """
        Key must be truthy and at least 8 characters long. 8 characters is an
        arbitrary lower bound for some minimal key security.
        """
        return key and len(key) >= 8

    def _get_session_key(self):
        return self.__session_key

    def _set_session_key(self, value):
        """
        Validate session key on assignment. Invalid values will be set to None.
        """
        if self._validate_session_key(value):
            self.__session_key = value
        else:
            self.__session_key = None

    session_key = property(_get_session_key)
    _session_key = property(_get_session_key, _set_session_key)

    def _get_session(self, no_load=False):
        """
        Lazily load session from storage (unless "no_load" is True, when only
        an empty dict is stored) and store it in the current instance.
        """
        self.accessed = True
        try:
            return self._session_cache
        except AttributeError:
            if self.session_key is None or no_load:
                self._session_cache = {}
            else:
                self._session_cache = self.load()
        return self._session_cache

    _session = property(_get_session)

    def get_session_cookie_age(self):
        return settings.SESSION_COOKIE_AGE

    def get_expiry_age(self, **kwargs):
        """Get the number of seconds until the session expires.

        Optionally, this function accepts `modification` and `expiry` keyword
        arguments specifying the modification and expiry of the session.
        """
        try:
            modification = kwargs['modification']
        except KeyError:
            modification = timezone.now()
        # Distinguish between "expiry=None passed in kwargs" and "expiry not
        # passed in kwargs", in order to guarantee not to trigger self.load()
        # when expiry is provided.
        try:
            expiry = kwargs['expiry']
        except KeyError:
            expiry = self.get('_session_expiry')

        if not expiry:   # Checks both None and 0 cases
            return self.get_session_cookie_age()
        if not isinstance(expiry, datetime):
            return expiry
        delta = expiry - modification
        return delta.days * 86400 + delta.seconds

    def get_expiry_date(self, **kwargs):
        """Get the session expiry date (as a datetime object).

        Optionally, this function accepts `modification` and `expiry` keyword
        arguments specifying the modification and expiry of the session.
""" try: modification = kwargs['modification'] except KeyError: modification = timezone.now() # Same comment as in get_expiry_age try: expiry = kwargs['expiry'] except KeyError: expiry = self.get('_session_expiry') if isinstance(expiry, datetime): return expiry expiry = expiry or self.get_session_cookie_age() return modification + timedelta(seconds=expiry) def set_expiry(self, value): """ Set a custom expiration for the session. ``value`` can be an integer, a Python ``datetime`` or ``timedelta`` object or ``None``. If ``value`` is an integer, the session will expire after that many seconds of inactivity. If set to ``0`` then the session will expire on browser close. If ``value`` is a ``datetime`` or ``timedelta`` object, the session will expire at that specific future time. If ``value`` is ``None``, the session uses the global session expiry policy. """ if value is None: # Remove any custom expiration for this session. try: del self['_session_expiry'] except KeyError: pass return if isinstance(value, timedelta): value = timezone.now() + value self['_session_expiry'] = value def get_expire_at_browser_close(self): """ Return ``True`` if the session is set to expire when the browser closes, and ``False`` if there's an expiry date. Use ``get_expiry_date()`` or ``get_expiry_age()`` to find the actual expiry date/age, if there is one. """ if self.get('_session_expiry') is None: return settings.SESSION_EXPIRE_AT_BROWSER_CLOSE return self.get('_session_expiry') == 0 def flush(self): """ Remove the current session data from the database and regenerate the key. """ self.clear() self.delete() self._session_key = None def cycle_key(self): """ Create a new session key, while retaining the current session data. """ data = self._session key = self.session_key self.create() self._session_cache = data if key: self.delete(key) # Methods that child classes must implement. def exists(self, session_key): """ Return True if the given session_key already exists. """ raise NotImplementedError('subclasses of SessionBase must provide an exists() method') def create(self): """ Create a new session instance. Guaranteed to create a new object with a unique key and will have saved the result once (with empty data) before the method returns. """ raise NotImplementedError('subclasses of SessionBase must provide a create() method') def save(self, must_create=False): """ Save the session data. If 'must_create' is True, create a new session object (or raise CreateError). Otherwise, only update an existing object and don't create one (raise UpdateError if needed). """ raise NotImplementedError('subclasses of SessionBase must provide a save() method') def delete(self, session_key=None): """ Delete the session data under this key. If the key is None, use the current session key value. """ raise NotImplementedError('subclasses of SessionBase must provide a delete() method') def load(self): """ Load the session data and return a dictionary. """ raise NotImplementedError('subclasses of SessionBase must provide a load() method') @classmethod def clear_expired(cls): """ Remove expired sessions from the session store. If this operation isn't possible on a given backend, it should raise NotImplementedError. If it isn't necessary, because the backend has a built-in expiration mechanism, it should be a no-op. """ raise NotImplementedError('This backend does not support clear_expired().')
from django.contrib.sessions.backends.base import SessionBase from django.core import signing class SessionStore(SessionBase): def load(self): """ Load the data from the key itself instead of fetching from some external data store. Opposite of _get_session_key(), raise BadSignature if signature fails. """ try: return signing.loads( self.session_key, serializer=self.serializer, # This doesn't handle non-default expiry dates, see #19201 max_age=self.get_session_cookie_age(), salt='django.contrib.sessions.backends.signed_cookies', ) except Exception: # BadSignature, ValueError, or unpickling exceptions. If any of # these happen, reset the session. self.create() return {} def create(self): """ To create a new key, set the modified flag so that the cookie is set on the client for the current request. """ self.modified = True def save(self, must_create=False): """ To save, get the session key as a securely signed string and then set the modified flag so that the cookie is set on the client for the current request. """ self._session_key = self._get_session_key() self.modified = True def exists(self, session_key=None): """ This method makes sense when you're talking to a shared resource, but it doesn't matter when you're storing the information in the client's cookie. """ return False def delete(self, session_key=None): """ To delete, clear the session key and the underlying data structure and set the modified flag so that the cookie is set on the client for the current request. """ self._session_key = '' self._session_cache = {} self.modified = True def cycle_key(self): """ Keep the same data but with a new key. Call save() and it will automatically save a cookie with a new key at the end of the request. """ self.save() def _get_session_key(self): """ Instead of generating a random string, generate a secure url-safe base64-encoded string of data as our session key. """ return signing.dumps( self._session, compress=True, salt='django.contrib.sessions.backends.signed_cookies', serializer=self.serializer, ) @classmethod def clear_expired(cls): pass
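# Sketch of the round trip: with this backend the "session key" *is* the
# signed payload, so no server-side storage is touched (assumes SECRET_KEY
# is configured).
from django.contrib.sessions.backends.signed_cookies import SessionStore

s = SessionStore()
s['cart'] = [1, 2, 3]
s.save()
token = s.session_key                     # signed, compressed, base64 data
assert SessionStore(session_key=token)['cart'] == [1, 2, 3]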
""" This module is for inspecting OGR data sources and generating either models for GeoDjango and/or mapping dictionaries for use with the `LayerMapping` utility. """ from django.contrib.gis.gdal import DataSource from django.contrib.gis.gdal.field import ( OFTDate, OFTDateTime, OFTInteger, OFTInteger64, OFTReal, OFTString, OFTTime, ) def mapping(data_source, geom_name='geom', layer_key=0, multi_geom=False): """ Given a DataSource, generate a dictionary that may be used for invoking the LayerMapping utility. Keyword Arguments: `geom_name` => The name of the geometry field to use for the model. `layer_key` => The key for specifying which layer in the DataSource to use; defaults to 0 (the first layer). May be an integer index or a string identifier for the layer. `multi_geom` => Boolean (default: False) - specify as multigeometry. """ if isinstance(data_source, str): # Instantiating the DataSource from the string. data_source = DataSource(data_source) elif isinstance(data_source, DataSource): pass else: raise TypeError('Data source parameter must be a string or a DataSource object.') # Creating the dictionary. _mapping = {} # Generating the field name for each field in the layer. for field in data_source[layer_key].fields: mfield = field.lower() if mfield[-1:] == '_': mfield += 'field' _mapping[mfield] = field gtype = data_source[layer_key].geom_type if multi_geom: gtype.to_multi() _mapping[geom_name] = str(gtype).upper() return _mapping def ogrinspect(*args, **kwargs): """ Given a data source (either a string or a DataSource object) and a string model name this function will generate a GeoDjango model. Usage: >>> from django.contrib.gis.utils import ogrinspect >>> ogrinspect('/path/to/shapefile.shp','NewModel') ...will print model definition to stout or put this in a Python script and use to redirect the output to a new model like: $ python generate_model.py > myapp/models.py # generate_model.py from django.contrib.gis.utils import ogrinspect shp_file = 'data/mapping_hacks/world_borders.shp' model_name = 'WorldBorders' print(ogrinspect(shp_file, model_name, multi_geom=True, srid=4326, geom_name='shapes', blank=True)) Required Arguments `datasource` => string or DataSource object to file pointer `model name` => string of name of new model class to create Optional Keyword Arguments `geom_name` => For specifying the model name for the Geometry Field. Otherwise will default to `geom` `layer_key` => The key for specifying which layer in the DataSource to use; defaults to 0 (the first layer). May be an integer index or a string identifier for the layer. `srid` => The SRID to use for the Geometry Field. If it can be determined, the SRID of the datasource is used. `multi_geom` => Boolean (default: False) - specify as multigeometry. `name_field` => String - specifies a field name to return for the __str__() method (which will be generated if specified). `imports` => Boolean (default: True) - set to False to omit the `from django.contrib.gis.db import models` code from the autogenerated models thus avoiding duplicated imports when building more than one model by batching ogrinspect() `decimal` => Boolean or sequence (default: False). When set to True all generated model fields corresponding to the `OFTReal` type will be `DecimalField` instead of `FloatField`. A sequence of specific field names to generate as `DecimalField` may also be used. `blank` => Boolean or sequence (default: False). When set to True all generated model fields will have `blank=True`. 
        If the user wants to give specific fields to have blank, then a
        list/tuple of OGR field names may be used.

     `null` => Boolean (default: False) - When set to True all generated
        model fields will have `null=True`.  If the user wants to give
        specific fields to have null, then a list/tuple of OGR field names
        may be used.

    Note: Call the _ogrinspect() helper to do the heavy lifting.
    """
    return '\n'.join(_ogrinspect(*args, **kwargs))


def _ogrinspect(data_source, model_name, geom_name='geom', layer_key=0, srid=None,
                multi_geom=False, name_field=None, imports=True,
                decimal=False, blank=False, null=False):
    """
    Helper routine for `ogrinspect` that generates GeoDjango models
    corresponding to the given data source.  See the `ogrinspect` docstring
    for more details.
    """
    # Getting the DataSource
    if isinstance(data_source, str):
        data_source = DataSource(data_source)
    elif isinstance(data_source, DataSource):
        pass
    else:
        raise TypeError('Data source parameter must be a string or a DataSource object.')

    # Getting the layer corresponding to the layer key and getting
    # a string listing of all OGR fields in the Layer.
    layer = data_source[layer_key]
    ogr_fields = layer.fields

    # Creating lists from the `null`, `blank`, and `decimal`
    # keyword arguments.
    def process_kwarg(kwarg):
        if isinstance(kwarg, (list, tuple)):
            return [s.lower() for s in kwarg]
        elif kwarg:
            return [s.lower() for s in ogr_fields]
        else:
            return []
    null_fields = process_kwarg(null)
    blank_fields = process_kwarg(blank)
    decimal_fields = process_kwarg(decimal)

    # Gets the `null` and `blank` keywords for the given field name.
    def get_kwargs_str(field_name):
        kwlist = []
        if field_name.lower() in null_fields:
            kwlist.append('null=True')
        if field_name.lower() in blank_fields:
            kwlist.append('blank=True')
        if kwlist:
            return ', ' + ', '.join(kwlist)
        else:
            return ''

    # For those wishing to disable the imports.
    if imports:
        yield '# This is an auto-generated Django model module created by ogrinspect.'
        yield 'from django.contrib.gis.db import models'
        yield ''
        yield ''

    yield 'class %s(models.Model):' % model_name

    for field_name, width, precision, field_type in zip(
            ogr_fields, layer.field_widths, layer.field_precisions, layer.field_types):
        # The model field name.
        mfield = field_name.lower()
        if mfield[-1:] == '_':
            mfield += 'field'

        # Getting the keyword args string.
        kwargs_str = get_kwargs_str(field_name)

        if field_type is OFTReal:
            # By default OFTReals are mapped to `FloatField`, however, they
            # may also be mapped to `DecimalField` if specified in the
            # `decimal` keyword.
            if field_name.lower() in decimal_fields:
                yield '    %s = models.DecimalField(max_digits=%d, decimal_places=%d%s)' % (
                    mfield, width, precision, kwargs_str
                )
            else:
                yield '    %s = models.FloatField(%s)' % (mfield, kwargs_str[2:])
        elif field_type is OFTInteger:
            yield '    %s = models.IntegerField(%s)' % (mfield, kwargs_str[2:])
        elif field_type is OFTInteger64:
            yield '    %s = models.BigIntegerField(%s)' % (mfield, kwargs_str[2:])
        elif field_type is OFTString:
            yield '    %s = models.CharField(max_length=%s%s)' % (mfield, width, kwargs_str)
        elif field_type is OFTDate:
            yield '    %s = models.DateField(%s)' % (mfield, kwargs_str[2:])
        elif field_type is OFTDateTime:
            yield '    %s = models.DateTimeField(%s)' % (mfield, kwargs_str[2:])
        elif field_type is OFTTime:
            yield '    %s = models.TimeField(%s)' % (mfield, kwargs_str[2:])
        else:
            raise TypeError('Unknown field type %s in %s' % (field_type, mfield))

    # TODO: Autodetection of multigeometry types (see #7218).
gtype = layer.geom_type if multi_geom: gtype.to_multi() geom_field = gtype.django # Setting up the SRID keyword string. if srid is None: if layer.srs is None: srid_str = 'srid=-1' else: srid = layer.srs.srid if srid is None: srid_str = 'srid=-1' elif srid == 4326: # WGS84 is already the default. srid_str = '' else: srid_str = 'srid=%s' % srid else: srid_str = 'srid=%s' % srid yield ' %s = models.%s(%s)' % (geom_name, geom_field, srid_str) if name_field: yield '' yield ' def __str__(self): return self.%s' % name_field
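# Usage sketch mirroring the docstrings above; the shapefile path and field
# names are assumptions.
from django.contrib.gis.utils import mapping, ogrinspect

shp = '/data/world_borders.shp'   # hypothetical path
print(ogrinspect(shp, 'WorldBorder', srid=4326, multi_geom=True))
print(mapping(shp, geom_name='mpoly', multi_geom=True))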
# LayerMapping -- A Django Model/OGR Layer Mapping Utility """ The LayerMapping class provides a way to map the contents of OGR vector files (e.g. SHP files) to Geographic-enabled Django models. For more information, please consult the GeoDjango documentation: https://docs.djangoproject.com/en/dev/ref/contrib/gis/layermapping/ """ import sys from decimal import Decimal, InvalidOperation as DecimalInvalidOperation from django.contrib.gis.db.models import GeometryField from django.contrib.gis.gdal import ( CoordTransform, DataSource, GDALException, OGRGeometry, OGRGeomType, SpatialReference, ) from django.contrib.gis.gdal.field import ( OFTDate, OFTDateTime, OFTInteger, OFTInteger64, OFTReal, OFTString, OFTTime, ) from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist from django.db import connections, models, router, transaction from django.utils.encoding import force_str # LayerMapping exceptions. class LayerMapError(Exception): pass class InvalidString(LayerMapError): pass class InvalidDecimal(LayerMapError): pass class InvalidInteger(LayerMapError): pass class MissingForeignKey(LayerMapError): pass class LayerMapping: "A class that maps OGR Layers to GeoDjango Models." # Acceptable 'base' types for a multi-geometry type. MULTI_TYPES = { 1: OGRGeomType('MultiPoint'), 2: OGRGeomType('MultiLineString'), 3: OGRGeomType('MultiPolygon'), OGRGeomType('Point25D').num: OGRGeomType('MultiPoint25D'), OGRGeomType('LineString25D').num: OGRGeomType('MultiLineString25D'), OGRGeomType('Polygon25D').num: OGRGeomType('MultiPolygon25D'), } # Acceptable Django field types and corresponding acceptable OGR # counterparts. FIELD_TYPES = { models.AutoField: OFTInteger, models.BigAutoField: OFTInteger64, models.BooleanField: (OFTInteger, OFTReal, OFTString), models.IntegerField: (OFTInteger, OFTReal, OFTString), models.FloatField: (OFTInteger, OFTReal), models.DateField: OFTDate, models.DateTimeField: OFTDateTime, models.EmailField: OFTString, models.TimeField: OFTTime, models.DecimalField: (OFTInteger, OFTReal), models.CharField: OFTString, models.SlugField: OFTString, models.TextField: OFTString, models.URLField: OFTString, models.UUIDField: OFTString, models.BigIntegerField: (OFTInteger, OFTReal, OFTString), models.SmallIntegerField: (OFTInteger, OFTReal, OFTString), models.PositiveIntegerField: (OFTInteger, OFTReal, OFTString), models.PositiveSmallIntegerField: (OFTInteger, OFTReal, OFTString), } def __init__(self, model, data, mapping, layer=0, source_srs=None, encoding='utf-8', transaction_mode='commit_on_success', transform=True, unique=None, using=None): """ A LayerMapping object is initialized using the given Model (not an instance), a DataSource (or string path to an OGR-supported data file), and a mapping dictionary. See the module level docstring for more details and keyword argument usage. """ # Getting the DataSource and the associated Layer. if isinstance(data, str): self.ds = DataSource(data, encoding=encoding) else: self.ds = data self.layer = self.ds[layer] self.using = using if using is not None else router.db_for_write(model) self.spatial_backend = connections[self.using].ops # Setting the mapping & model attributes. self.mapping = mapping self.model = model # Checking the layer -- initialization of the object will fail if # things don't check out before hand. self.check_layer() # Getting the geometry column associated with the model (an # exception will be raised if there is no geometry column). 
if connections[self.using].features.supports_transform: self.geo_field = self.geometry_field() else: transform = False # Checking the source spatial reference system, and getting # the coordinate transformation object (unless the `transform` # keyword is set to False) if transform: self.source_srs = self.check_srs(source_srs) self.transform = self.coord_transform() else: self.transform = transform # Setting the encoding for OFTString fields, if specified. if encoding: # Making sure the encoding exists, if not a LookupError # exception will be thrown. from codecs import lookup lookup(encoding) self.encoding = encoding else: self.encoding = None if unique: self.check_unique(unique) transaction_mode = 'autocommit' # Has to be set to autocommit. self.unique = unique else: self.unique = None # Setting the transaction decorator with the function in the # transaction modes dictionary. self.transaction_mode = transaction_mode if transaction_mode == 'autocommit': self.transaction_decorator = None elif transaction_mode == 'commit_on_success': self.transaction_decorator = transaction.atomic else: raise LayerMapError('Unrecognized transaction mode: %s' % transaction_mode) # #### Checking routines used during initialization #### def check_fid_range(self, fid_range): "Check the `fid_range` keyword." if fid_range: if isinstance(fid_range, (tuple, list)): return slice(*fid_range) elif isinstance(fid_range, slice): return fid_range else: raise TypeError else: return None def check_layer(self): """ Check the Layer metadata and ensure that it's compatible with the mapping information and model. Unlike previous revisions, there is no need to increment through each feature in the Layer. """ # The geometry field of the model is set here. # TODO: Support more than one geometry field / model. However, this # depends on the GDAL Driver in use. self.geom_field = False self.fields = {} # Getting lists of the field names and the field types available in # the OGR Layer. ogr_fields = self.layer.fields ogr_field_types = self.layer.field_types # Function for determining if the OGR mapping field is in the Layer. def check_ogr_fld(ogr_map_fld): try: idx = ogr_fields.index(ogr_map_fld) except ValueError: raise LayerMapError('Given mapping OGR field "%s" not found in OGR Layer.' % ogr_map_fld) return idx # No need to increment through each feature in the model, simply check # the Layer metadata against what was given in the mapping dictionary. for field_name, ogr_name in self.mapping.items(): # Ensuring that a corresponding field exists in the model # for the given field name in the mapping. try: model_field = self.model._meta.get_field(field_name) except FieldDoesNotExist: raise LayerMapError('Given mapping field "%s" not in given Model fields.' % field_name) # Getting the string name for the Django field class (e.g., 'PointField'). fld_name = model_field.__class__.__name__ if isinstance(model_field, GeometryField): if self.geom_field: raise LayerMapError('LayerMapping does not support more than one GeometryField per model.') # Getting the coordinate dimension of the geometry field. coord_dim = model_field.dim try: if coord_dim == 3: gtype = OGRGeomType(ogr_name + '25D') else: gtype = OGRGeomType(ogr_name) except GDALException: raise LayerMapError('Invalid mapping for GeometryField "%s".' % field_name) # Making sure that the OGR Layer's Geometry is compatible. 
ltype = self.layer.geom_type if not (ltype.name.startswith(gtype.name) or self.make_multi(ltype, model_field)): raise LayerMapError('Invalid mapping geometry; model has %s%s, ' 'layer geometry type is %s.' % (fld_name, '(dim=3)' if coord_dim == 3 else '', ltype)) # Setting the `geom_field` attribute w/the name of the model field # that is a Geometry. Also setting the coordinate dimension # attribute. self.geom_field = field_name self.coord_dim = coord_dim fields_val = model_field elif isinstance(model_field, models.ForeignKey): if isinstance(ogr_name, dict): # Is every given related model mapping field in the Layer? rel_model = model_field.remote_field.model for rel_name, ogr_field in ogr_name.items(): idx = check_ogr_fld(ogr_field) try: rel_model._meta.get_field(rel_name) except FieldDoesNotExist: raise LayerMapError('ForeignKey mapping field "%s" not in %s fields.' % (rel_name, rel_model.__class__.__name__)) fields_val = rel_model else: raise TypeError('ForeignKey mapping must be of dictionary type.') else: # Is the model field type supported by LayerMapping? if model_field.__class__ not in self.FIELD_TYPES: raise LayerMapError('Django field type "%s" has no OGR mapping (yet).' % fld_name) # Is the OGR field in the Layer? idx = check_ogr_fld(ogr_name) ogr_field = ogr_field_types[idx] # Can the OGR field type be mapped to the Django field type? if not issubclass(ogr_field, self.FIELD_TYPES[model_field.__class__]): raise LayerMapError('OGR field "%s" (of type %s) cannot be mapped to Django %s.' % (ogr_field, ogr_field.__name__, fld_name)) fields_val = model_field self.fields[field_name] = fields_val def check_srs(self, source_srs): "Check the compatibility of the given spatial reference object." if isinstance(source_srs, SpatialReference): sr = source_srs elif isinstance(source_srs, self.spatial_backend.spatial_ref_sys()): sr = source_srs.srs elif isinstance(source_srs, (int, str)): sr = SpatialReference(source_srs) else: # Otherwise just pulling the SpatialReference from the layer sr = self.layer.srs if not sr: raise LayerMapError('No source reference system defined.') else: return sr def check_unique(self, unique): "Check the `unique` keyword parameter -- may be a sequence or string." if isinstance(unique, (list, tuple)): # List of fields to determine uniqueness with for attr in unique: if attr not in self.mapping: raise ValueError elif isinstance(unique, str): # Only a single field passed in. if unique not in self.mapping: raise ValueError else: raise TypeError('Unique keyword argument must be set with a tuple, list, or string.') # Keyword argument retrieval routines #### def feature_kwargs(self, feat): """ Given an OGR Feature, return a dictionary of keyword arguments for constructing the mapped model. """ # The keyword arguments for model construction. kwargs = {} # Incrementing through each model field and OGR field in the # dictionary mapping. for field_name, ogr_name in self.mapping.items(): model_field = self.fields[field_name] if isinstance(model_field, GeometryField): # Verify OGR geometry. try: val = self.verify_geom(feat.geom, model_field) except GDALException: raise LayerMapError('Could not retrieve geometry from feature.') elif isinstance(model_field, models.base.ModelBase): # The related _model_, not a field was passed in -- indicating # another mapping for the related Model. val = self.verify_fk(feat, model_field, ogr_name) else: # Otherwise, verify OGR Field type. 
val = self.verify_ogr_field(feat[ogr_name], model_field) # Setting the keyword arguments for the field name with the # value obtained above. kwargs[field_name] = val return kwargs def unique_kwargs(self, kwargs): """ Given the feature keyword arguments (from `feature_kwargs`), construct and return the uniqueness keyword arguments -- a subset of the feature kwargs. """ if isinstance(self.unique, str): return {self.unique: kwargs[self.unique]} else: return {fld: kwargs[fld] for fld in self.unique} # #### Verification routines used in constructing model keyword arguments. #### def verify_ogr_field(self, ogr_field, model_field): """ Verify if the OGR Field contents are acceptable to the model field. If they are, return the verified value, otherwise raise an exception. """ if (isinstance(ogr_field, OFTString) and isinstance(model_field, (models.CharField, models.TextField))): if self.encoding and ogr_field.value is not None: # The encoding for OGR data sources may be specified here # (e.g., 'cp437' for Census Bureau boundary files). val = force_str(ogr_field.value, self.encoding) else: val = ogr_field.value if model_field.max_length and val is not None and len(val) > model_field.max_length: raise InvalidString('%s model field maximum string length is %s, given %s characters.' % (model_field.name, model_field.max_length, len(val))) elif isinstance(ogr_field, OFTReal) and isinstance(model_field, models.DecimalField): try: # Creating an instance of the Decimal value to use. d = Decimal(str(ogr_field.value)) except DecimalInvalidOperation: raise InvalidDecimal('Could not construct decimal from: %s' % ogr_field.value) # Getting the decimal value as a tuple. dtup = d.as_tuple() digits = dtup[1] d_idx = dtup[2] # index where the decimal is # Maximum amount of precision, or digits to the left of the decimal. max_prec = model_field.max_digits - model_field.decimal_places # Getting the digits to the left of the decimal place for the # given decimal. if d_idx < 0: n_prec = len(digits[:d_idx]) else: n_prec = len(digits) + d_idx # If we have more than the maximum digits allowed, then throw an # InvalidDecimal exception. if n_prec > max_prec: raise InvalidDecimal( 'A DecimalField with max_digits %d, decimal_places %d must ' 'round to an absolute value less than 10^%d.' % (model_field.max_digits, model_field.decimal_places, max_prec) ) val = d elif isinstance(ogr_field, (OFTReal, OFTString)) and isinstance(model_field, models.IntegerField): # Attempt to convert any OFTReal and OFTString value to an OFTInteger. try: val = int(ogr_field.value) except ValueError: raise InvalidInteger('Could not construct integer from: %s' % ogr_field.value) else: val = ogr_field.value return val def verify_fk(self, feat, rel_model, rel_mapping): """ Given an OGR Feature, the related model and its dictionary mapping, retrieve the related model for the ForeignKey mapping. """ # TODO: It is expensive to retrieve a model for every record -- # explore if an efficient mechanism exists for caching related # ForeignKey models. # Constructing and verifying the related model keyword arguments. fk_kwargs = {} for field_name, ogr_name in rel_mapping.items(): fk_kwargs[field_name] = self.verify_ogr_field(feat[ogr_name], rel_model._meta.get_field(field_name)) # Attempting to retrieve and return the related model. 
        try:
            return rel_model.objects.using(self.using).get(**fk_kwargs)
        except ObjectDoesNotExist:
            raise MissingForeignKey(
                'No ForeignKey %s model found with keyword arguments: %s' %
                (rel_model.__name__, fk_kwargs)
            )

    def verify_geom(self, geom, model_field):
        """
        Verify the geometry -- construct and return a GeometryCollection
        if necessary (for example if the model field is MultiPolygonField
        while the mapped shapefile only contains Polygons).
        """
        # Downgrade a 3D geom to a 2D one, if necessary.
        if self.coord_dim != geom.coord_dim:
            geom.coord_dim = self.coord_dim

        if self.make_multi(geom.geom_type, model_field):
            # Constructing a multi-geometry type to contain the single geometry
            multi_type = self.MULTI_TYPES[geom.geom_type.num]
            g = OGRGeometry(multi_type)
            g.add(geom)
        else:
            g = geom

        # Transforming the geometry with our Coordinate Transformation object,
        # but only if the class variable `transform` is set w/a CoordTransform
        # object.
        if self.transform:
            g.transform(self.transform)

        # Returning the WKT of the geometry.
        return g.wkt

    # #### Other model methods ####
    def coord_transform(self):
        "Return the coordinate transformation object."
        SpatialRefSys = self.spatial_backend.spatial_ref_sys()
        try:
            # Getting the target spatial reference system
            target_srs = SpatialRefSys.objects.using(self.using).get(srid=self.geo_field.srid).srs

            # Creating the CoordTransform object
            return CoordTransform(self.source_srs, target_srs)
        except Exception as exc:
            raise LayerMapError(
                'Could not translate between the data source and model geometry.'
            ) from exc

    def geometry_field(self):
        "Return the GeometryField instance associated with the geographic column."
        # Use `get_field()` on the model's options so that we
        # get the correct field instance if there's model inheritance.
        opts = self.model._meta
        return opts.get_field(self.geom_field)

    def make_multi(self, geom_type, model_field):
        """
        Given the OGRGeomType for a geometry and its associated GeometryField,
        determine whether the geometry should be turned into a
        GeometryCollection.
        """
        return (geom_type.num in self.MULTI_TYPES and
                model_field.__class__.__name__ == 'Multi%s' % geom_type.django)

    def save(self, verbose=False, fid_range=False, step=False,
             progress=False, silent=False, stream=sys.stdout, strict=False):
        """
        Save the contents from the OGR DataSource Layer into the database
        according to the mapping dictionary given at initialization.

        Keyword Parameters:
         verbose:
           If set, information will be printed subsequent to each model save
           executed on the database.

         fid_range:
           May be set with a slice or tuple of (begin, end) feature IDs to map
           from the data source.  In other words, this keyword enables the
           user to selectively import a subset range of features in the
           geographic data source.

         step:
           If set with an integer, transactions will occur at every step
           interval.  For example, if step=1000, a commit would occur after
           the 1,000th feature, the 2,000th feature etc.

         progress:
           When this keyword is set, status information will be printed giving
           the number of features processed and successfully saved.  By
           default, progress information will be printed every 1000 features
           processed, however, this default may be overridden by setting this
           keyword with an integer for the desired interval.

         stream:
           Status information will be written to this file handle.  Defaults
           to using `sys.stdout`, but any object with a `write` method is
           supported.

         silent:
           By default, non-fatal error notifications are printed to stdout,
           but this keyword may be set to disable these notifications.
strict: Execution of the model mapping will cease upon the first error encountered. The default behavior is to attempt to continue. """ # Getting the default Feature ID range. default_range = self.check_fid_range(fid_range) # Setting the progress interval, if requested. if progress: if progress is True or not isinstance(progress, int): progress_interval = 1000 else: progress_interval = progress def _save(feat_range=default_range, num_feat=0, num_saved=0): if feat_range: layer_iter = self.layer[feat_range] else: layer_iter = self.layer for feat in layer_iter: num_feat += 1 # Getting the keyword arguments try: kwargs = self.feature_kwargs(feat) except LayerMapError as msg: # Something borked the validation if strict: raise elif not silent: stream.write('Ignoring Feature ID %s because: %s\n' % (feat.fid, msg)) else: # Constructing the model using the keyword args is_update = False if self.unique: # If we want unique models on a particular field, handle the # geometry appropriately. try: # Getting the keyword arguments and retrieving # the unique model. u_kwargs = self.unique_kwargs(kwargs) m = self.model.objects.using(self.using).get(**u_kwargs) is_update = True # Getting the geometry (in OGR form), creating # one from the kwargs WKT, adding in additional # geometries, and update the attribute with the # just-updated geometry WKT. geom_value = getattr(m, self.geom_field) if geom_value is None: geom = OGRGeometry(kwargs[self.geom_field]) else: geom = geom_value.ogr new = OGRGeometry(kwargs[self.geom_field]) for g in new: geom.add(g) setattr(m, self.geom_field, geom.wkt) except ObjectDoesNotExist: # No unique model exists yet, create. m = self.model(**kwargs) else: m = self.model(**kwargs) try: # Attempting to save. m.save(using=self.using) num_saved += 1 if verbose: stream.write('%s: %s\n' % ('Updated' if is_update else 'Saved', m)) except Exception as msg: if strict: # Bailing out if the `strict` keyword is set. if not silent: stream.write( 'Failed to save the feature (id: %s) into the ' 'model with the keyword arguments:\n' % feat.fid ) stream.write('%s\n' % kwargs) raise elif not silent: stream.write('Failed to save %s:\n %s\nContinuing\n' % (kwargs, msg)) # Printing progress information, if requested. if progress and num_feat % progress_interval == 0: stream.write('Processed %d features, saved %d ...\n' % (num_feat, num_saved)) # Only used for status output purposes -- incremental saving uses the # values returned here. return num_saved, num_feat if self.transaction_decorator is not None: _save = self.transaction_decorator(_save) nfeat = self.layer.num_feat if step and isinstance(step, int) and step < nfeat: # Incremental saving is requested at the given interval (step) if default_range: raise LayerMapError('The `step` keyword may not be used in conjunction with the `fid_range` keyword.') beg, num_feat, num_saved = (0, 0, 0) indices = range(step, nfeat, step) n_i = len(indices) for i, end in enumerate(indices): # Constructing the slice to use for this step; the last slice is # special (e.g, [100:] instead of [90:100]). if i + 1 == n_i: step_slice = slice(beg, None) else: step_slice = slice(beg, end) try: num_feat, num_saved = _save(step_slice, num_feat, num_saved) beg = end except Exception: # Deliberately catch everything stream.write('%s\nFailed to save slice: %s\n' % ('=-' * 20, step_slice)) raise else: # Otherwise, just calling the previously defined _save() function. _save()
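# Usage sketch: mapping a shapefile into a model with the class above. The
# model, OGR field names, and path are assumptions.
from django.contrib.gis.utils import LayerMapping
from myapp.models import WorldBorder      # hypothetical model

world_mapping = {
    'name': 'NAME',                       # model field -> OGR field
    'mpoly': 'MULTIPOLYGON',              # geometry field -> OGR geometry type
}
lm = LayerMapping(WorldBorder, '/data/world_borders.shp', world_mapping,
                  encoding='iso-8859-1')
lm.save(strict=True, verbose=True)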
from django.contrib.gis.gdal import SpatialReference from django.db import DEFAULT_DB_ALIAS, connections def add_srs_entry(srs, auth_name='EPSG', auth_srid=None, ref_sys_name=None, database=None): """ Take a GDAL SpatialReference system and add its information to the `spatial_ref_sys` table of the spatial backend. Doing this enables database-level spatial transformations for the backend. Thus, this utility is useful for adding spatial reference systems not included by default with the backend: >>> from django.contrib.gis.utils import add_srs_entry >>> add_srs_entry(3857) Keyword Arguments: auth_name: This keyword may be customized with the value of the `auth_name` field. Defaults to 'EPSG'. auth_srid: This keyword may be customized with the value of the `auth_srid` field. Defaults to the SRID determined by GDAL. ref_sys_name: For SpatiaLite users only, sets the value of the `ref_sys_name` field. Defaults to the name determined by GDAL. database: The name of the database connection to use; the default is the value of `django.db.DEFAULT_DB_ALIAS` (at the time of this writing, its value is 'default'). """ database = database or DEFAULT_DB_ALIAS connection = connections[database] if not hasattr(connection.ops, 'spatial_version'): raise Exception('The `add_srs_entry` utility only works ' 'with spatial backends.') if not connection.features.supports_add_srs_entry: raise Exception('This utility does not support your database backend.') SpatialRefSys = connection.ops.spatial_ref_sys() # If argument is not a `SpatialReference` instance, use it as parameter # to construct a `SpatialReference` instance. if not isinstance(srs, SpatialReference): srs = SpatialReference(srs) if srs.srid is None: raise Exception('Spatial reference requires an SRID to be ' 'compatible with the spatial backend.') # Initializing the keyword arguments dictionary for both PostGIS # and SpatiaLite. kwargs = { 'srid': srs.srid, 'auth_name': auth_name, 'auth_srid': auth_srid or srs.srid, 'proj4text': srs.proj4, } # Backend-specific fields for the SpatialRefSys model. srs_field_names = {f.name for f in SpatialRefSys._meta.get_fields()} if 'srtext' in srs_field_names: kwargs['srtext'] = srs.wkt if 'ref_sys_name' in srs_field_names: # SpatiaLite specific kwargs['ref_sys_name'] = ref_sys_name or srs.name # Creating the spatial_ref_sys model. try: # Try getting via SRID only, because using all kwargs may # differ from exact wkt/proj in database. SpatialRefSys.objects.using(database).get(srid=srs.srid) except SpatialRefSys.DoesNotExist: SpatialRefSys.objects.using(database).create(**kwargs)
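# Editor's note -- a short sketch of the keyword arguments described in the
# docstring above. The SRID and the 'gis' database alias are illustrative
# assumptions.
#
#   from django.contrib.gis.gdal import SpatialReference
#   from django.contrib.gis.utils import add_srs_entry
#
#   # Pass a SpatialReference directly and target a non-default connection.
#   add_srs_entry(SpatialReference(4326), database='gis')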
def City(response): return { 'city': response.city.name, 'continent_code': response.continent.code, 'continent_name': response.continent.name, 'country_code': response.country.iso_code, 'country_name': response.country.name, 'dma_code': response.location.metro_code, 'is_in_european_union': response.country.is_in_european_union, 'latitude': response.location.latitude, 'longitude': response.location.longitude, 'postal_code': response.postal.code, 'region': response.subdivisions[0].iso_code if response.subdivisions else None, 'time_zone': response.location.time_zone, } def Country(response): return { 'country_code': response.country.iso_code, 'country_name': response.country.name, }
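# Editor's note -- these helpers flatten a geoip2 reader response into plain
# dictionaries. A hedged sketch of how a response might be produced; the
# database path and IP address are illustrative assumptions.
#
#   import geoip2.database
#
#   reader = geoip2.database.Reader('/data/GeoLite2-City.mmdb')
#   response = reader.city('203.0.113.10')
#   info = City(response)  # {'city': ..., 'country_code': ..., ...}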
from django import forms from django.contrib.gis.gdal import GDALException from django.contrib.gis.geos import GEOSException, GEOSGeometry from django.utils.translation import gettext_lazy as _ from .widgets import OpenLayersWidget class GeometryField(forms.Field): """ This is the basic form field for a Geometry. Any textual input that is accepted by GEOSGeometry is accepted by this form. By default, this includes WKT, HEXEWKB, WKB (in a buffer), and GeoJSON. """ widget = OpenLayersWidget geom_type = 'GEOMETRY' default_error_messages = { 'required': _('No geometry value provided.'), 'invalid_geom': _('Invalid geometry value.'), 'invalid_geom_type': _('Invalid geometry type.'), 'transform_error': _('An error occurred when transforming the geometry ' 'to the SRID of the geometry form field.'), } def __init__(self, *, srid=None, geom_type=None, **kwargs): self.srid = srid if geom_type is not None: self.geom_type = geom_type super().__init__(**kwargs) self.widget.attrs['geom_type'] = self.geom_type def to_python(self, value): """Transform the value to a Geometry object.""" if value in self.empty_values: return None if not isinstance(value, GEOSGeometry): if hasattr(self.widget, 'deserialize'): try: value = self.widget.deserialize(value) except GDALException: value = None else: try: value = GEOSGeometry(value) except (GEOSException, ValueError, TypeError): value = None if value is None: raise forms.ValidationError(self.error_messages['invalid_geom'], code='invalid_geom') # Try to set the srid if not value.srid: try: value.srid = self.widget.map_srid except AttributeError: if self.srid: value.srid = self.srid return value def clean(self, value): """ Validate that the input value can be converted to a Geometry object and return it. Raise a ValidationError if the value cannot be instantiated as a Geometry. """ geom = super().clean(value) if geom is None: return geom # Ensuring that the geometry is of the correct type (indicated # using the OGC string label). if str(geom.geom_type).upper() != self.geom_type and not self.geom_type == 'GEOMETRY': raise forms.ValidationError(self.error_messages['invalid_geom_type'], code='invalid_geom_type') # Transforming the geometry if the SRID was set. if self.srid and self.srid != -1 and self.srid != geom.srid: try: geom.transform(self.srid) except GEOSException: raise forms.ValidationError( self.error_messages['transform_error'], code='transform_error') return geom def has_changed(self, initial, data): """ Compare geographic value of data with its initial value. """ try: data = self.to_python(data) initial = self.to_python(initial) except forms.ValidationError: return True # Only do a geographic comparison if both values are available if initial and data: data.transform(initial.srid) # If the initial value was not added by the browser, the geometry # provided may be slightly different, the first time it is saved. # The comparison is done with a very low tolerance. return not initial.equals_exact(data, tolerance=0.000001) else: # Check for change of state of existence return bool(initial) != bool(data) class GeometryCollectionField(GeometryField): geom_type = 'GEOMETRYCOLLECTION' class PointField(GeometryField): geom_type = 'POINT' class MultiPointField(GeometryField): geom_type = 'MULTIPOINT' class LineStringField(GeometryField): geom_type = 'LINESTRING' class MultiLineStringField(GeometryField): geom_type = 'MULTILINESTRING' class PolygonField(GeometryField): geom_type = 'POLYGON' class MultiPolygonField(GeometryField): geom_type = 'MULTIPOLYGON'
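# Editor's note -- a small sketch of cleaning textual input with one of the
# fields above. Any representation GEOSGeometry accepts works (WKT shown);
# the coordinates are arbitrary, and the SRID ends up set via the widget's
# `map_srid` (or the field's `srid`) in `to_python()` above.
#
#   from django.contrib.gis import forms
#
#   field = forms.PointField(srid=4326)
#   geom = field.clean('POINT(5 23)')  # returns a GEOSGeometry
#   geom.srid                          # 4326 here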
import json

from django.contrib.gis.gdal import CoordTransform, SpatialReference
from django.core.serializers.base import SerializerDoesNotExist
from django.core.serializers.json import Serializer as JSONSerializer


class Serializer(JSONSerializer):
    """
    Convert a queryset to GeoJSON, http://geojson.org/
    """
    def _init_options(self):
        super()._init_options()
        self.geometry_field = self.json_kwargs.pop('geometry_field', None)
        self.srid = self.json_kwargs.pop('srid', 4326)
        if (self.selected_fields is not None and self.geometry_field is not None and
                self.geometry_field not in self.selected_fields):
            self.selected_fields = [*self.selected_fields, self.geometry_field]

    def start_serialization(self):
        self._init_options()
        self._cts = {}  # cache of CoordTransform objects
        self.stream.write(
            '{"type": "FeatureCollection", "crs": {"type": "name", "properties": {"name": "EPSG:%d"}},'
            ' "features": [' % self.srid)

    def end_serialization(self):
        self.stream.write(']}')

    def start_object(self, obj):
        super().start_object(obj)
        self._geometry = None
        if self.geometry_field is None:
            # Find the first declared geometry field
            for field in obj._meta.fields:
                if hasattr(field, 'geom_type'):
                    self.geometry_field = field.name
                    break

    def get_dump_object(self, obj):
        data = {
            "type": "Feature",
            "properties": self._current,
        }
        if ((self.selected_fields is None or 'pk' in self.selected_fields) and
                'pk' not in data["properties"]):
            data["properties"]["pk"] = obj._meta.pk.value_to_string(obj)
        if self._geometry:
            if self._geometry.srid != self.srid:
                # If needed, transform the geometry to the SRID of the global
                # GeoJSON output.
                if self._geometry.srid not in self._cts:
                    srs = SpatialReference(self.srid)
                    self._cts[self._geometry.srid] = CoordTransform(self._geometry.srs, srs)
                self._geometry.transform(self._cts[self._geometry.srid])
            # Parse the GeoJSON string instead of eval()'ing it -- safer, and
            # equivalent for the valid GeoJSON that OGR produces.
            data["geometry"] = json.loads(self._geometry.geojson)
        else:
            data["geometry"] = None
        return data

    def handle_field(self, obj, field):
        if field.name == self.geometry_field:
            self._geometry = field.value_from_object(obj)
        else:
            super().handle_field(obj, field)


class Deserializer:
    def __init__(self, *args, **kwargs):
        raise SerializerDoesNotExist("geojson is a serialization-only serializer")
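# Editor's note -- the serializer above is normally reached through the
# `django.core.serializers` framework. A sketch, assuming a `City` model
# with a `point` geometry field:
#
#   from django.core import serializers
#
#   geojson = serializers.serialize(
#       'geojson', City.objects.all(),
#       geometry_field='point', srid=4326,
#   )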
""" DataSource is a wrapper for the OGR Data Source object, which provides an interface for reading vector geometry data from many different file formats (including ESRI shapefiles). When instantiating a DataSource object, use the filename of a GDAL-supported data source. For example, a SHP file or a TIGER/Line file from the government. The ds_driver keyword is used internally when a ctypes pointer is passed in directly. Example: ds = DataSource('/home/foo/bar.shp') for layer in ds: for feature in layer: # Getting the geometry for the feature. g = feature.geom # Getting the 'description' field for the feature. desc = feature['description'] # We can also increment through all of the fields # attached to this feature. for field in feature: # Get the name of the field (e.g. 'description') nm = field.name # Get the type (integer) of the field, e.g. 0 => OFTInteger t = field.type # Returns the value the field; OFTIntegers return ints, # OFTReal returns floats, all else returns string. val = field.value """ from ctypes import byref from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.driver import Driver from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.gdal.layer import Layer from django.contrib.gis.gdal.prototypes import ds as capi from django.utils.encoding import force_bytes, force_str # For more information, see the OGR C API source code: # https://www.gdal.org/ogr__api_8h.html # # The OGR_DS_* routines are relevant here. class DataSource(GDALBase): "Wraps an OGR Data Source object." destructor = capi.destroy_ds def __init__(self, ds_input, ds_driver=False, write=False, encoding='utf-8'): # The write flag. if write: self._write = 1 else: self._write = 0 # See also https://trac.osgeo.org/gdal/wiki/rfc23_ogr_unicode self.encoding = encoding Driver.ensure_registered() if isinstance(ds_input, str): # The data source driver is a void pointer. ds_driver = Driver.ptr_type() try: # OGROpen will auto-detect the data source type. ds = capi.open_ds(force_bytes(ds_input), self._write, byref(ds_driver)) except GDALException: # Making the error message more clear rather than something # like "Invalid pointer returned from OGROpen". raise GDALException('Could not open the datasource at "%s"' % ds_input) elif isinstance(ds_input, self.ptr_type) and isinstance(ds_driver, Driver.ptr_type): ds = ds_input else: raise GDALException('Invalid data source input type: %s' % type(ds_input)) if ds: self.ptr = ds self.driver = Driver(ds_driver) else: # Raise an exception if the returned pointer is NULL raise GDALException('Invalid data source file "%s"' % ds_input) def __getitem__(self, index): "Allows use of the index [] operator to get a layer at the index." if isinstance(index, str): try: layer = capi.get_layer_by_name(self.ptr, force_bytes(index)) except GDALException: raise IndexError('Invalid OGR layer name given: %s.' % index) elif isinstance(index, int): if 0 <= index < self.layer_count: layer = capi.get_layer(self._ptr, index) else: raise IndexError('Index out of range when accessing layers in a datasource: %s.' % index) else: raise TypeError('Invalid index type: %s' % type(index)) return Layer(layer, self) def __len__(self): "Return the number of layers within the data source." return self.layer_count def __str__(self): "Return OGR GetName and Driver for the Data Source." return '%s (%s)' % (self.name, self.driver) @property def layer_count(self): "Return the number of layers in the data source." 
return capi.get_layer_count(self._ptr) @property def name(self): "Return the name of the data source." name = capi.get_ds_name(self._ptr) return force_str(name, self.encoding, strings_only=True)
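# Editor's note -- a sketch of the indexing behavior implemented in
# __getitem__() above; the shapefile path and layer name are assumptions.
#
#   from django.contrib.gis.gdal import DataSource
#
#   ds = DataSource('/data/cities.shp')
#   len(ds)               # number of layers, via layer_count
#   layer = ds[0]         # by integer index
#   layer = ds['cities']  # or by OGR layer name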
from django.contrib.gis.gdal.error import GDALException class OGRGeomType: "Encapsulate OGR Geometry Types." wkb25bit = -2147483648 # Dictionary of acceptable OGRwkbGeometryType s and their string names. _types = { 0: 'Unknown', 1: 'Point', 2: 'LineString', 3: 'Polygon', 4: 'MultiPoint', 5: 'MultiLineString', 6: 'MultiPolygon', 7: 'GeometryCollection', 100: 'None', 101: 'LinearRing', 102: 'PointZ', 1 + wkb25bit: 'Point25D', 2 + wkb25bit: 'LineString25D', 3 + wkb25bit: 'Polygon25D', 4 + wkb25bit: 'MultiPoint25D', 5 + wkb25bit: 'MultiLineString25D', 6 + wkb25bit: 'MultiPolygon25D', 7 + wkb25bit: 'GeometryCollection25D', } # Reverse type dictionary, keyed by lowercase of the name. _str_types = {v.lower(): k for k, v in _types.items()} def __init__(self, type_input): "Figure out the correct OGR Type based upon the input." if isinstance(type_input, OGRGeomType): num = type_input.num elif isinstance(type_input, str): type_input = type_input.lower() if type_input == 'geometry': type_input = 'unknown' num = self._str_types.get(type_input) if num is None: raise GDALException('Invalid OGR String Type "%s"' % type_input) elif isinstance(type_input, int): if type_input not in self._types: raise GDALException('Invalid OGR Integer Type: %d' % type_input) num = type_input else: raise TypeError('Invalid OGR input type given.') # Setting the OGR geometry type number. self.num = num def __str__(self): "Return the value of the name property." return self.name def __eq__(self, other): """ Do an equivalence test on the OGR type with the given other OGRGeomType, the short-hand string, or the integer. """ if isinstance(other, OGRGeomType): return self.num == other.num elif isinstance(other, str): return self.name.lower() == other.lower() elif isinstance(other, int): return self.num == other else: return False @property def name(self): "Return a short-hand string form of the OGR Geometry type." return self._types[self.num] @property def django(self): "Return the Django GeometryField for this OGR Type." s = self.name.replace('25D', '') if s in ('LinearRing', 'None'): return None elif s == 'Unknown': s = 'Geometry' elif s == 'PointZ': s = 'Point' return s + 'Field' def to_multi(self): """ Transform Point, LineString, Polygon, and their 25D equivalents to their Multi... counterpart. """ if self.name.startswith(('Point', 'LineString', 'Polygon')): self.num += 3
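# Editor's note -- a brief sketch of the equivalence and conversion behavior
# defined above.
#
#   from django.contrib.gis.gdal import OGRGeomType
#
#   gt = OGRGeomType('MultiPolygon')
#   gt.num                # 6
#   gt == 'multipolygon'  # True -- comparison is case-insensitive
#   gt.django             # 'MultiPolygonField'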
""" The OGRGeometry is a wrapper for using the OGR Geometry class (see https://www.gdal.org/classOGRGeometry.html). OGRGeometry may be instantiated when reading geometries from OGR Data Sources (e.g. SHP files), or when given OGC WKT (a string). While the 'full' API is not present yet, the API is "pythonic" unlike the traditional and "next-generation" OGR Python bindings. One major advantage OGR Geometries have over their GEOS counterparts is support for spatial reference systems and their transformation. Example: >>> from django.contrib.gis.gdal import OGRGeometry, OGRGeomType, SpatialReference >>> wkt1, wkt2 = 'POINT(-90 30)', 'POLYGON((0 0, 5 0, 5 5, 0 5)' >>> pnt = OGRGeometry(wkt1) >>> print(pnt) POINT (-90 30) >>> mpnt = OGRGeometry(OGRGeomType('MultiPoint'), SpatialReference('WGS84')) >>> mpnt.add(wkt1) >>> mpnt.add(wkt1) >>> print(mpnt) MULTIPOINT (-90 30,-90 30) >>> print(mpnt.srs.name) WGS 84 >>> print(mpnt.srs.proj) +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs >>> mpnt.transform(SpatialReference('NAD27')) >>> print(mpnt.proj) +proj=longlat +ellps=clrk66 +datum=NAD27 +no_defs >>> print(mpnt) MULTIPOINT (-89.999930378602485 29.999797886557641,-89.999930378602485 29.999797886557641) The OGRGeomType class is to make it easy to specify an OGR geometry type: >>> from django.contrib.gis.gdal import OGRGeomType >>> gt1 = OGRGeomType(3) # Using an integer for the type >>> gt2 = OGRGeomType('Polygon') # Using a string >>> gt3 = OGRGeomType('POLYGON') # It's case-insensitive >>> print(gt1 == 3, gt1 == 'Polygon') # Equivalence works w/non-OGRGeomType objects True True """ import sys from binascii import b2a_hex from ctypes import byref, c_char_p, c_double, c_ubyte, c_void_p, string_at from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.envelope import Envelope, OGREnvelope from django.contrib.gis.gdal.error import GDALException, SRSException from django.contrib.gis.gdal.geomtype import OGRGeomType from django.contrib.gis.gdal.prototypes import geom as capi, srs as srs_api from django.contrib.gis.gdal.srs import CoordTransform, SpatialReference from django.contrib.gis.geometry import hex_regex, json_regex, wkt_regex from django.utils.encoding import force_bytes # For more information, see the OGR C API source code: # https://www.gdal.org/ogr__api_8h.html # # The OGR_G_* routines are relevant here. class OGRGeometry(GDALBase): """Encapsulate an OGR geometry.""" destructor = capi.destroy_geom def __init__(self, geom_input, srs=None): """Initialize Geometry on either WKT or an OGR pointer as input.""" str_instance = isinstance(geom_input, str) # If HEX, unpack input to a binary buffer. if str_instance and hex_regex.match(geom_input): geom_input = memoryview(bytes.fromhex(geom_input)) str_instance = False # Constructing the geometry, if str_instance: wkt_m = wkt_regex.match(geom_input) json_m = json_regex.match(geom_input) if wkt_m: if wkt_m.group('srid'): # If there's EWKT, set the SRS w/value of the SRID. srs = int(wkt_m.group('srid')) if wkt_m.group('type').upper() == 'LINEARRING': # OGR_G_CreateFromWkt doesn't work with LINEARRING WKT. # See https://trac.osgeo.org/gdal/ticket/1992. g = capi.create_geom(OGRGeomType(wkt_m.group('type')).num) capi.import_wkt(g, byref(c_char_p(wkt_m.group('wkt').encode()))) else: g = capi.from_wkt(byref(c_char_p(wkt_m.group('wkt').encode())), None, byref(c_void_p())) elif json_m: g = self._from_json(geom_input.encode()) else: # Seeing if the input is a valid short-hand string # (e.g., 'Point', 'POLYGON'). 
OGRGeomType(geom_input) g = capi.create_geom(OGRGeomType(geom_input).num) elif isinstance(geom_input, memoryview): # WKB was passed in g = self._from_wkb(geom_input) elif isinstance(geom_input, OGRGeomType): # OGRGeomType was passed in, an empty geometry will be created. g = capi.create_geom(geom_input.num) elif isinstance(geom_input, self.ptr_type): # OGR pointer (c_void_p) was the input. g = geom_input else: raise GDALException('Invalid input type for OGR Geometry construction: %s' % type(geom_input)) # Now checking the Geometry pointer before finishing initialization # by setting the pointer for the object. if not g: raise GDALException('Cannot create OGR Geometry from input: %s' % geom_input) self.ptr = g # Assigning the SpatialReference object to the geometry, if valid. if srs: self.srs = srs # Setting the class depending upon the OGR Geometry Type self.__class__ = GEO_CLASSES[self.geom_type.num] # Pickle routines def __getstate__(self): srs = self.srs if srs: srs = srs.wkt else: srs = None return bytes(self.wkb), srs def __setstate__(self, state): wkb, srs = state ptr = capi.from_wkb(wkb, None, byref(c_void_p()), len(wkb)) if not ptr: raise GDALException('Invalid OGRGeometry loaded from pickled state.') self.ptr = ptr self.srs = srs @classmethod def _from_wkb(cls, geom_input): return capi.from_wkb(bytes(geom_input), None, byref(c_void_p()), len(geom_input)) @staticmethod def _from_json(geom_input): return capi.from_json(geom_input) @classmethod def from_bbox(cls, bbox): "Construct a Polygon from a bounding box (4-tuple)." x0, y0, x1, y1 = bbox return OGRGeometry('POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' % ( x0, y0, x0, y1, x1, y1, x1, y0, x0, y0)) @staticmethod def from_json(geom_input): return OGRGeometry(OGRGeometry._from_json(force_bytes(geom_input))) @classmethod def from_gml(cls, gml_string): return cls(capi.from_gml(force_bytes(gml_string))) # ### Geometry set-like operations ### # g = g1 | g2 def __or__(self, other): "Return the union of the two geometries." return self.union(other) # g = g1 & g2 def __and__(self, other): "Return the intersection of this Geometry and the other." return self.intersection(other) # g = g1 - g2 def __sub__(self, other): "Return the difference this Geometry and the other." return self.difference(other) # g = g1 ^ g2 def __xor__(self, other): "Return the symmetric difference of this Geometry and the other." return self.sym_difference(other) def __eq__(self, other): "Is this Geometry equal to the other?" return isinstance(other, OGRGeometry) and self.equals(other) def __str__(self): "WKT is used for the string representation." return self.wkt # #### Geometry Properties #### @property def dimension(self): "Return 0 for points, 1 for lines, and 2 for surfaces." return capi.get_dims(self.ptr) def _get_coord_dim(self): "Return the coordinate dimension of the Geometry." return capi.get_coord_dim(self.ptr) def _set_coord_dim(self, dim): "Set the coordinate dimension of this Geometry." if dim not in (2, 3): raise ValueError('Geometry dimension must be either 2 or 3') capi.set_coord_dim(self.ptr, dim) coord_dim = property(_get_coord_dim, _set_coord_dim) @property def geom_count(self): "Return the number of elements in this Geometry." return capi.get_geom_count(self.ptr) @property def point_count(self): "Return the number of Points in this Geometry." 
return capi.get_point_count(self.ptr) @property def num_points(self): "Alias for `point_count` (same name method in GEOS API.)" return self.point_count @property def num_coords(self): "Alias for `point_count`." return self.point_count @property def geom_type(self): "Return the Type for this Geometry." return OGRGeomType(capi.get_geom_type(self.ptr)) @property def geom_name(self): "Return the Name of this Geometry." return capi.get_geom_name(self.ptr) @property def area(self): "Return the area for a LinearRing, Polygon, or MultiPolygon; 0 otherwise." return capi.get_area(self.ptr) @property def envelope(self): "Return the envelope for this Geometry." # TODO: Fix Envelope() for Point geometries. return Envelope(capi.get_envelope(self.ptr, byref(OGREnvelope()))) @property def empty(self): return capi.is_empty(self.ptr) @property def extent(self): "Return the envelope as a 4-tuple, instead of as an Envelope object." return self.envelope.tuple # #### SpatialReference-related Properties #### # The SRS property def _get_srs(self): "Return the Spatial Reference for this Geometry." try: srs_ptr = capi.get_geom_srs(self.ptr) return SpatialReference(srs_api.clone_srs(srs_ptr)) except SRSException: return None def _set_srs(self, srs): "Set the SpatialReference for this geometry." # Do not have to clone the `SpatialReference` object pointer because # when it is assigned to this `OGRGeometry` it's internal OGR # reference count is incremented, and will likewise be released # (decremented) when this geometry's destructor is called. if isinstance(srs, SpatialReference): srs_ptr = srs.ptr elif isinstance(srs, (int, str)): sr = SpatialReference(srs) srs_ptr = sr.ptr elif srs is None: srs_ptr = None else: raise TypeError('Cannot assign spatial reference with object of type: %s' % type(srs)) capi.assign_srs(self.ptr, srs_ptr) srs = property(_get_srs, _set_srs) # The SRID property def _get_srid(self): srs = self.srs if srs: return srs.srid return None def _set_srid(self, srid): if isinstance(srid, int) or srid is None: self.srs = srid else: raise TypeError('SRID must be set with an integer.') srid = property(_get_srid, _set_srid) # #### Output Methods #### def _geos_ptr(self): from django.contrib.gis.geos import GEOSGeometry return GEOSGeometry._from_wkb(self.wkb) @property def geos(self): "Return a GEOSGeometry object from this OGRGeometry." from django.contrib.gis.geos import GEOSGeometry return GEOSGeometry(self._geos_ptr(), self.srid) @property def gml(self): "Return the GML representation of the Geometry." return capi.to_gml(self.ptr) @property def hex(self): "Return the hexadecimal representation of the WKB (a string)." return b2a_hex(self.wkb).upper() @property def json(self): """ Return the GeoJSON representation of this Geometry. """ return capi.to_json(self.ptr) geojson = json @property def kml(self): "Return the KML representation of the Geometry." return capi.to_kml(self.ptr, None) @property def wkb_size(self): "Return the size of the WKB buffer." return capi.get_wkbsize(self.ptr) @property def wkb(self): "Return the WKB representation of the Geometry." if sys.byteorder == 'little': byteorder = 1 # wkbNDR (from ogr_core.h) else: byteorder = 0 # wkbXDR sz = self.wkb_size # Creating the unsigned character buffer, and passing it in by reference. buf = (c_ubyte * sz)() capi.to_wkb(self.ptr, byteorder, byref(buf)) # Returning a buffer of the string at the pointer. return memoryview(string_at(buf, sz)) @property def wkt(self): "Return the WKT representation of the Geometry." 
return capi.to_wkt(self.ptr, byref(c_char_p())) @property def ewkt(self): "Return the EWKT representation of the Geometry." srs = self.srs if srs and srs.srid: return 'SRID=%s;%s' % (srs.srid, self.wkt) else: return self.wkt # #### Geometry Methods #### def clone(self): "Clone this OGR Geometry." return OGRGeometry(capi.clone_geom(self.ptr), self.srs) def close_rings(self): """ If there are any rings within this geometry that have not been closed, this routine will do so by adding the starting point at the end. """ # Closing the open rings. capi.geom_close_rings(self.ptr) def transform(self, coord_trans, clone=False): """ Transform this geometry to a different spatial reference system. May take a CoordTransform object, a SpatialReference object, string WKT or PROJ.4, and/or an integer SRID. By default, return nothing and transform the geometry in-place. However, if the `clone` keyword is set, return a transformed clone of this geometry. """ if clone: klone = self.clone() klone.transform(coord_trans) return klone # Depending on the input type, use the appropriate OGR routine # to perform the transformation. if isinstance(coord_trans, CoordTransform): capi.geom_transform(self.ptr, coord_trans.ptr) elif isinstance(coord_trans, SpatialReference): capi.geom_transform_to(self.ptr, coord_trans.ptr) elif isinstance(coord_trans, (int, str)): sr = SpatialReference(coord_trans) capi.geom_transform_to(self.ptr, sr.ptr) else: raise TypeError('Transform only accepts CoordTransform, ' 'SpatialReference, string, and integer objects.') # #### Topology Methods #### def _topology(self, func, other): """A generalized function for topology operations, takes a GDAL function and the other geometry to perform the operation on.""" if not isinstance(other, OGRGeometry): raise TypeError('Must use another OGRGeometry object for topology operations!') # Returning the output of the given function with the other geometry's # pointer. return func(self.ptr, other.ptr) def intersects(self, other): "Return True if this geometry intersects with the other." return self._topology(capi.ogr_intersects, other) def equals(self, other): "Return True if this geometry is equivalent to the other." return self._topology(capi.ogr_equals, other) def disjoint(self, other): "Return True if this geometry and the other are spatially disjoint." return self._topology(capi.ogr_disjoint, other) def touches(self, other): "Return True if this geometry touches the other." return self._topology(capi.ogr_touches, other) def crosses(self, other): "Return True if this geometry crosses the other." return self._topology(capi.ogr_crosses, other) def within(self, other): "Return True if this geometry is within the other." return self._topology(capi.ogr_within, other) def contains(self, other): "Return True if this geometry contains the other." return self._topology(capi.ogr_contains, other) def overlaps(self, other): "Return True if this geometry overlaps the other." return self._topology(capi.ogr_overlaps, other) # #### Geometry-generation Methods #### def _geomgen(self, gen_func, other=None): "A helper routine for the OGR routines that generate geometries." if isinstance(other, OGRGeometry): return OGRGeometry(gen_func(self.ptr, other.ptr), self.srs) else: return OGRGeometry(gen_func(self.ptr), self.srs) @property def boundary(self): "Return the boundary of this geometry." return self._geomgen(capi.get_boundary) @property def convex_hull(self): """ Return the smallest convex Polygon that contains all the points in this Geometry. 
""" return self._geomgen(capi.geom_convex_hull) def difference(self, other): """ Return a new geometry consisting of the region which is the difference of this geometry and the other. """ return self._geomgen(capi.geom_diff, other) def intersection(self, other): """ Return a new geometry consisting of the region of intersection of this geometry and the other. """ return self._geomgen(capi.geom_intersection, other) def sym_difference(self, other): """ Return a new geometry which is the symmetric difference of this geometry and the other. """ return self._geomgen(capi.geom_sym_diff, other) def union(self, other): """ Return a new geometry consisting of the region which is the union of this geometry and the other. """ return self._geomgen(capi.geom_union, other) # The subclasses for OGR Geometry. class Point(OGRGeometry): def _geos_ptr(self): from django.contrib.gis import geos return geos.Point._create_empty() if self.empty else super()._geos_ptr() @classmethod def _create_empty(cls): return capi.create_geom(OGRGeomType('point').num) @property def x(self): "Return the X coordinate for this Point." return capi.getx(self.ptr, 0) @property def y(self): "Return the Y coordinate for this Point." return capi.gety(self.ptr, 0) @property def z(self): "Return the Z coordinate for this Point." if self.coord_dim == 3: return capi.getz(self.ptr, 0) @property def tuple(self): "Return the tuple of this point." if self.coord_dim == 2: return (self.x, self.y) elif self.coord_dim == 3: return (self.x, self.y, self.z) coords = tuple class LineString(OGRGeometry): def __getitem__(self, index): "Return the Point at the given index." if 0 <= index < self.point_count: x, y, z = c_double(), c_double(), c_double() capi.get_point(self.ptr, index, byref(x), byref(y), byref(z)) dim = self.coord_dim if dim == 1: return (x.value,) elif dim == 2: return (x.value, y.value) elif dim == 3: return (x.value, y.value, z.value) else: raise IndexError('Index out of range when accessing points of a line string: %s.' % index) def __len__(self): "Return the number of points in the LineString." return self.point_count @property def tuple(self): "Return the tuple representation of this LineString." return tuple(self[i] for i in range(len(self))) coords = tuple def _listarr(self, func): """ Internal routine that returns a sequence (list) corresponding with the given function. """ return [func(self.ptr, i) for i in range(len(self))] @property def x(self): "Return the X coordinates in a list." return self._listarr(capi.getx) @property def y(self): "Return the Y coordinates in a list." return self._listarr(capi.gety) @property def z(self): "Return the Z coordinates in a list." if self.coord_dim == 3: return self._listarr(capi.getz) # LinearRings are used in Polygons. class LinearRing(LineString): pass class Polygon(OGRGeometry): def __len__(self): "Return the number of interior rings in this Polygon." return self.geom_count def __getitem__(self, index): "Get the ring at the specified index." if 0 <= index < self.geom_count: return OGRGeometry(capi.clone_geom(capi.get_geom_ref(self.ptr, index)), self.srs) else: raise IndexError('Index out of range when accessing rings of a polygon: %s.' % index) # Polygon Properties @property def shell(self): "Return the shell of this Polygon." return self[0] # First ring is the shell exterior_ring = shell @property def tuple(self): "Return a tuple of LinearRing coordinate tuples." 
return tuple(self[i].tuple for i in range(self.geom_count)) coords = tuple @property def point_count(self): "Return the number of Points in this Polygon." # Summing up the number of points in each ring of the Polygon. return sum(self[i].point_count for i in range(self.geom_count)) @property def centroid(self): "Return the centroid (a Point) of this Polygon." # The centroid is a Point, create a geometry for this. p = OGRGeometry(OGRGeomType('Point')) capi.get_centroid(self.ptr, p.ptr) return p # Geometry Collection base class. class GeometryCollection(OGRGeometry): "The Geometry Collection class." def __getitem__(self, index): "Get the Geometry at the specified index." if 0 <= index < self.geom_count: return OGRGeometry(capi.clone_geom(capi.get_geom_ref(self.ptr, index)), self.srs) else: raise IndexError('Index out of range when accessing geometry in a collection: %s.' % index) def __len__(self): "Return the number of geometries in this Geometry Collection." return self.geom_count def add(self, geom): "Add the geometry to this Geometry Collection." if isinstance(geom, OGRGeometry): if isinstance(geom, self.__class__): for g in geom: capi.add_geom(self.ptr, g.ptr) else: capi.add_geom(self.ptr, geom.ptr) elif isinstance(geom, str): tmp = OGRGeometry(geom) capi.add_geom(self.ptr, tmp.ptr) else: raise GDALException('Must add an OGRGeometry.') @property def point_count(self): "Return the number of Points in this Geometry Collection." # Summing up the number of points in each geometry in this collection return sum(self[i].point_count for i in range(self.geom_count)) @property def tuple(self): "Return a tuple representation of this Geometry Collection." return tuple(self[i].tuple for i in range(self.geom_count)) coords = tuple # Multiple Geometry types. class MultiPoint(GeometryCollection): pass class MultiLineString(GeometryCollection): pass class MultiPolygon(GeometryCollection): pass # Class mapping dictionary (using the OGRwkbGeometryType as the key) GEO_CLASSES = { 1: Point, 2: LineString, 3: Polygon, 4: MultiPoint, 5: MultiLineString, 6: MultiPolygon, 7: GeometryCollection, 101: LinearRing, 1 + OGRGeomType.wkb25bit: Point, 2 + OGRGeomType.wkb25bit: LineString, 3 + OGRGeomType.wkb25bit: Polygon, 4 + OGRGeomType.wkb25bit: MultiPoint, 5 + OGRGeomType.wkb25bit: MultiLineString, 6 + OGRGeomType.wkb25bit: MultiPolygon, 7 + OGRGeomType.wkb25bit: GeometryCollection, }
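# Editor's note -- a condensed sketch tying the classes above together:
# construction from an extent, in-place reprojection, and conversion to
# GEOS. The EPSG codes are ordinary SRIDs used for illustration.
#
#   from django.contrib.gis.gdal import OGRGeometry
#
#   poly = OGRGeometry.from_bbox((0, 0, 5, 5))  # Polygon from an extent
#   poly.srs = 4326                              # assign an SRS by SRID
#   poly.transform(3857)                         # in-place reprojection
#   merc = poly.geos                             # convert to GEOSGeometry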
from ctypes import byref, c_int from datetime import date, datetime, time from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.gdal.prototypes import ds as capi from django.utils.encoding import force_str # For more information, see the OGR C API source code: # https://www.gdal.org/ogr__api_8h.html # # The OGR_Fld_* routines are relevant here. class Field(GDALBase): """ Wrap an OGR Field. Needs to be instantiated from a Feature object. """ def __init__(self, feat, index): """ Initialize on the feature object and the integer index of the field within the feature. """ # Setting the feature pointer and index. self._feat = feat self._index = index # Getting the pointer for this field. fld_ptr = capi.get_feat_field_defn(feat.ptr, index) if not fld_ptr: raise GDALException('Cannot create OGR Field, invalid pointer given.') self.ptr = fld_ptr # Setting the class depending upon the OGR Field Type (OFT) self.__class__ = OGRFieldTypes[self.type] def __str__(self): "Return the string representation of the Field." return str(self.value).strip() # #### Field Methods #### def as_double(self): "Retrieve the Field's value as a double (float)." return capi.get_field_as_double(self._feat.ptr, self._index) if self.is_set else None def as_int(self, is_64=False): "Retrieve the Field's value as an integer." if is_64: return capi.get_field_as_integer64(self._feat.ptr, self._index) if self.is_set else None else: return capi.get_field_as_integer(self._feat.ptr, self._index) if self.is_set else None def as_string(self): "Retrieve the Field's value as a string." if not self.is_set: return None string = capi.get_field_as_string(self._feat.ptr, self._index) return force_str(string, encoding=self._feat.encoding, strings_only=True) def as_datetime(self): "Retrieve the Field's value as a tuple of date & time components." if not self.is_set: return None yy, mm, dd, hh, mn, ss, tz = [c_int() for i in range(7)] status = capi.get_field_as_datetime( self._feat.ptr, self._index, byref(yy), byref(mm), byref(dd), byref(hh), byref(mn), byref(ss), byref(tz)) if status: return (yy, mm, dd, hh, mn, ss, tz) else: raise GDALException('Unable to retrieve date & time information from the field.') # #### Field Properties #### @property def is_set(self): "Return True if the value of this field isn't null, False otherwise." return capi.is_field_set(self._feat.ptr, self._index) @property def name(self): "Return the name of this Field." name = capi.get_field_name(self.ptr) return force_str(name, encoding=self._feat.encoding, strings_only=True) @property def precision(self): "Return the precision of this Field." return capi.get_field_precision(self.ptr) @property def type(self): "Return the OGR type of this Field." return capi.get_field_type(self.ptr) @property def type_name(self): "Return the OGR field type name for this Field." return capi.get_field_type_name(self.type) @property def value(self): "Return the value of this Field." # Default is to get the field as a string. return self.as_string() @property def width(self): "Return the width of this Field." return capi.get_field_width(self.ptr) # ### The Field sub-classes for each OGR Field type. ### class OFTInteger(Field): _bit64 = False @property def value(self): "Return an integer contained in this field." return self.as_int(self._bit64) @property def type(self): """ GDAL uses OFTReals to represent OFTIntegers in created shapefiles -- forcing the type here since the underlying field type may actually be OFTReal. 
""" return 0 class OFTReal(Field): @property def value(self): "Return a float contained in this field." return self.as_double() # String & Binary fields, just subclasses class OFTString(Field): pass class OFTWideString(Field): pass class OFTBinary(Field): pass # OFTDate, OFTTime, OFTDateTime fields. class OFTDate(Field): @property def value(self): "Return a Python `date` object for the OFTDate field." try: yy, mm, dd, hh, mn, ss, tz = self.as_datetime() return date(yy.value, mm.value, dd.value) except (TypeError, ValueError, GDALException): return None class OFTDateTime(Field): @property def value(self): "Return a Python `datetime` object for this OFTDateTime field." # TODO: Adapt timezone information. # See https://lists.osgeo.org/pipermail/gdal-dev/2006-February/007990.html # The `tz` variable has values of: 0=unknown, 1=localtime (ambiguous), # 100=GMT, 104=GMT+1, 80=GMT-5, etc. try: yy, mm, dd, hh, mn, ss, tz = self.as_datetime() return datetime(yy.value, mm.value, dd.value, hh.value, mn.value, ss.value) except (TypeError, ValueError, GDALException): return None class OFTTime(Field): @property def value(self): "Return a Python `time` object for this OFTTime field." try: yy, mm, dd, hh, mn, ss, tz = self.as_datetime() return time(hh.value, mn.value, ss.value) except (ValueError, GDALException): return None class OFTInteger64(OFTInteger): _bit64 = True # List fields are also just subclasses class OFTIntegerList(Field): pass class OFTRealList(Field): pass class OFTStringList(Field): pass class OFTWideStringList(Field): pass class OFTInteger64List(Field): pass # Class mapping dictionary for OFT Types and reverse mapping. OGRFieldTypes = { 0: OFTInteger, 1: OFTIntegerList, 2: OFTReal, 3: OFTRealList, 4: OFTString, 5: OFTStringList, 6: OFTWideString, 7: OFTWideStringList, 8: OFTBinary, 9: OFTDate, 10: OFTTime, 11: OFTDateTime, # New 64-bit integer types in GDAL 2 12: OFTInteger64, 13: OFTInteger64List, } ROGRFieldTypes = {cls: num for num, cls in OGRFieldTypes.items()}
from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.gdal.field import Field from django.contrib.gis.gdal.geometries import OGRGeometry, OGRGeomType from django.contrib.gis.gdal.prototypes import ds as capi, geom as geom_api from django.utils.encoding import force_bytes, force_str # For more information, see the OGR C API source code: # https://www.gdal.org/ogr__api_8h.html # # The OGR_F_* routines are relevant here. class Feature(GDALBase): """ This class that wraps an OGR Feature, needs to be instantiated from a Layer object. """ destructor = capi.destroy_feature def __init__(self, feat, layer): """ Initialize Feature from a pointer and its Layer object. """ if not feat: raise GDALException('Cannot create OGR Feature, invalid pointer given.') self.ptr = feat self._layer = layer def __getitem__(self, index): """ Get the Field object at the specified index, which may be either an integer or the Field's string label. Note that the Field object is not the field's _value_ -- use the `get` method instead to retrieve the value (e.g. an integer) instead of a Field instance. """ if isinstance(index, str): i = self.index(index) elif 0 <= index < self.num_fields: i = index else: raise IndexError('Index out of range when accessing field in a feature: %s.' % index) return Field(self, i) def __len__(self): "Return the count of fields in this feature." return self.num_fields def __str__(self): "The string name of the feature." return 'Feature FID %d in Layer<%s>' % (self.fid, self.layer_name) def __eq__(self, other): "Do equivalence testing on the features." return bool(capi.feature_equal(self.ptr, other._ptr)) # #### Feature Properties #### @property def encoding(self): return self._layer._ds.encoding @property def fid(self): "Return the feature identifier." return capi.get_fid(self.ptr) @property def layer_name(self): "Return the name of the layer for the feature." name = capi.get_feat_name(self._layer._ldefn) return force_str(name, self.encoding, strings_only=True) @property def num_fields(self): "Return the number of fields in the Feature." return capi.get_feat_field_count(self.ptr) @property def fields(self): "Return a list of fields in the Feature." return [ force_str( capi.get_field_name(capi.get_field_defn(self._layer._ldefn, i)), self.encoding, strings_only=True ) for i in range(self.num_fields) ] @property def geom(self): "Return the OGR Geometry for this Feature." # Retrieving the geometry pointer for the feature. geom_ptr = capi.get_feat_geom_ref(self.ptr) return OGRGeometry(geom_api.clone_geom(geom_ptr)) @property def geom_type(self): "Return the OGR Geometry Type for this Feature." return OGRGeomType(capi.get_fd_geom_type(self._layer._ldefn)) # #### Feature Methods #### def get(self, field): """ Return the value of the field, instead of an instance of the Field object. May take a string of the field name or a Field object as parameters. """ field_name = getattr(field, 'name', field) return self[field_name].value def index(self, field_name): "Return the index of the given field name." i = capi.get_field_index(self.ptr, force_bytes(field_name)) if i < 0: raise IndexError('Invalid OFT field name given: %s.' % field_name) return i
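# Editor's note -- a sketch of reading a Feature via the properties and
# methods above; the 'NAME' field is an assumption about the data source.
#
#   for feat in layer:    # `layer` as in the Field sketch above
#       feat.fid          # the OGR feature ID
#       feat.get('NAME')  # the field *value*, not a Field instance
#       feat.geom.wkt     # the feature geometry as WKT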
""" This module houses the GDAL & SRS Exception objects, and the check_err() routine which checks the status code returned by GDAL/OGR methods. """ # #### GDAL & SRS Exceptions #### class GDALException(Exception): pass class SRSException(Exception): pass # #### GDAL/OGR error checking codes and routine #### # OGR Error Codes OGRERR_DICT = { 1: (GDALException, 'Not enough data.'), 2: (GDALException, 'Not enough memory.'), 3: (GDALException, 'Unsupported geometry type.'), 4: (GDALException, 'Unsupported operation.'), 5: (GDALException, 'Corrupt data.'), 6: (GDALException, 'OGR failure.'), 7: (SRSException, 'Unsupported SRS.'), 8: (GDALException, 'Invalid handle.'), } # CPL Error Codes # https://www.gdal.org/cpl__error_8h.html CPLERR_DICT = { 1: (GDALException, 'AppDefined'), 2: (GDALException, 'OutOfMemory'), 3: (GDALException, 'FileIO'), 4: (GDALException, 'OpenFailed'), 5: (GDALException, 'IllegalArg'), 6: (GDALException, 'NotSupported'), 7: (GDALException, 'AssertionFailed'), 8: (GDALException, 'NoWriteAccess'), 9: (GDALException, 'UserInterrupt'), 10: (GDALException, 'ObjectNull'), } ERR_NONE = 0 def check_err(code, cpl=False): """ Check the given CPL/OGRERR and raise an exception where appropriate. """ err_dict = CPLERR_DICT if cpl else OGRERR_DICT if code == ERR_NONE: return elif code in err_dict: e, msg = err_dict[code] raise e(msg) else: raise GDALException('Unknown error code: "%s"' % code)
""" The GDAL/OGR library uses an Envelope structure to hold the bounding box information for a geometry. The envelope (bounding box) contains two pairs of coordinates, one for the lower left coordinate and one for the upper right coordinate: +----------o Upper right; (max_x, max_y) | | | | | | Lower left (min_x, min_y) o----------+ """ from ctypes import Structure, c_double from django.contrib.gis.gdal.error import GDALException # The OGR definition of an Envelope is a C structure containing four doubles. # See the 'ogr_core.h' source file for more information: # https://www.gdal.org/ogr__core_8h_source.html class OGREnvelope(Structure): "Represent the OGREnvelope C Structure." _fields_ = [("MinX", c_double), ("MaxX", c_double), ("MinY", c_double), ("MaxY", c_double), ] class Envelope: """ The Envelope object is a C structure that contains the minimum and maximum X, Y coordinates for a rectangle bounding box. The naming of the variables is compatible with the OGR Envelope structure. """ def __init__(self, *args): """ The initialization function may take an OGREnvelope structure, 4-element tuple or list, or 4 individual arguments. """ if len(args) == 1: if isinstance(args[0], OGREnvelope): # OGREnvelope (a ctypes Structure) was passed in. self._envelope = args[0] elif isinstance(args[0], (tuple, list)): # A tuple was passed in. if len(args[0]) != 4: raise GDALException('Incorrect number of tuple elements (%d).' % len(args[0])) else: self._from_sequence(args[0]) else: raise TypeError('Incorrect type of argument: %s' % type(args[0])) elif len(args) == 4: # Individual parameters passed in. # Thanks to ww for the help self._from_sequence([float(a) for a in args]) else: raise GDALException('Incorrect number (%d) of arguments.' % len(args)) # Checking the x,y coordinates if self.min_x > self.max_x: raise GDALException('Envelope minimum X > maximum X.') if self.min_y > self.max_y: raise GDALException('Envelope minimum Y > maximum Y.') def __eq__(self, other): """ Return True if the envelopes are equivalent; can compare against other Envelopes and 4-tuples. """ if isinstance(other, Envelope): return (self.min_x == other.min_x) and (self.min_y == other.min_y) and \ (self.max_x == other.max_x) and (self.max_y == other.max_y) elif isinstance(other, tuple) and len(other) == 4: return (self.min_x == other[0]) and (self.min_y == other[1]) and \ (self.max_x == other[2]) and (self.max_y == other[3]) else: raise GDALException('Equivalence testing only works with other Envelopes.') def __str__(self): "Return a string representation of the tuple." return str(self.tuple) def _from_sequence(self, seq): "Initialize the C OGR Envelope structure from the given sequence." self._envelope = OGREnvelope() self._envelope.MinX = seq[0] self._envelope.MinY = seq[1] self._envelope.MaxX = seq[2] self._envelope.MaxY = seq[3] def expand_to_include(self, *args): """ Modify the envelope to expand to include the boundaries of the passed-in 2-tuple (a point), 4-tuple (an extent) or envelope. """ # We provide a number of different signatures for this method, # and the logic here is all about converting them into a # 4-tuple single parameter which does the actual work of # expanding the envelope. if len(args) == 1: if isinstance(args[0], Envelope): return self.expand_to_include(args[0].tuple) elif hasattr(args[0], 'x') and hasattr(args[0], 'y'): return self.expand_to_include(args[0].x, args[0].y, args[0].x, args[0].y) elif isinstance(args[0], (tuple, list)): # A tuple was passed in. 
                if len(args[0]) == 2:
                    return self.expand_to_include((args[0][0], args[0][1], args[0][0], args[0][1]))
                elif len(args[0]) == 4:
                    (minx, miny, maxx, maxy) = args[0]
                    if minx < self._envelope.MinX:
                        self._envelope.MinX = minx
                    if miny < self._envelope.MinY:
                        self._envelope.MinY = miny
                    if maxx > self._envelope.MaxX:
                        self._envelope.MaxX = maxx
                    if maxy > self._envelope.MaxY:
                        self._envelope.MaxY = maxy
                else:
                    raise GDALException('Incorrect number of tuple elements (%d).' % len(args[0]))
            else:
                raise TypeError('Incorrect type of argument: %s' % type(args[0]))
        elif len(args) == 2:
            # An x and a y parameter were passed in
            return self.expand_to_include((args[0], args[1], args[0], args[1]))
        elif len(args) == 4:
            # Individual parameters passed in.
            return self.expand_to_include(args)
        else:
            # Report the number of arguments actually given, not len(args[0]),
            # which may not even support len() on this path.
            raise GDALException('Incorrect number (%d) of arguments.' % len(args))

    @property
    def min_x(self):
        "Return the value of the minimum X coordinate."
        return self._envelope.MinX

    @property
    def min_y(self):
        "Return the value of the minimum Y coordinate."
        return self._envelope.MinY

    @property
    def max_x(self):
        "Return the value of the maximum X coordinate."
        return self._envelope.MaxX

    @property
    def max_y(self):
        "Return the value of the maximum Y coordinate."
        return self._envelope.MaxY

    @property
    def ur(self):
        "Return the upper-right coordinate."
        return (self.max_x, self.max_y)

    @property
    def ll(self):
        "Return the lower-left coordinate."
        return (self.min_x, self.min_y)

    @property
    def tuple(self):
        "Return a tuple representing the envelope."
        return (self.min_x, self.min_y, self.max_x, self.max_y)

    @property
    def wkt(self):
        "Return WKT representing a Polygon for this envelope."
        # TODO: Fix significant figures.
        return 'POLYGON((%s %s,%s %s,%s %s,%s %s,%s %s))' % \
               (self.min_x, self.min_y, self.min_x, self.max_y,
                self.max_x, self.max_y, self.max_x, self.min_y,
                self.min_x, self.min_y)
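# Editor's note -- a sketch of the growth logic implemented in
# expand_to_include() above; the coordinates are arbitrary.
#
#   from django.contrib.gis.gdal import Envelope
#
#   env = Envelope(0, 0, 5, 5)
#   env.expand_to_include(10, 10)  # two-argument form: treated as a point
#   env.tuple                      # (0.0, 0.0, 10.0, 10.0)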
from ctypes import c_void_p from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.gdal.prototypes import ds as vcapi, raster as rcapi from django.utils.encoding import force_bytes, force_str class Driver(GDALBase): """ Wrap a GDAL/OGR Data Source Driver. For more information, see the C API source code: https://www.gdal.org/gdal_8h.html - https://www.gdal.org/ogr__api_8h.html """ # Case-insensitive aliases for some GDAL/OGR Drivers. # For a complete list of original driver names see # https://www.gdal.org/ogr_formats.html (vector) # https://www.gdal.org/formats_list.html (raster) _alias = { # vector 'esri': 'ESRI Shapefile', 'shp': 'ESRI Shapefile', 'shape': 'ESRI Shapefile', 'tiger': 'TIGER', 'tiger/line': 'TIGER', # raster 'tiff': 'GTiff', 'tif': 'GTiff', 'jpeg': 'JPEG', 'jpg': 'JPEG', } def __init__(self, dr_input): """ Initialize an GDAL/OGR driver on either a string or integer input. """ if isinstance(dr_input, str): # If a string name of the driver was passed in self.ensure_registered() # Checking the alias dictionary (case-insensitive) to see if an # alias exists for the given driver. if dr_input.lower() in self._alias: name = self._alias[dr_input.lower()] else: name = dr_input # Attempting to get the GDAL/OGR driver by the string name. for iface in (vcapi, rcapi): driver = c_void_p(iface.get_driver_by_name(force_bytes(name))) if driver: break elif isinstance(dr_input, int): self.ensure_registered() for iface in (vcapi, rcapi): driver = iface.get_driver(dr_input) if driver: break elif isinstance(dr_input, c_void_p): driver = dr_input else: raise GDALException('Unrecognized input type for GDAL/OGR Driver: %s' % type(dr_input)) # Making sure we get a valid pointer to the OGR Driver if not driver: raise GDALException('Could not initialize GDAL/OGR Driver on input: %s' % dr_input) self.ptr = driver def __str__(self): return self.name @classmethod def ensure_registered(cls): """ Attempt to register all the data source drivers. """ # Only register all if the driver counts are 0 (or else all drivers # will be registered over and over again) if not vcapi.get_driver_count(): vcapi.register_all() if not rcapi.get_driver_count(): rcapi.register_all() @classmethod def driver_count(cls): """ Return the number of GDAL/OGR data source drivers registered. """ return vcapi.get_driver_count() + rcapi.get_driver_count() @property def name(self): """ Return description/name string for this driver. """ return force_str(rcapi.get_driver_description(self.ptr))
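# Editor's note -- a sketch of the alias lookup defined above; it requires a
# working GDAL install, since the driver registry lives in the C library.
#
#   from django.contrib.gis.gdal.driver import Driver
#
#   drv = Driver('shp')  # alias resolves to 'ESRI Shapefile'
#   str(drv)             # 'ESRI Shapefile'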
import logging
import os
import re
from ctypes import CDLL, CFUNCTYPE, c_char_p, c_int
from ctypes.util import find_library

from django.contrib.gis.gdal.error import GDALException
from django.core.exceptions import ImproperlyConfigured

logger = logging.getLogger('django.contrib.gis')

# Custom library path set?
try:
    from django.conf import settings
    lib_path = settings.GDAL_LIBRARY_PATH
except (AttributeError, ImportError, ImproperlyConfigured, OSError):
    lib_path = None

if lib_path:
    lib_names = None
elif os.name == 'nt':
    # Windows NT shared libraries
    lib_names = ['gdal204', 'gdal203', 'gdal202', 'gdal201', 'gdal20']
elif os.name == 'posix':
    # *NIX library names.
    lib_names = ['gdal', 'GDAL', 'gdal2.4.0', 'gdal2.3.0', 'gdal2.2.0', 'gdal2.1.0', 'gdal2.0.0']
else:
    raise ImproperlyConfigured('GDAL is unsupported on OS "%s".' % os.name)

# Using the ctypes `find_library` utility to find the
# path to the GDAL library from the list of library names.
if lib_names:
    for lib_name in lib_names:
        lib_path = find_library(lib_name)
        if lib_path is not None:
            break

if lib_path is None:
    raise ImproperlyConfigured(
        'Could not find the GDAL library (tried "%s"). Is GDAL installed? '
        'If it is, try setting GDAL_LIBRARY_PATH in your settings.'
        % '", "'.join(lib_names)
    )

# This loads the GDAL/OGR C library
lgdal = CDLL(lib_path)

# On Windows, the GDAL binaries have some OSR routines exported with
# STDCALL, while others are not.  Thus, the library will also need to
# be loaded up as WinDLL for said OSR functions that require the
# different calling convention.
if os.name == 'nt':
    from ctypes import WinDLL
    lwingdal = WinDLL(lib_path)


def std_call(func):
    """
    Return the correct STDCALL function for certain OSR routines on Win32
    platforms.
    """
    if os.name == 'nt':
        return lwingdal[func]
    else:
        return lgdal[func]


# #### Version-information functions. ####

# Return GDAL library version information with the given key.
_version_info = std_call('GDALVersionInfo')
_version_info.argtypes = [c_char_p]
_version_info.restype = c_char_p


def gdal_version():
    "Return only the GDAL version number information."
    return _version_info(b'RELEASE_NAME')


def gdal_full_version():
    "Return the full GDAL version information."
    # The key must be bytes -- the argtype is c_char_p, so passing a plain
    # str would raise ctypes.ArgumentError under Python 3.
    return _version_info(b'')


version_regex = re.compile(r'^(?P<major>\d+)\.(?P<minor>\d+)(\.(?P<subminor>\d+))?')


def gdal_version_info():
    ver = gdal_version().decode()
    m = version_regex.match(ver)
    if not m:
        raise GDALException('Could not parse GDAL version string "%s"' % ver)
    return {key: m.group(key) for key in ('major', 'minor', 'subminor')}


_verinfo = gdal_version_info()
GDAL_MAJOR_VERSION = int(_verinfo['major'])
GDAL_MINOR_VERSION = int(_verinfo['minor'])
GDAL_SUBMINOR_VERSION = _verinfo['subminor'] and int(_verinfo['subminor'])
GDAL_VERSION = (GDAL_MAJOR_VERSION, GDAL_MINOR_VERSION, GDAL_SUBMINOR_VERSION)
del _verinfo

# Set library error handling so that errors are logged
CPLErrorHandler = CFUNCTYPE(None, c_int, c_int, c_char_p)


def err_handler(error_class, error_number, message):
    logger.error('GDAL_ERROR %d: %s', error_number, message)


err_handler = CPLErrorHandler(err_handler)


def function(name, args, restype):
    func = std_call(name)
    func.argtypes = args
    func.restype = restype
    return func


set_error_handler = function('CPLSetErrorHandler', [CPLErrorHandler], CPLErrorHandler)
set_error_handler(err_handler)
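# Editor's note -- a sketch of the version helpers defined above; the values
# shown are examples, not guarantees.
#
#   from django.contrib.gis.gdal.libgdal import GDAL_VERSION, gdal_version
#
#   gdal_version()  # e.g. b'2.4.0'
#   GDAL_VERSION    # e.g. (2, 4, 0)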
from ctypes import byref, c_double

from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.envelope import Envelope, OGREnvelope
from django.contrib.gis.gdal.error import GDALException, SRSException
from django.contrib.gis.gdal.feature import Feature
from django.contrib.gis.gdal.field import OGRFieldTypes
from django.contrib.gis.gdal.geometries import OGRGeometry
from django.contrib.gis.gdal.geomtype import OGRGeomType
from django.contrib.gis.gdal.prototypes import (
    ds as capi, geom as geom_api, srs as srs_api,
)
from django.contrib.gis.gdal.srs import SpatialReference
from django.utils.encoding import force_bytes, force_str


# For more information, see the OGR C API source code:
#  https://www.gdal.org/ogr__api_8h.html
#
# The OGR_L_* routines are relevant here.
class Layer(GDALBase):
    "A class that wraps an OGR Layer, needs to be instantiated from a DataSource object."

    def __init__(self, layer_ptr, ds):
        """
        Initialize on an OGR C pointer to the Layer and the `DataSource` object
        that owns this layer.  The `DataSource` object is required so that a
        reference to it is kept with this Layer.  This prevents garbage
        collection of the `DataSource` while this Layer is still active.
        """
        if not layer_ptr:
            raise GDALException('Cannot create Layer, invalid pointer given')
        self.ptr = layer_ptr
        self._ds = ds
        self._ldefn = capi.get_layer_defn(self._ptr)
        # Does the Layer support random reading?
        self._random_read = self.test_capability(b'RandomRead')

    def __getitem__(self, index):
        "Get the Feature at the specified index."
        if isinstance(index, int):
            # An integer index was given -- we cannot do a check based on the
            # number of features because the beginning and ending feature IDs
            # are not guaranteed to be 0 and len(layer)-1, respectively.
            if index < 0:
                raise IndexError('Negative indices are not allowed on OGR Layers.')
            return self._make_feature(index)
        elif isinstance(index, slice):
            # A slice was given
            start, stop, stride = index.indices(self.num_feat)
            return [self._make_feature(fid) for fid in range(start, stop, stride)]
        else:
            raise TypeError('Integers and slices may only be used when indexing OGR Layers.')

    def __iter__(self):
        "Iterate over each Feature in the Layer."
        # ResetReading() must be called before iteration is to begin.
        capi.reset_reading(self._ptr)
        for i in range(self.num_feat):
            yield Feature(capi.get_next_feature(self._ptr), self)

    def __len__(self):
        "The length is the number of features."
        return self.num_feat

    def __str__(self):
        "The string name of the layer."
        return self.name

    def _make_feature(self, feat_id):
        """
        Helper routine for __getitem__ that constructs a Feature from the given
        Feature ID.  If the OGR Layer does not support random-access reading,
        then each feature of the layer will be incremented through until a
        Feature is found matching the given feature ID.
        """
        if self._random_read:
            # If the Layer supports random reading, return.
            try:
                return Feature(capi.get_feature(self.ptr, feat_id), self)
            except GDALException:
                pass
        else:
            # Random access isn't supported, have to increment through
            # each feature until the given feature ID is encountered.
            for feat in self:
                if feat.fid == feat_id:
                    return feat
        # Should have returned a Feature, raise an IndexError.
        raise IndexError('Invalid feature id: %s.' % feat_id)

    # #### Layer properties ####
    @property
    def extent(self):
        "Return the extent (an Envelope) of this layer."
        env = OGREnvelope()
        capi.get_extent(self.ptr, byref(env), 1)
        return Envelope(env)

    @property
    def name(self):
        "Return the name of this layer in the Data Source."
        name = capi.get_fd_name(self._ldefn)
        return force_str(name, self._ds.encoding, strings_only=True)

    @property
    def num_feat(self, force=1):
        "Return the number of features in the Layer."
        return capi.get_feature_count(self.ptr, force)

    @property
    def num_fields(self):
        "Return the number of fields in the Layer."
        return capi.get_field_count(self._ldefn)

    @property
    def geom_type(self):
        "Return the geometry type (OGRGeomType) of the Layer."
        return OGRGeomType(capi.get_fd_geom_type(self._ldefn))

    @property
    def srs(self):
        "Return the Spatial Reference used in this Layer."
        try:
            ptr = capi.get_layer_srs(self.ptr)
            return SpatialReference(srs_api.clone_srs(ptr))
        except SRSException:
            return None

    @property
    def fields(self):
        """
        Return a list of string names corresponding to each of the Fields
        available in this Layer.
        """
        return [force_str(
            capi.get_field_name(capi.get_field_defn(self._ldefn, i)),
            self._ds.encoding, strings_only=True,
        ) for i in range(self.num_fields)]

    @property
    def field_types(self):
        """
        Return a list of the types of fields in this Layer.  For example,
        return the list [OFTInteger, OFTReal, OFTString] for an OGR layer
        that has integer, floating-point, and string fields.
        """
        return [OGRFieldTypes[capi.get_field_type(capi.get_field_defn(self._ldefn, i))]
                for i in range(self.num_fields)]

    @property
    def field_widths(self):
        "Return a list of the maximum field widths for the features."
        return [capi.get_field_width(capi.get_field_defn(self._ldefn, i))
                for i in range(self.num_fields)]

    @property
    def field_precisions(self):
        "Return the field precisions for the features."
        return [capi.get_field_precision(capi.get_field_defn(self._ldefn, i))
                for i in range(self.num_fields)]

    def _get_spatial_filter(self):
        try:
            return OGRGeometry(geom_api.clone_geom(capi.get_spatial_filter(self.ptr)))
        except GDALException:
            return None

    def _set_spatial_filter(self, filter):
        if isinstance(filter, OGRGeometry):
            capi.set_spatial_filter(self.ptr, filter.ptr)
        elif isinstance(filter, (tuple, list)):
            if not len(filter) == 4:
                raise ValueError('Spatial filter list/tuple must have 4 elements.')
            # Map c_double onto params -- if a bad type is passed in it
            # will be caught here.
            xmin, ymin, xmax, ymax = map(c_double, filter)
            capi.set_spatial_filter_rect(self.ptr, xmin, ymin, xmax, ymax)
        elif filter is None:
            capi.set_spatial_filter(self.ptr, None)
        else:
            raise TypeError('Spatial filter must be either an OGRGeometry instance, a 4-tuple, or None.')

    spatial_filter = property(_get_spatial_filter, _set_spatial_filter)

    # #### Layer Methods ####
    def get_fields(self, field_name):
        """
        Return a list containing the given field name for every Feature
        in the Layer.
        """
        if field_name not in self.fields:
            raise GDALException('invalid field name: %s' % field_name)
        return [feat.get(field_name) for feat in self]

    def get_geoms(self, geos=False):
        """
        Return a list containing the OGRGeometry for every Feature in
        the Layer.
        """
        if geos:
            from django.contrib.gis.geos import GEOSGeometry
            return [GEOSGeometry(feat.geom.wkb) for feat in self]
        else:
            return [feat.geom for feat in self]

    def test_capability(self, capability):
        """
        Return a bool indicating whether this Layer supports the given
        capability (a string).  Valid capability strings include:
          'RandomRead', 'SequentialWrite', 'RandomWrite', 'FastSpatialFilter',
          'FastFeatureCount', 'FastGetExtent', 'CreateField', 'Transactions',
          'DeleteFeature', and 'FastSetNextByIndex'.
        """
        return bool(capi.test_capability(self.ptr, force_bytes(capability)))
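# A minimal example sketch (not part of the module above). Layers are
# obtained from a DataSource rather than constructed directly;
# 'city_data.shp' is a hypothetical shapefile path used purely for
# illustration.
if __name__ == '__main__':
    from django.contrib.gis.gdal import DataSource
    ds = DataSource('city_data.shp')  # hypothetical path
    layer = ds[0]
    print(layer.name, layer.num_feat, layer.geom_type)
    print(layer.fields)  # the string names of the layer's fields
    # Restrict iteration to a bounding box via the spatial_filter property.
    layer.spatial_filter = (-105.0, 39.0, -104.0, 40.0)  # xmin, ymin, xmax, ymax
    for feat in layer:
        print(feat.fid)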
""" The Spatial Reference class, represents OGR Spatial Reference objects. Example: >>> from django.contrib.gis.gdal import SpatialReference >>> srs = SpatialReference('WGS84') >>> print(srs) GEOGCS["WGS 84", DATUM["WGS_1984", SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]], TOWGS84[0,0,0,0,0,0,0], AUTHORITY["EPSG","6326"]], PRIMEM["Greenwich",0, AUTHORITY["EPSG","8901"]], UNIT["degree",0.01745329251994328, AUTHORITY["EPSG","9122"]], AUTHORITY["EPSG","4326"]] >>> print(srs.proj) +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs >>> print(srs.ellipsoid) (6378137.0, 6356752.3142451793, 298.25722356300003) >>> print(srs.projected, srs.geographic) False True >>> srs.import_epsg(32140) >>> print(srs.name) NAD83 / Texas South Central """ from ctypes import byref, c_char_p, c_int from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.error import SRSException from django.contrib.gis.gdal.prototypes import srs as capi from django.utils.encoding import force_bytes, force_str class SpatialReference(GDALBase): """ A wrapper for the OGRSpatialReference object. According to the GDAL Web site, the SpatialReference object "provide[s] services to represent coordinate systems (projections and datums) and to transform between them." """ destructor = capi.release_srs def __init__(self, srs_input='', srs_type='user'): """ Create a GDAL OSR Spatial Reference object from the given input. The input may be string of OGC Well Known Text (WKT), an integer EPSG code, a PROJ.4 string, and/or a projection "well known" shorthand string (one of 'WGS84', 'WGS72', 'NAD27', 'NAD83'). """ if srs_type == 'wkt': self.ptr = capi.new_srs(c_char_p(b'')) self.import_wkt(srs_input) return elif isinstance(srs_input, str): try: # If SRID is a string, e.g., '4326', then make acceptable # as user input. srid = int(srs_input) srs_input = 'EPSG:%d' % srid except ValueError: pass elif isinstance(srs_input, int): # EPSG integer code was input. srs_type = 'epsg' elif isinstance(srs_input, self.ptr_type): srs = srs_input srs_type = 'ogr' else: raise TypeError('Invalid SRS type "%s"' % srs_type) if srs_type == 'ogr': # Input is already an SRS pointer. srs = srs_input else: # Creating a new SRS pointer, using the string buffer. buf = c_char_p(b'') srs = capi.new_srs(buf) # If the pointer is NULL, throw an exception. if not srs: raise SRSException('Could not create spatial reference from: %s' % srs_input) else: self.ptr = srs # Importing from either the user input string or an integer SRID. if srs_type == 'user': self.import_user_input(srs_input) elif srs_type == 'epsg': self.import_epsg(srs_input) def __getitem__(self, target): """ Return the value of the given string attribute node, None if the node doesn't exist. Can also take a tuple as a parameter, (target, child), where child is the index of the attribute in the WKT. For example: >>> wkt = 'GEOGCS["WGS 84", DATUM["WGS_1984, ... AUTHORITY["EPSG","4326"]]' >>> srs = SpatialReference(wkt) # could also use 'WGS84', or 4326 >>> print(srs['GEOGCS']) WGS 84 >>> print(srs['DATUM']) WGS_1984 >>> print(srs['AUTHORITY']) EPSG >>> print(srs['AUTHORITY', 1]) # The authority value 4326 >>> print(srs['TOWGS84', 4]) # the fourth value in this wkt 0 >>> print(srs['UNIT|AUTHORITY']) # For the units authority, have to use the pipe symbole. EPSG >>> print(srs['UNIT|AUTHORITY', 1]) # The authority value for the units 9122 """ if isinstance(target, tuple): return self.attr_value(*target) else: return self.attr_value(target) def __str__(self): "Use 'pretty' WKT." 
return self.pretty_wkt # #### SpatialReference Methods #### def attr_value(self, target, index=0): """ The attribute value for the given target node (e.g. 'PROJCS'). The index keyword specifies an index of the child node to return. """ if not isinstance(target, str) or not isinstance(index, int): raise TypeError return capi.get_attr_value(self.ptr, force_bytes(target), index) def auth_name(self, target): "Return the authority name for the given string target node." return capi.get_auth_name(self.ptr, force_bytes(target)) def auth_code(self, target): "Return the authority code for the given string target node." return capi.get_auth_code(self.ptr, force_bytes(target)) def clone(self): "Return a clone of this SpatialReference object." return SpatialReference(capi.clone_srs(self.ptr)) def from_esri(self): "Morph this SpatialReference from ESRI's format to EPSG." capi.morph_from_esri(self.ptr) def identify_epsg(self): """ This method inspects the WKT of this SpatialReference, and will add EPSG authority nodes where an EPSG identifier is applicable. """ capi.identify_epsg(self.ptr) def to_esri(self): "Morph this SpatialReference to ESRI's format." capi.morph_to_esri(self.ptr) def validate(self): "Check to see if the given spatial reference is valid." capi.srs_validate(self.ptr) # #### Name & SRID properties #### @property def name(self): "Return the name of this Spatial Reference." if self.projected: return self.attr_value('PROJCS') elif self.geographic: return self.attr_value('GEOGCS') elif self.local: return self.attr_value('LOCAL_CS') else: return None @property def srid(self): "Return the SRID of top-level authority, or None if undefined." try: return int(self.attr_value('AUTHORITY', 1)) except (TypeError, ValueError): return None # #### Unit Properties #### @property def linear_name(self): "Return the name of the linear units." units, name = capi.linear_units(self.ptr, byref(c_char_p())) return name @property def linear_units(self): "Return the value of the linear units." units, name = capi.linear_units(self.ptr, byref(c_char_p())) return units @property def angular_name(self): "Return the name of the angular units." units, name = capi.angular_units(self.ptr, byref(c_char_p())) return name @property def angular_units(self): "Return the value of the angular units." units, name = capi.angular_units(self.ptr, byref(c_char_p())) return units @property def units(self): """ Return a 2-tuple of the units value and the units name. Automatically determine whether to return the linear or angular units. """ units, name = None, None if self.projected or self.local: units, name = capi.linear_units(self.ptr, byref(c_char_p())) elif self.geographic: units, name = capi.angular_units(self.ptr, byref(c_char_p())) if name is not None: name = force_str(name) return (units, name) # #### Spheroid/Ellipsoid Properties #### @property def ellipsoid(self): """ Return a tuple of the ellipsoid parameters: (semimajor axis, semiminor axis, and inverse flattening) """ return (self.semi_major, self.semi_minor, self.inverse_flattening) @property def semi_major(self): "Return the Semi Major Axis for this Spatial Reference." return capi.semi_major(self.ptr, byref(c_int())) @property def semi_minor(self): "Return the Semi Minor Axis for this Spatial Reference." return capi.semi_minor(self.ptr, byref(c_int())) @property def inverse_flattening(self): "Return the Inverse Flattening for this Spatial Reference." 
return capi.invflattening(self.ptr, byref(c_int())) # #### Boolean Properties #### @property def geographic(self): """ Return True if this SpatialReference is geographic (root node is GEOGCS). """ return bool(capi.isgeographic(self.ptr)) @property def local(self): "Return True if this SpatialReference is local (root node is LOCAL_CS)." return bool(capi.islocal(self.ptr)) @property def projected(self): """ Return True if this SpatialReference is a projected coordinate system (root node is PROJCS). """ return bool(capi.isprojected(self.ptr)) # #### Import Routines ##### def import_epsg(self, epsg): "Import the Spatial Reference from the EPSG code (an integer)." capi.from_epsg(self.ptr, epsg) def import_proj(self, proj): "Import the Spatial Reference from a PROJ.4 string." capi.from_proj(self.ptr, proj) def import_user_input(self, user_input): "Import the Spatial Reference from the given user input string." capi.from_user_input(self.ptr, force_bytes(user_input)) def import_wkt(self, wkt): "Import the Spatial Reference from OGC WKT (string)" capi.from_wkt(self.ptr, byref(c_char_p(force_bytes(wkt)))) def import_xml(self, xml): "Import the Spatial Reference from an XML string." capi.from_xml(self.ptr, xml) # #### Export Properties #### @property def wkt(self): "Return the WKT representation of this Spatial Reference." return capi.to_wkt(self.ptr, byref(c_char_p())) @property def pretty_wkt(self, simplify=0): "Return the 'pretty' representation of the WKT." return capi.to_pretty_wkt(self.ptr, byref(c_char_p()), simplify) @property def proj(self): "Return the PROJ.4 representation for this Spatial Reference." return capi.to_proj(self.ptr, byref(c_char_p())) @property def proj4(self): "Alias for proj()." return self.proj @property def xml(self, dialect=''): "Return the XML representation of this Spatial Reference." return capi.to_xml(self.ptr, byref(c_char_p()), force_bytes(dialect)) class CoordTransform(GDALBase): "The coordinate system transformation object." destructor = capi.destroy_ct def __init__(self, source, target): "Initialize on a source and target SpatialReference objects." if not isinstance(source, SpatialReference) or not isinstance(target, SpatialReference): raise TypeError('source and target must be of type SpatialReference') self.ptr = capi.new_ct(source._ptr, target._ptr) self._srs1_name = source.name self._srs2_name = target.name def __str__(self): return 'Transform from "%s" to "%s"' % (self._srs1_name, self._srs2_name)
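# A minimal example sketch (not part of the module above): build two spatial
# references and a transform between them. EPSG:4326 (WGS 84) and EPSG:3857
# (Web Mercator) are standard codes used purely as an illustration; the exact
# names printed depend on the installed GDAL version.
if __name__ == '__main__':
    wgs84 = SpatialReference(4326)  # integer input implies srs_type='epsg'
    mercator = SpatialReference(3857)
    print(wgs84.name, wgs84.srid, wgs84.units)
    ct = CoordTransform(wgs84, mercator)
    print(ct)  # e.g. Transform from "WGS 84" to "WGS 84 / Pseudo-Mercator"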
from django.contrib.admin import ModelAdmin
from django.contrib.gis.admin.widgets import OpenLayersWidget
from django.contrib.gis.db import models
from django.contrib.gis.gdal import OGRGeomType
from django.forms import Media

spherical_mercator_srid = 3857


class GeoModelAdmin(ModelAdmin):
    """
    The administration options class for Geographic models. Map settings
    may be overloaded from their defaults to create custom maps.
    """

    # The default map settings that may be overloaded -- still subject
    # to API changes.
    default_lon = 0
    default_lat = 0
    default_zoom = 4
    display_wkt = False
    display_srid = False
    extra_js = []
    num_zoom = 18
    max_zoom = False
    min_zoom = False
    units = False
    max_resolution = False
    max_extent = False
    modifiable = True
    mouse_position = True
    scale_text = True
    layerswitcher = True
    scrollable = True
    map_width = 600
    map_height = 400
    map_srid = 4326
    map_template = 'gis/admin/openlayers.html'
    openlayers_url = 'https://cdnjs.cloudflare.com/ajax/libs/openlayers/2.13.1/OpenLayers.js'
    point_zoom = num_zoom - 6
    wms_url = 'http://vmap0.tiles.osgeo.org/wms/vmap0'
    wms_layer = 'basic'
    wms_name = 'OpenLayers WMS'
    wms_options = {'format': 'image/jpeg'}
    debug = False
    widget = OpenLayersWidget

    @property
    def media(self):
        "Injects OpenLayers JavaScript into the admin."
        return super().media + Media(js=[self.openlayers_url] + self.extra_js)

    def formfield_for_dbfield(self, db_field, request, **kwargs):
        """
        Overloaded from ModelAdmin so that an OpenLayersWidget is used
        for viewing/editing 2D GeometryFields (OpenLayers 2 does not support
        3D editing).
        """
        if isinstance(db_field, models.GeometryField) and db_field.dim < 3:
            # Setting the widget with the newly defined widget.
            kwargs['widget'] = self.get_map_widget(db_field)
            return db_field.formfield(**kwargs)
        else:
            return super().formfield_for_dbfield(db_field, request, **kwargs)

    def get_map_widget(self, db_field):
        """
        Return a subclass of the OpenLayersWidget (or whatever was specified
        in the `widget` attribute) using the settings from the attributes set
        in this class.
        """
        is_collection = db_field.geom_type in ('MULTIPOINT', 'MULTILINESTRING', 'MULTIPOLYGON', 'GEOMETRYCOLLECTION')
        if is_collection:
            if db_field.geom_type == 'GEOMETRYCOLLECTION':
                collection_type = 'Any'
            else:
                collection_type = OGRGeomType(db_field.geom_type.replace('MULTI', ''))
        else:
            collection_type = 'None'

        class OLMap(self.widget):
            template_name = self.map_template
            geom_type = db_field.geom_type

            wms_options = ''
            if self.wms_options:
                wms_options = ["%s: '%s'" % pair for pair in self.wms_options.items()]
                wms_options = ', %s' % ', '.join(wms_options)

            params = {
                'default_lon': self.default_lon,
                'default_lat': self.default_lat,
                'default_zoom': self.default_zoom,
                'display_wkt': self.debug or self.display_wkt,
                'geom_type': OGRGeomType(db_field.geom_type),
                'field_name': db_field.name,
                'is_collection': is_collection,
                'scrollable': self.scrollable,
                'layerswitcher': self.layerswitcher,
                'collection_type': collection_type,
                'is_generic': db_field.geom_type == 'GEOMETRY',
                'is_linestring': db_field.geom_type in ('LINESTRING', 'MULTILINESTRING'),
                'is_polygon': db_field.geom_type in ('POLYGON', 'MULTIPOLYGON'),
                'is_point': db_field.geom_type in ('POINT', 'MULTIPOINT'),
                'num_zoom': self.num_zoom,
                'max_zoom': self.max_zoom,
                'min_zoom': self.min_zoom,
                'units': self.units,  # likely should get from object
                'max_resolution': self.max_resolution,
                'max_extent': self.max_extent,
                'modifiable': self.modifiable,
                'mouse_position': self.mouse_position,
                'scale_text': self.scale_text,
                'map_width': self.map_width,
                'map_height': self.map_height,
                'point_zoom': self.point_zoom,
                'srid': self.map_srid,
                'display_srid': self.display_srid,
                'wms_url': self.wms_url,
                'wms_layer': self.wms_layer,
                'wms_name': self.wms_name,
                'wms_options': wms_options,
                'debug': self.debug,
            }
        return OLMap


class OSMGeoAdmin(GeoModelAdmin):
    map_template = 'gis/admin/osm.html'
    num_zoom = 20
    map_srid = spherical_mercator_srid
    max_extent = '-20037508,-20037508,20037508,20037508'
    max_resolution = '156543.0339'
    point_zoom = num_zoom - 6
    units = 'm'
# Copyright (c) 2008-2009 Aryeh Leib Taurog, all rights reserved.
# Released under the New BSD license.
"""
This module contains a base type which provides list-style mutations
without specific data storage methods.

See also http://static.aryehleib.com/oldsite/MutableLists.html

Author: Aryeh Leib Taurog.
"""
from functools import total_ordering


@total_ordering
class ListMixin:
    """
    A base class which provides complete list interface.
    Derived classes must call ListMixin's __init__() function
    and implement the following:

    function _get_single_external(self, i):
        Return single item with index i for general use.
        The index i will always satisfy 0 <= i < len(self).

    function _get_single_internal(self, i):
        Same as above, but for use within the class [Optional]
        Note that if _get_single_internal and _get_single_external return
        different types of objects, _set_list must distinguish
        between the two and handle each appropriately.

    function _set_list(self, length, items):
        Recreate the entire object.

        NOTE: items may be a generator which calls _get_single_internal.
        Therefore, it is necessary to cache the values in a temporary:
            temp = list(items)
        before clobbering the original storage.

    function _set_single(self, i, value):
        Set the single item at index i to value [Optional]
        If left undefined, all mutations will result in rebuilding
        the object using _set_list.

    function __len__(self):
        Return the length

    int _minlength:
        The minimum legal length [Optional]

    int _maxlength:
        The maximum legal length [Optional]

    type or tuple _allowed:
        A type or tuple of allowed item types [Optional]
    """

    _minlength = 0
    _maxlength = None

    # ### Python initialization and special list interface methods ###

    def __init__(self, *args, **kwargs):
        if not hasattr(self, '_get_single_internal'):
            self._get_single_internal = self._get_single_external

        if not hasattr(self, '_set_single'):
            self._set_single = self._set_single_rebuild
            self._assign_extended_slice = self._assign_extended_slice_rebuild

        super().__init__(*args, **kwargs)

    def __getitem__(self, index):
        "Get the item(s) at the specified index/slice."
        if isinstance(index, slice):
            return [self._get_single_external(i) for i in range(*index.indices(len(self)))]
        else:
            index = self._checkindex(index)
            return self._get_single_external(index)

    def __delitem__(self, index):
        "Delete the item(s) at the specified index/slice."
        if not isinstance(index, (int, slice)):
            raise TypeError("%s is not a legal index" % index)

        # calculate new length and dimensions
        origLen = len(self)
        if isinstance(index, int):
            index = self._checkindex(index)
            indexRange = [index]
        else:
            indexRange = range(*index.indices(origLen))

        newLen = origLen - len(indexRange)
        newItems = (self._get_single_internal(i)
                    for i in range(origLen)
                    if i not in indexRange)

        self._rebuild(newLen, newItems)

    def __setitem__(self, index, val):
        "Set the item(s) at the specified index/slice."
        if isinstance(index, slice):
            self._set_slice(index, val)
        else:
            index = self._checkindex(index)
            self._check_allowed((val,))
            self._set_single(index, val)

    # ### Special methods for arithmetic operations ###
    def __add__(self, other):
        'add another list-like object'
        return self.__class__([*self, *other])

    def __radd__(self, other):
        'add to another list-like object'
        return other.__class__([*other, *self])

    def __iadd__(self, other):
        'add another list-like object to self'
        self.extend(other)
        return self

    def __mul__(self, n):
        'multiply'
        return self.__class__(list(self) * n)

    def __rmul__(self, n):
        'multiply'
        return self.__class__(list(self) * n)

    def __imul__(self, n):
        'multiply'
        if n <= 0:
            del self[:]
        else:
            cache = list(self)
            for i in range(n - 1):
                self.extend(cache)
        return self

    def __eq__(self, other):
        olen = len(other)
        for i in range(olen):
            try:
                c = self[i] == other[i]
            except IndexError:
                # self must be shorter
                return False
            if not c:
                return False
        return len(self) == olen

    def __lt__(self, other):
        olen = len(other)
        for i in range(olen):
            try:
                c = self[i] < other[i]
            except IndexError:
                # self must be shorter
                return True
            if c:
                return c
            elif other[i] < self[i]:
                return False
        return len(self) < olen

    # ### Public list interface Methods ###
    # ## Non-mutating ##
    def count(self, val):
        "Standard list count method"
        count = 0
        for i in self:
            if val == i:
                count += 1
        return count

    def index(self, val):
        "Standard list index method"
        for i in range(0, len(self)):
            if self[i] == val:
                return i
        raise ValueError('%s not found in object' % val)

    # ## Mutating ##
    def append(self, val):
        "Standard list append method"
        self[len(self):] = [val]

    def extend(self, vals):
        "Standard list extend method"
        self[len(self):] = vals

    def insert(self, index, val):
        "Standard list insert method"
        if not isinstance(index, int):
            raise TypeError("%s is not a legal index" % index)
        self[index:index] = [val]

    def pop(self, index=-1):
        "Standard list pop method"
        result = self[index]
        del self[index]
        return result

    def remove(self, val):
        "Standard list remove method"
        del self[self.index(val)]

    def reverse(self):
        "Standard list reverse method"
        self[:] = self[-1::-1]

    def sort(self, key=None, reverse=False):
        "Standard list sort method"
        self[:] = sorted(self, key=key, reverse=reverse)

    # ### Private routines ###
    def _rebuild(self, newLen, newItems):
        if newLen and newLen < self._minlength:
            raise ValueError('Must have at least %d items' % self._minlength)
        if self._maxlength is not None and newLen > self._maxlength:
            raise ValueError('Cannot have more than %d items' % self._maxlength)

        self._set_list(newLen, newItems)

    def _set_single_rebuild(self, index, value):
        self._set_slice(slice(index, index + 1, 1), [value])

    def _checkindex(self, index):
        length = len(self)
        if 0 <= index < length:
            return index
        if -length <= index < 0:
            return index + length
        raise IndexError('invalid index: %s' % index)

    def _check_allowed(self, items):
        if hasattr(self, '_allowed'):
            if False in [isinstance(val, self._allowed) for val in items]:
                raise TypeError('Invalid type encountered in the arguments.')

    def _set_slice(self, index, values):
        "Assign values to a slice of the object"
        try:
            valueList = list(values)
        except TypeError:
            raise TypeError('can only assign an iterable to a slice')

        self._check_allowed(valueList)

        origLen = len(self)
        start, stop, step = index.indices(origLen)

        # CAREFUL: index.step and step are not the same!
        # step will never be None
        if index.step is None:
            self._assign_simple_slice(start, stop, valueList)
        else:
            self._assign_extended_slice(start, stop, step, valueList)

    def _assign_extended_slice_rebuild(self, start, stop, step, valueList):
        'Assign an extended slice by rebuilding entire list'
        indexList = range(start, stop, step)
        # extended slice, only allow assigning slice of same size
        if len(valueList) != len(indexList):
            raise ValueError('attempt to assign sequence of size %d '
                             'to extended slice of size %d'
                             % (len(valueList), len(indexList)))

        # we're not changing the length of the sequence
        newLen = len(self)
        newVals = dict(zip(indexList, valueList))

        def newItems():
            for i in range(newLen):
                if i in newVals:
                    yield newVals[i]
                else:
                    yield self._get_single_internal(i)

        self._rebuild(newLen, newItems())

    def _assign_extended_slice(self, start, stop, step, valueList):
        'Assign an extended slice by re-assigning individual items'
        indexList = range(start, stop, step)
        # extended slice, only allow assigning slice of same size
        if len(valueList) != len(indexList):
            raise ValueError('attempt to assign sequence of size %d '
                             'to extended slice of size %d'
                             % (len(valueList), len(indexList)))

        for i, val in zip(indexList, valueList):
            self._set_single(i, val)

    def _assign_simple_slice(self, start, stop, valueList):
        'Assign a simple slice; Can assign slice of any length'
        origLen = len(self)
        stop = max(start, stop)
        newLen = origLen - stop + start + len(valueList)

        def newItems():
            for i in range(origLen + 1):
                if i == start:
                    yield from valueList

                if i < origLen:
                    if i < start or i >= stop:
                        yield self._get_single_internal(i)

        self._rebuild(newLen, newItems())
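# A minimal concrete subclass sketch (not part of the module above), showing
# the contract described in the class docstring: back the mixin with a plain
# tuple and implement _get_single_external, _set_list, and __len__. The
# `TupleList` name is purely illustrative.
if __name__ == '__main__':
    class TupleList(ListMixin):
        def __init__(self, *items):
            self._data = tuple(items)
            super().__init__()

        def _get_single_external(self, index):
            return self._data[index]

        def _set_list(self, length, items):
            # items may be a generator over _get_single_internal, so
            # materialize it before replacing the storage.
            self._data = tuple(items)

        def __len__(self):
            return len(self._data)

    t = TupleList(1, 2, 3)
    t.append(4)       # mutation rebuilds the tuple via _set_list()
    t[0:2] = [9, 8]   # simple slice assignment
    print(list(t))    # [9, 8, 3, 4]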
""" This module houses the Geometry Collection objects: GeometryCollection, MultiPoint, MultiLineString, and MultiPolygon """ from ctypes import byref, c_int, c_uint from django.contrib.gis.geos import prototypes as capi from django.contrib.gis.geos.geometry import GEOSGeometry, LinearGeometryMixin from django.contrib.gis.geos.libgeos import GEOM_PTR from django.contrib.gis.geos.linestring import LinearRing, LineString from django.contrib.gis.geos.point import Point from django.contrib.gis.geos.polygon import Polygon class GeometryCollection(GEOSGeometry): _typeid = 7 def __init__(self, *args, **kwargs): "Initialize a Geometry Collection from a sequence of Geometry objects." # Checking the arguments if len(args) == 1: # If only one geometry provided or a list of geometries is provided # in the first argument. if isinstance(args[0], (tuple, list)): init_geoms = args[0] else: init_geoms = args else: init_geoms = args # Ensuring that only the permitted geometries are allowed in this collection # this is moved to list mixin super class self._check_allowed(init_geoms) # Creating the geometry pointer array. collection = self._create_collection(len(init_geoms), init_geoms) super().__init__(collection, **kwargs) def __iter__(self): "Iterate over each Geometry in the Collection." for i in range(len(self)): yield self[i] def __len__(self): "Return the number of geometries in this Collection." return self.num_geom # ### Methods for compatibility with ListMixin ### def _create_collection(self, length, items): # Creating the geometry pointer array. geoms = (GEOM_PTR * length)(*[ # this is a little sloppy, but makes life easier # allow GEOSGeometry types (python wrappers) or pointer types capi.geom_clone(getattr(g, 'ptr', g)) for g in items ]) return capi.create_collection(c_int(self._typeid), byref(geoms), c_uint(length)) def _get_single_internal(self, index): return capi.get_geomn(self.ptr, index) def _get_single_external(self, index): "Return the Geometry from this Collection at the given index (0-based)." # Checking the index and returning the corresponding GEOS geometry. return GEOSGeometry(capi.geom_clone(self._get_single_internal(index)), srid=self.srid) def _set_list(self, length, items): "Create a new collection, and destroy the contents of the previous pointer." prev_ptr = self.ptr srid = self.srid self.ptr = self._create_collection(length, items) if srid: self.srid = srid capi.destroy_geom(prev_ptr) _set_single = GEOSGeometry._set_single_rebuild _assign_extended_slice = GEOSGeometry._assign_extended_slice_rebuild @property def kml(self): "Return the KML for this Geometry Collection." return '<MultiGeometry>%s</MultiGeometry>' % ''.join(g.kml for g in self) @property def tuple(self): "Return a tuple of all the coordinates in this Geometry Collection" return tuple(g.tuple for g in self) coords = tuple # MultiPoint, MultiLineString, and MultiPolygon class definitions. class MultiPoint(GeometryCollection): _allowed = Point _typeid = 4 class MultiLineString(LinearGeometryMixin, GeometryCollection): _allowed = (LineString, LinearRing) _typeid = 5 class MultiPolygon(GeometryCollection): _allowed = Polygon _typeid = 6 # Setting the allowed types here since GeometryCollection is defined before # its subclasses. GeometryCollection._allowed = (Point, LineString, LinearRing, Polygon, MultiPoint, MultiLineString, MultiPolygon)
""" This module contains the 'base' GEOSGeometry object -- all GEOS Geometries inherit from this object. """ import re from ctypes import addressof, byref, c_double from django.contrib.gis import gdal from django.contrib.gis.geometry import hex_regex, json_regex, wkt_regex from django.contrib.gis.geos import prototypes as capi from django.contrib.gis.geos.base import GEOSBase from django.contrib.gis.geos.coordseq import GEOSCoordSeq from django.contrib.gis.geos.error import GEOSException from django.contrib.gis.geos.libgeos import GEOM_PTR from django.contrib.gis.geos.mutable_list import ListMixin from django.contrib.gis.geos.prepared import PreparedGeometry from django.contrib.gis.geos.prototypes.io import ( ewkb_w, wkb_r, wkb_w, wkt_r, wkt_w, ) from django.utils.deconstruct import deconstructible from django.utils.encoding import force_bytes, force_str class GEOSGeometryBase(GEOSBase): _GEOS_CLASSES = None ptr_type = GEOM_PTR destructor = capi.destroy_geom has_cs = False # Only Point, LineString, LinearRing have coordinate sequences def __init__(self, ptr, cls): self._ptr = ptr # Setting the class type (e.g., Point, Polygon, etc.) if type(self) in (GEOSGeometryBase, GEOSGeometry): if cls is None: if GEOSGeometryBase._GEOS_CLASSES is None: # Inner imports avoid import conflicts with GEOSGeometry. from .linestring import LineString, LinearRing from .point import Point from .polygon import Polygon from .collections import ( GeometryCollection, MultiPoint, MultiLineString, MultiPolygon, ) GEOSGeometryBase._GEOS_CLASSES = { 0: Point, 1: LineString, 2: LinearRing, 3: Polygon, 4: MultiPoint, 5: MultiLineString, 6: MultiPolygon, 7: GeometryCollection, } cls = GEOSGeometryBase._GEOS_CLASSES[self.geom_typeid] self.__class__ = cls self._post_init() def _post_init(self): "Perform post-initialization setup." # Setting the coordinate sequence for the geometry (will be None on # geometries that do not have coordinate sequences) self._cs = GEOSCoordSeq(capi.get_cs(self.ptr), self.hasz) if self.has_cs else None def __copy__(self): """ Return a clone because the copy of a GEOSGeometry may contain an invalid pointer location if the original is garbage collected. """ return self.clone() def __deepcopy__(self, memodict): """ The `deepcopy` routine is used by the `Node` class of django.utils.tree; thus, the protocol routine needs to be implemented to return correct copies (clones) of these GEOS objects, which use C pointers. """ return self.clone() def __str__(self): "EWKT is used for the string representation." return self.ewkt def __repr__(self): "Short-hand representation because WKT may be very large." return '<%s object at %s>' % (self.geom_type, hex(addressof(self.ptr))) # Pickling support def _to_pickle_wkb(self): return bytes(self.wkb) def _from_pickle_wkb(self, wkb): return wkb_r().read(memoryview(wkb)) def __getstate__(self): # The pickled state is simply a tuple of the WKB (in string form) # and the SRID. return self._to_pickle_wkb(), self.srid def __setstate__(self, state): # Instantiating from the tuple state that was pickled. 
wkb, srid = state ptr = self._from_pickle_wkb(wkb) if not ptr: raise GEOSException('Invalid Geometry loaded from pickled state.') self.ptr = ptr self._post_init() self.srid = srid @classmethod def _from_wkb(cls, wkb): return wkb_r().read(wkb) @staticmethod def from_ewkt(ewkt): ewkt = force_bytes(ewkt) srid = None parts = ewkt.split(b';', 1) if len(parts) == 2: srid_part, wkt = parts match = re.match(br'SRID=(?P<srid>\-?\d+)', srid_part) if not match: raise ValueError('EWKT has invalid SRID part.') srid = int(match.group('srid')) else: wkt = ewkt if not wkt: raise ValueError('Expected WKT but got an empty string.') return GEOSGeometry(GEOSGeometry._from_wkt(wkt), srid=srid) @staticmethod def _from_wkt(wkt): return wkt_r().read(wkt) @classmethod def from_gml(cls, gml_string): return gdal.OGRGeometry.from_gml(gml_string).geos # Comparison operators def __eq__(self, other): """ Equivalence testing, a Geometry may be compared with another Geometry or an EWKT representation. """ if isinstance(other, str): try: other = GEOSGeometry.from_ewkt(other) except (ValueError, GEOSException): return False return isinstance(other, GEOSGeometry) and self.srid == other.srid and self.equals_exact(other) def __hash__(self): return hash((self.srid, self.wkt)) # ### Geometry set-like operations ### # Thanks to Sean Gillies for inspiration: # http://lists.gispython.org/pipermail/community/2007-July/001034.html # g = g1 | g2 def __or__(self, other): "Return the union of this Geometry and the other." return self.union(other) # g = g1 & g2 def __and__(self, other): "Return the intersection of this Geometry and the other." return self.intersection(other) # g = g1 - g2 def __sub__(self, other): "Return the difference this Geometry and the other." return self.difference(other) # g = g1 ^ g2 def __xor__(self, other): "Return the symmetric difference of this Geometry and the other." return self.sym_difference(other) # #### Coordinate Sequence Routines #### @property def coord_seq(self): "Return a clone of the coordinate sequence for this Geometry." if self.has_cs: return self._cs.clone() # #### Geometry Info #### @property def geom_type(self): "Return a string representing the Geometry type, e.g. 'Polygon'" return capi.geos_type(self.ptr).decode() @property def geom_typeid(self): "Return an integer representing the Geometry type." return capi.geos_typeid(self.ptr) @property def num_geom(self): "Return the number of geometries in the Geometry." return capi.get_num_geoms(self.ptr) @property def num_coords(self): "Return the number of coordinates in the Geometry." return capi.get_num_coords(self.ptr) @property def num_points(self): "Return the number points, or coordinates, in the Geometry." return self.num_coords @property def dims(self): "Return the dimension of this Geometry (0=point, 1=line, 2=surface)." return capi.get_dims(self.ptr) def normalize(self): "Convert this Geometry to normal form (or canonical form)." capi.geos_normalize(self.ptr) # #### Unary predicates #### @property def empty(self): """ Return a boolean indicating whether the set of points in this Geometry are empty. """ return capi.geos_isempty(self.ptr) @property def hasz(self): "Return whether the geometry has a 3D dimension." return capi.geos_hasz(self.ptr) @property def ring(self): "Return whether or not the geometry is a ring." return capi.geos_isring(self.ptr) @property def simple(self): "Return false if the Geometry isn't simple." return capi.geos_issimple(self.ptr) @property def valid(self): "Test the validity of this Geometry." 
return capi.geos_isvalid(self.ptr) @property def valid_reason(self): """ Return a string containing the reason for any invalidity. """ return capi.geos_isvalidreason(self.ptr).decode() # #### Binary predicates. #### def contains(self, other): "Return true if other.within(this) returns true." return capi.geos_contains(self.ptr, other.ptr) def covers(self, other): """ Return True if the DE-9IM Intersection Matrix for the two geometries is T*****FF*, *T****FF*, ***T**FF*, or ****T*FF*. If either geometry is empty, return False. """ return capi.geos_covers(self.ptr, other.ptr) def crosses(self, other): """ Return true if the DE-9IM intersection matrix for the two Geometries is T*T****** (for a point and a curve,a point and an area or a line and an area) 0******** (for two curves). """ return capi.geos_crosses(self.ptr, other.ptr) def disjoint(self, other): """ Return true if the DE-9IM intersection matrix for the two Geometries is FF*FF****. """ return capi.geos_disjoint(self.ptr, other.ptr) def equals(self, other): """ Return true if the DE-9IM intersection matrix for the two Geometries is T*F**FFF*. """ return capi.geos_equals(self.ptr, other.ptr) def equals_exact(self, other, tolerance=0): """ Return true if the two Geometries are exactly equal, up to a specified tolerance. """ return capi.geos_equalsexact(self.ptr, other.ptr, float(tolerance)) def intersects(self, other): "Return true if disjoint return false." return capi.geos_intersects(self.ptr, other.ptr) def overlaps(self, other): """ Return true if the DE-9IM intersection matrix for the two Geometries is T*T***T** (for two points or two surfaces) 1*T***T** (for two curves). """ return capi.geos_overlaps(self.ptr, other.ptr) def relate_pattern(self, other, pattern): """ Return true if the elements in the DE-9IM intersection matrix for the two Geometries match the elements in pattern. """ if not isinstance(pattern, str) or len(pattern) > 9: raise GEOSException('invalid intersection matrix pattern') return capi.geos_relatepattern(self.ptr, other.ptr, force_bytes(pattern)) def touches(self, other): """ Return true if the DE-9IM intersection matrix for the two Geometries is FT*******, F**T***** or F***T****. """ return capi.geos_touches(self.ptr, other.ptr) def within(self, other): """ Return true if the DE-9IM intersection matrix for the two Geometries is T*F**F***. """ return capi.geos_within(self.ptr, other.ptr) # #### SRID Routines #### @property def srid(self): "Get the SRID for the geometry. Return None if no SRID is set." s = capi.geos_get_srid(self.ptr) if s == 0: return None else: return s @srid.setter def srid(self, srid): "Set the SRID for the geometry." capi.geos_set_srid(self.ptr, 0 if srid is None else srid) # #### Output Routines #### @property def ewkt(self): """ Return the EWKT (SRID + WKT) of the Geometry. """ srid = self.srid return 'SRID=%s;%s' % (srid, self.wkt) if srid else self.wkt @property def wkt(self): "Return the WKT (Well-Known Text) representation of this Geometry." return wkt_w(dim=3 if self.hasz else 2, trim=True).write(self).decode() @property def hex(self): """ Return the WKB of this Geometry in hexadecimal form. Please note that the SRID is not included in this representation because it is not a part of the OGC specification (use the `hexewkb` property instead). """ # A possible faster, all-python, implementation: # str(self.wkb).encode('hex') return wkb_w(dim=3 if self.hasz else 2).write_hex(self) @property def hexewkb(self): """ Return the EWKB of this Geometry in hexadecimal form. 
This is an extension of the WKB specification that includes SRID value that are a part of this geometry. """ return ewkb_w(dim=3 if self.hasz else 2).write_hex(self) @property def json(self): """ Return GeoJSON representation of this Geometry. """ return self.ogr.json geojson = json @property def wkb(self): """ Return the WKB (Well-Known Binary) representation of this Geometry as a Python buffer. SRID and Z values are not included, use the `ewkb` property instead. """ return wkb_w(3 if self.hasz else 2).write(self) @property def ewkb(self): """ Return the EWKB representation of this Geometry as a Python buffer. This is an extension of the WKB specification that includes any SRID value that are a part of this geometry. """ return ewkb_w(3 if self.hasz else 2).write(self) @property def kml(self): "Return the KML representation of this Geometry." gtype = self.geom_type return '<%s>%s</%s>' % (gtype, self.coord_seq.kml, gtype) @property def prepared(self): """ Return a PreparedGeometry corresponding to this geometry -- it is optimized for the contains, intersects, and covers operations. """ return PreparedGeometry(self) # #### GDAL-specific output routines #### def _ogr_ptr(self): return gdal.OGRGeometry._from_wkb(self.wkb) @property def ogr(self): "Return the OGR Geometry for this Geometry." return gdal.OGRGeometry(self._ogr_ptr(), self.srs) @property def srs(self): "Return the OSR SpatialReference for SRID of this Geometry." if self.srid: try: return gdal.SpatialReference(self.srid) except gdal.SRSException: pass return None @property def crs(self): "Alias for `srs` property." return self.srs def transform(self, ct, clone=False): """ Requires GDAL. Transform the geometry according to the given transformation object, which may be an integer SRID, and WKT or PROJ.4 string. By default, transform the geometry in-place and return nothing. However if the `clone` keyword is set, don't modify the geometry and return a transformed clone instead. """ srid = self.srid if ct == srid: # short-circuit where source & dest SRIDs match if clone: return self.clone() else: return if isinstance(ct, gdal.CoordTransform): # We don't care about SRID because CoordTransform presupposes # source SRS. srid = None elif srid is None or srid < 0: raise GEOSException("Calling transform() with no SRID set is not supported") # Creating an OGR Geometry, which is then transformed. g = gdal.OGRGeometry(self._ogr_ptr(), srid) g.transform(ct) # Getting a new GEOS pointer ptr = g._geos_ptr() if clone: # User wants a cloned transformed geometry returned. return GEOSGeometry(ptr, srid=g.srid) if ptr: # Reassigning pointer, and performing post-initialization setup # again due to the reassignment. capi.destroy_geom(self.ptr) self.ptr = ptr self._post_init() self.srid = g.srid else: raise GEOSException('Transformed WKB was invalid.') # #### Topology Routines #### def _topology(self, gptr): "Return Geometry from the given pointer." return GEOSGeometry(gptr, srid=self.srid) @property def boundary(self): "Return the boundary as a newly allocated Geometry object." return self._topology(capi.geos_boundary(self.ptr)) def buffer(self, width, quadsegs=8): """ Return a geometry that represents all points whose distance from this Geometry is less than or equal to distance. Calculations are in the Spatial Reference System of this Geometry. The optional third parameter sets the number of segment used to approximate a quarter circle (defaults to 8). (Text from PostGIS documentation at ch. 
6.1.3) """ return self._topology(capi.geos_buffer(self.ptr, width, quadsegs)) def buffer_with_style(self, width, quadsegs=8, end_cap_style=1, join_style=1, mitre_limit=5.0): """ Same as buffer() but allows customizing the style of the buffer. End cap style can be round (1), flat (2), or square (3). Join style can be round (1), mitre (2), or bevel (3). Mitre ratio limit only affects mitered join style. """ return self._topology( capi.geos_bufferwithstyle(self.ptr, width, quadsegs, end_cap_style, join_style, mitre_limit), ) @property def centroid(self): """ The centroid is equal to the centroid of the set of component Geometries of highest dimension (since the lower-dimension geometries contribute zero "weight" to the centroid). """ return self._topology(capi.geos_centroid(self.ptr)) @property def convex_hull(self): """ Return the smallest convex Polygon that contains all the points in the Geometry. """ return self._topology(capi.geos_convexhull(self.ptr)) def difference(self, other): """ Return a Geometry representing the points making up this Geometry that do not make up other. """ return self._topology(capi.geos_difference(self.ptr, other.ptr)) @property def envelope(self): "Return the envelope for this geometry (a polygon)." return self._topology(capi.geos_envelope(self.ptr)) def intersection(self, other): "Return a Geometry representing the points shared by this Geometry and other." return self._topology(capi.geos_intersection(self.ptr, other.ptr)) @property def point_on_surface(self): "Compute an interior point of this Geometry." return self._topology(capi.geos_pointonsurface(self.ptr)) def relate(self, other): "Return the DE-9IM intersection matrix for this Geometry and the other." return capi.geos_relate(self.ptr, other.ptr).decode() def simplify(self, tolerance=0.0, preserve_topology=False): """ Return the Geometry, simplified using the Douglas-Peucker algorithm to the specified tolerance (higher tolerance => less points). If no tolerance provided, defaults to 0. By default, don't preserve topology - e.g. polygons can be split, collapse to lines or disappear holes can be created or disappear, and lines can cross. By specifying preserve_topology=True, the result will have the same dimension and number of components as the input. This is significantly slower. """ if preserve_topology: return self._topology(capi.geos_preservesimplify(self.ptr, tolerance)) else: return self._topology(capi.geos_simplify(self.ptr, tolerance)) def sym_difference(self, other): """ Return a set combining the points in this Geometry not in other, and the points in other not in this Geometry. """ return self._topology(capi.geos_symdifference(self.ptr, other.ptr)) @property def unary_union(self): "Return the union of all the elements of this geometry." return self._topology(capi.geos_unary_union(self.ptr)) def union(self, other): "Return a Geometry representing all the points in this Geometry and other." return self._topology(capi.geos_union(self.ptr, other.ptr)) # #### Other Routines #### @property def area(self): "Return the area of the Geometry." return capi.geos_area(self.ptr, byref(c_double())) def distance(self, other): """ Return the distance between the closest points on this Geometry and the other. Units will be in those of the coordinate system of the Geometry. 
""" if not isinstance(other, GEOSGeometry): raise TypeError('distance() works only on other GEOS Geometries.') return capi.geos_distance(self.ptr, other.ptr, byref(c_double())) @property def extent(self): """ Return the extent of this geometry as a 4-tuple, consisting of (xmin, ymin, xmax, ymax). """ from .point import Point env = self.envelope if isinstance(env, Point): xmin, ymin = env.tuple xmax, ymax = xmin, ymin else: xmin, ymin = env[0][0] xmax, ymax = env[0][2] return (xmin, ymin, xmax, ymax) @property def length(self): """ Return the length of this Geometry (e.g., 0 for point, or the circumference of a Polygon). """ return capi.geos_length(self.ptr, byref(c_double())) def clone(self): "Clone this Geometry." return GEOSGeometry(capi.geom_clone(self.ptr)) class LinearGeometryMixin: """ Used for LineString and MultiLineString. """ def interpolate(self, distance): return self._topology(capi.geos_interpolate(self.ptr, distance)) def interpolate_normalized(self, distance): return self._topology(capi.geos_interpolate_normalized(self.ptr, distance)) def project(self, point): from .point import Point if not isinstance(point, Point): raise TypeError('locate_point argument must be a Point') return capi.geos_project(self.ptr, point.ptr) def project_normalized(self, point): from .point import Point if not isinstance(point, Point): raise TypeError('locate_point argument must be a Point') return capi.geos_project_normalized(self.ptr, point.ptr) @property def merged(self): """ Return the line merge of this Geometry. """ return self._topology(capi.geos_linemerge(self.ptr)) @property def closed(self): """ Return whether or not this Geometry is closed. """ return capi.geos_isclosed(self.ptr) @deconstructible class GEOSGeometry(GEOSGeometryBase, ListMixin): "A class that, generally, encapsulates a GEOS geometry." def __init__(self, geo_input, srid=None): """ The base constructor for GEOS geometry objects. It may take the following inputs: * strings: - WKT - HEXEWKB (a PostGIS-specific canonical form) - GeoJSON (requires GDAL) * buffer: - WKB The `srid` keyword specifies the Source Reference Identifier (SRID) number for this Geometry. If not provided, it defaults to None. """ input_srid = None if isinstance(geo_input, bytes): geo_input = force_str(geo_input) if isinstance(geo_input, str): wkt_m = wkt_regex.match(geo_input) if wkt_m: # Handle WKT input. if wkt_m.group('srid'): input_srid = int(wkt_m.group('srid')) g = self._from_wkt(force_bytes(wkt_m.group('wkt'))) elif hex_regex.match(geo_input): # Handle HEXEWKB input. g = wkb_r().read(force_bytes(geo_input)) elif json_regex.match(geo_input): # Handle GeoJSON input. ogr = gdal.OGRGeometry.from_json(geo_input) g = ogr._geos_ptr() input_srid = ogr.srid else: raise ValueError('String input unrecognized as WKT EWKT, and HEXEWKB.') elif isinstance(geo_input, GEOM_PTR): # When the input is a pointer to a geometry (GEOM_PTR). g = geo_input elif isinstance(geo_input, memoryview): # When the input is a buffer (WKB). g = wkb_r().read(geo_input) elif isinstance(geo_input, GEOSGeometry): g = capi.geom_clone(geo_input.ptr) else: raise TypeError('Improper geometry input type: %s' % type(geo_input)) if not g: raise GEOSException('Could not initialize GEOS Geometry with given input.') input_srid = input_srid or capi.geos_get_srid(g) or None if input_srid and srid and input_srid != srid: raise ValueError('Input geometry already has SRID: %d.' % input_srid) super().__init__(g, None) # Set the SRID, if given. 
srid = input_srid or srid if srid and isinstance(srid, int): self.srid = srid
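# A minimal example sketch (not part of the module above): construct
# geometries from WKT/EWKT and exercise a few of the predicates and set-like
# operators defined here. Requires the GEOS library; exact output formatting
# may vary by GEOS version.
if __name__ == '__main__':
    poly = GEOSGeometry('POLYGON((0 0, 0 10, 10 10, 10 0, 0 0))')
    pt = GEOSGeometry('SRID=4326;POINT(5 5)')
    print(pt.srid)                      # 4326, parsed from the EWKT prefix
    print(poly.contains(pt))            # True
    print((poly & poly.envelope).area)  # 100.0 -- intersection via `&`
    print(pt.ewkt)                      # e.g. SRID=4326;POINT (5 5)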
from ctypes import c_uint

from django.contrib.gis import gdal
from django.contrib.gis.geos import prototypes as capi
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.geometry import GEOSGeometry


class Point(GEOSGeometry):
    _minlength = 2
    _maxlength = 3
    has_cs = True

    def __init__(self, x=None, y=None, z=None, srid=None):
        """
        The Point object may be initialized with either a tuple, or individual
        parameters.

        For example:
        >>> p = Point((5, 23))  # 2D point, passed in as a tuple
        >>> p = Point(5, 23, 8)  # 3D point, passed in with individual parameters
        """
        if x is None:
            coords = []
        elif isinstance(x, (tuple, list)):
            # Here a tuple or list was passed in under the `x` parameter.
            coords = x
        elif isinstance(x, (float, int)) and isinstance(y, (float, int)):
            # Here X, Y, and (optionally) Z were passed in individually, as parameters.
            if isinstance(z, (float, int)):
                coords = [x, y, z]
            else:
                coords = [x, y]
        else:
            raise TypeError('Invalid parameters given for Point initialization.')

        point = self._create_point(len(coords), coords)

        # Initializing using the address returned from the GEOS
        #  createPoint factory.
        super().__init__(point, srid=srid)

    def _to_pickle_wkb(self):
        return None if self.empty else super()._to_pickle_wkb()

    def _from_pickle_wkb(self, wkb):
        return self._create_empty() if wkb is None else super()._from_pickle_wkb(wkb)

    def _ogr_ptr(self):
        return gdal.geometries.Point._create_empty() if self.empty else super()._ogr_ptr()

    @classmethod
    def _create_empty(cls):
        return cls._create_point(None, None)

    @classmethod
    def _create_point(cls, ndim, coords):
        """
        Create a coordinate sequence, set X, Y, [Z], and create point
        """
        if not ndim:
            return capi.create_point(None)

        if ndim < 2 or ndim > 3:
            raise TypeError('Invalid point dimension: %s' % ndim)

        cs = capi.create_cs(c_uint(1), c_uint(ndim))
        i = iter(coords)
        capi.cs_setx(cs, 0, next(i))
        capi.cs_sety(cs, 0, next(i))
        if ndim == 3:
            capi.cs_setz(cs, 0, next(i))
        return capi.create_point(cs)

    def _set_list(self, length, items):
        ptr = self._create_point(length, items)
        if ptr:
            capi.destroy_geom(self.ptr)
            self._ptr = ptr
            self._post_init()
        else:
            # can this happen?
            raise GEOSException('Geometry resulting from slice deletion was invalid.')

    def _set_single(self, index, value):
        self._cs.setOrdinate(index, 0, value)

    def __iter__(self):
        "Iterate over coordinates of this Point."
        for i in range(len(self)):
            yield self[i]

    def __len__(self):
        "Return the number of dimensions for this Point (either 0, 2 or 3)."
        if self.empty:
            return 0
        if self.hasz:
            return 3
        else:
            return 2

    def _get_single_external(self, index):
        if index == 0:
            return self.x
        elif index == 1:
            return self.y
        elif index == 2:
            return self.z

    _get_single_internal = _get_single_external

    @property
    def x(self):
        "Return the X component of the Point."
        return self._cs.getOrdinate(0, 0)

    @x.setter
    def x(self, value):
        "Set the X component of the Point."
        self._cs.setOrdinate(0, 0, value)

    @property
    def y(self):
        "Return the Y component of the Point."
        return self._cs.getOrdinate(1, 0)

    @y.setter
    def y(self, value):
        "Set the Y component of the Point."
        self._cs.setOrdinate(1, 0, value)

    @property
    def z(self):
        "Return the Z component of the Point."
        return self._cs.getOrdinate(2, 0) if self.hasz else None

    @z.setter
    def z(self, value):
        "Set the Z component of the Point."
        if not self.hasz:
            raise GEOSException('Cannot set Z on 2D Point.')
        self._cs.setOrdinate(2, 0, value)

    # ### Tuple setting and retrieval routines. ###
    @property
    def tuple(self):
        "Return a tuple of the point."
        return self._cs.tuple

    @tuple.setter
    def tuple(self, tup):
        "Set the coordinates of the point with the given tuple."
        self._cs[0] = tup

    # The tuple and coords properties
    coords = tuple
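# A minimal example sketch (not part of the module above): 2D and 3D points
# and the ordinate accessors defined here. Requires the GEOS library.
if __name__ == '__main__':
    p = Point(5, 23)
    print(p.x, p.y, p.hasz)  # 5.0 23.0 False
    p.x = 7                  # ordinate setters mutate the point in place
    print(p.tuple)           # (7.0, 23.0)
    p3d = Point(1, 2, 3)
    print(len(p3d), p3d.z)   # 3 3.0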
""" This module houses the ctypes initialization procedures, as well as the notice and error handler function callbacks (get called when an error occurs in GEOS). This module also houses GEOS Pointer utilities, including get_pointer_arr(), and GEOM_PTR. """ import logging import os from ctypes import CDLL, CFUNCTYPE, POINTER, Structure, c_char_p from ctypes.util import find_library from django.core.exceptions import ImproperlyConfigured from django.utils.functional import SimpleLazyObject, cached_property from django.utils.version import get_version_tuple logger = logging.getLogger('django.contrib.gis') def load_geos(): # Custom library path set? try: from django.conf import settings lib_path = settings.GEOS_LIBRARY_PATH except (AttributeError, ImportError, ImproperlyConfigured, OSError): lib_path = None # Setting the appropriate names for the GEOS-C library. if lib_path: lib_names = None elif os.name == 'nt': # Windows NT libraries lib_names = ['geos_c', 'libgeos_c-1'] elif os.name == 'posix': # *NIX libraries lib_names = ['geos_c', 'GEOS'] else: raise ImportError('Unsupported OS "%s"' % os.name) # Using the ctypes `find_library` utility to find the path to the GEOS # shared library. This is better than manually specifying each library name # and extension (e.g., libgeos_c.[so|so.1|dylib].). if lib_names: for lib_name in lib_names: lib_path = find_library(lib_name) if lib_path is not None: break # No GEOS library could be found. if lib_path is None: raise ImportError( 'Could not find the GEOS library (tried "%s"). ' 'Try setting GEOS_LIBRARY_PATH in your settings.' % '", "'.join(lib_names) ) # Getting the GEOS C library. The C interface (CDLL) is used for # both *NIX and Windows. # See the GEOS C API source code for more details on the library function calls: # http://geos.refractions.net/ro/doxygen_docs/html/geos__c_8h-source.html _lgeos = CDLL(lib_path) # Here we set up the prototypes for the initGEOS_r and finishGEOS_r # routines. These functions aren't actually called until they are # attached to a GEOS context handle -- this actually occurs in # geos/prototypes/threadsafe.py. _lgeos.initGEOS_r.restype = CONTEXT_PTR _lgeos.finishGEOS_r.argtypes = [CONTEXT_PTR] # Set restype for compatibility across 32 and 64-bit platforms. _lgeos.GEOSversion.restype = c_char_p return _lgeos # The notice and error handler C function callback definitions. # Supposed to mimic the GEOS message handler (C below): # typedef void (*GEOSMessageHandler)(const char *fmt, ...); NOTICEFUNC = CFUNCTYPE(None, c_char_p, c_char_p) def notice_h(fmt, lst): fmt, lst = fmt.decode(), lst.decode() try: warn_msg = fmt % lst except TypeError: warn_msg = fmt logger.warning('GEOS_NOTICE: %s\n', warn_msg) notice_h = NOTICEFUNC(notice_h) ERRORFUNC = CFUNCTYPE(None, c_char_p, c_char_p) def error_h(fmt, lst): fmt, lst = fmt.decode(), lst.decode() try: err_msg = fmt % lst except TypeError: err_msg = fmt logger.error('GEOS_ERROR: %s\n', err_msg) error_h = ERRORFUNC(error_h) # #### GEOS Geometry C data structures, and utility functions. #### # Opaque GEOS geometry structures, used for GEOM_PTR and CS_PTR class GEOSGeom_t(Structure): pass class GEOSPrepGeom_t(Structure): pass class GEOSCoordSeq_t(Structure): pass class GEOSContextHandle_t(Structure): pass # Pointers to opaque GEOS geometry structures. 
GEOM_PTR = POINTER(GEOSGeom_t)
PREPGEOM_PTR = POINTER(GEOSPrepGeom_t)
CS_PTR = POINTER(GEOSCoordSeq_t)
CONTEXT_PTR = POINTER(GEOSContextHandle_t)

lgeos = SimpleLazyObject(load_geos)


class GEOSFuncFactory:
    """
    Lazy loading of GEOS functions.
    """
    argtypes = None
    restype = None
    errcheck = None

    def __init__(self, func_name, *args, restype=None, errcheck=None, argtypes=None, **kwargs):
        self.func_name = func_name
        if restype is not None:
            self.restype = restype
        if errcheck is not None:
            self.errcheck = errcheck
        if argtypes is not None:
            self.argtypes = argtypes
        self.args = args
        self.kwargs = kwargs

    def __call__(self, *args, **kwargs):
        return self.func(*args, **kwargs)

    @cached_property
    def func(self):
        from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc
        func = GEOSFunc(self.func_name)
        func.argtypes = self.argtypes or []
        func.restype = self.restype
        if self.errcheck:
            func.errcheck = self.errcheck
        return func


def geos_version():
    """Return the string version of the GEOS library."""
    return lgeos.GEOSversion()


def geos_version_tuple():
    """Return the GEOS version as a tuple (major, minor, subminor)."""
    return get_version_tuple(geos_version().decode())
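A small usage sketch: because `lgeos` is a SimpleLazyObject, the shared library is only loaded on first use. The sample output is illustrative and assumes GEOS is installed.

from django.contrib.gis.geos.libgeos import geos_version, geos_version_tuple

print(geos_version())        # e.g. b'3.8.0-CAPI-1.13.1' (first call loads libgeos_c)
print(geos_version_tuple())  # e.g. (3, 8, 0)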
9c0245b1a0576c833d640fe04b1541ff758d5c91f06a49a197137f5732ace164
from ctypes import byref, c_uint

from django.contrib.gis.geos import prototypes as capi
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.libgeos import GEOM_PTR
from django.contrib.gis.geos.linestring import LinearRing


class Polygon(GEOSGeometry):
    _minlength = 1

    def __init__(self, *args, **kwargs):
        """
        Initialize on an exterior ring and a sequence of holes (both
        instances may be either LinearRing instances, or a tuple/list
        that may be constructed into a LinearRing).

        Examples of initialization, where shell, hole1, and hole2 are
        valid LinearRing geometries:
        >>> from django.contrib.gis.geos import LinearRing, Polygon
        >>> shell = hole1 = hole2 = LinearRing()
        >>> poly = Polygon(shell, hole1, hole2)
        >>> poly = Polygon(shell, (hole1, hole2))

        >>> # Example where tuple parameters are used:
        >>> poly = Polygon(((0, 0), (0, 10), (10, 10), (10, 0), (0, 0)),
        ...                ((4, 4), (4, 6), (6, 6), (6, 4), (4, 4)))
        """
        if not args:
            super().__init__(self._create_polygon(0, None), **kwargs)
            return

        # Getting the ext_ring and init_holes parameters from the argument list
        ext_ring, *init_holes = args
        n_holes = len(init_holes)

        # If initialized as Polygon(shell, (LinearRing, LinearRing)) [for backward-compatibility]
        if n_holes == 1 and isinstance(init_holes[0], (tuple, list)):
            if not init_holes[0]:
                init_holes = ()
                n_holes = 0
            elif isinstance(init_holes[0][0], LinearRing):
                init_holes = init_holes[0]
                n_holes = len(init_holes)

        polygon = self._create_polygon(n_holes + 1, [ext_ring, *init_holes])
        super().__init__(polygon, **kwargs)

    def __iter__(self):
        "Iterate over each ring in the polygon."
        for i in range(len(self)):
            yield self[i]

    def __len__(self):
        "Return the number of rings in this Polygon."
        return self.num_interior_rings + 1

    @classmethod
    def from_bbox(cls, bbox):
        "Construct a Polygon from a bounding box (4-tuple)."
        x0, y0, x1, y1 = bbox
        for z in bbox:
            if not isinstance(z, (float, int)):
                return GEOSGeometry('POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' %
                                    (x0, y0, x0, y1, x1, y1, x1, y0, x0, y0))
        return Polygon(((x0, y0), (x0, y1), (x1, y1), (x1, y0), (x0, y0)))

    # ### These routines are needed for list-like operation w/ListMixin ###
    def _create_polygon(self, length, items):
        # Instantiate LinearRing objects if necessary, but don't clone them yet
        # _construct_ring will throw a TypeError if a parameter isn't a valid ring
        # If we cloned the pointers here, we wouldn't be able to clean up
        # in case of error.
        if not length:
            return capi.create_empty_polygon()

        rings = []
        for r in items:
            if isinstance(r, GEOM_PTR):
                rings.append(r)
            else:
                rings.append(self._construct_ring(r))

        shell = self._clone(rings.pop(0))

        n_holes = length - 1
        if n_holes:
            holes = (GEOM_PTR * n_holes)(*[self._clone(r) for r in rings])
            holes_param = byref(holes)
        else:
            holes_param = None

        return capi.create_polygon(shell, holes_param, c_uint(n_holes))

    def _clone(self, g):
        if isinstance(g, GEOM_PTR):
            return capi.geom_clone(g)
        else:
            return capi.geom_clone(g.ptr)

    def _construct_ring(self, param, msg=(
            'Parameter must be a sequence of LinearRings or objects that can initialize to LinearRings')):
        "Try to construct a ring from the given parameter."
        if isinstance(param, LinearRing):
            return param
        try:
            ring = LinearRing(param)
            return ring
        except TypeError:
            raise TypeError(msg)

    def _set_list(self, length, items):
        # Getting the current pointer, replacing with the newly constructed
        # geometry, and destroying the old geometry.
        prev_ptr = self.ptr
        srid = self.srid
        self.ptr = self._create_polygon(length, items)
        if srid:
            self.srid = srid
        capi.destroy_geom(prev_ptr)

    def _get_single_internal(self, index):
        """
        Return the ring at the specified index. The first index, 0, will
        always return the exterior ring. Indices > 0 will return the
        interior ring at the given index (e.g., poly[1] and poly[2] would
        return the first and second interior ring, respectively).

        CAREFUL: Internal/External are not the same as Interior/Exterior!
        Return a pointer from the existing geometries for use internally by the
        object's methods. _get_single_external() returns a clone of the same
        geometry for use by external code.
        """
        if index == 0:
            return capi.get_extring(self.ptr)
        else:
            # Getting the interior ring, have to subtract 1 from the index.
            return capi.get_intring(self.ptr, index - 1)

    def _get_single_external(self, index):
        return GEOSGeometry(capi.geom_clone(self._get_single_internal(index)), srid=self.srid)

    _set_single = GEOSGeometry._set_single_rebuild
    _assign_extended_slice = GEOSGeometry._assign_extended_slice_rebuild

    # #### Polygon Properties ####
    @property
    def num_interior_rings(self):
        "Return the number of interior rings."
        # Getting the number of rings
        return capi.get_nrings(self.ptr)

    def _get_ext_ring(self):
        "Get the exterior ring of the Polygon."
        return self[0]

    def _set_ext_ring(self, ring):
        "Set the exterior ring of the Polygon."
        self[0] = ring

    # Properties for the exterior ring/shell.
    exterior_ring = property(_get_ext_ring, _set_ext_ring)
    shell = exterior_ring

    @property
    def tuple(self):
        "Get the tuple for each ring in this Polygon."
        return tuple(self[i].tuple for i in range(len(self)))
    coords = tuple

    @property
    def kml(self):
        "Return the KML representation of this Polygon."
        inner_kml = ''.join(
            "<innerBoundaryIs>%s</innerBoundaryIs>" % self[i + 1].kml
            for i in range(self.num_interior_rings)
        )
        return "<Polygon><outerBoundaryIs>%s</outerBoundaryIs>%s</Polygon>" % (self[0].kml, inner_kml)
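A usage sketch of the Polygon API above, assuming a working GEOS install:

from django.contrib.gis.geos import LinearRing, Polygon

shell = LinearRing((0, 0), (0, 10), (10, 10), (10, 0), (0, 0))
hole = LinearRing((4, 4), (4, 6), (6, 6), (6, 4), (4, 4))
poly = Polygon(shell, hole)

assert len(poly) == 2               # exterior ring plus one interior ring
assert poly.num_interior_rings == 1
assert poly.shell.equals(shell)     # poly[0], exposed as exterior_ring/shell
bbox_poly = Polygon.from_bbox((0, 0, 10, 10))  # same shell, no holes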
3bc00d8f288c1b6780f76fde7cb7de72bd3a51f2c7582c5fe2277b8e80a61470
from django.contrib.gis.db.models.fields import (
    ExtentField, GeometryCollectionField, GeometryField, LineStringField,
)
from django.db.models.aggregates import Aggregate
from django.utils.functional import cached_property

__all__ = ['Collect', 'Extent', 'Extent3D', 'MakeLine', 'Union']


class GeoAggregate(Aggregate):
    function = None
    is_extent = False

    @cached_property
    def output_field(self):
        return self.output_field_class(self.source_expressions[0].output_field.srid)

    def as_sql(self, compiler, connection, function=None, **extra_context):
        # this will be called again in parent, but it's needed now - before
        # we get the spatial_aggregate_name
        connection.ops.check_expression_support(self)
        return super().as_sql(
            compiler, connection,
            function=function or connection.ops.spatial_aggregate_name(self.name),
            **extra_context
        )

    def as_oracle(self, compiler, connection, **extra_context):
        tolerance = self.extra.get('tolerance') or getattr(self, 'tolerance', 0.05)
        template = None if self.is_extent else '%(function)s(SDOAGGRTYPE(%(expressions)s,%(tolerance)s))'
        return self.as_sql(compiler, connection, template=template, tolerance=tolerance, **extra_context)

    def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
        c = super().resolve_expression(query, allow_joins, reuse, summarize, for_save)
        for expr in c.get_source_expressions():
            if not hasattr(expr.field, 'geom_type'):
                raise ValueError('Geospatial aggregates only allowed on geometry fields.')
        return c


class Collect(GeoAggregate):
    name = 'Collect'
    output_field_class = GeometryCollectionField


class Extent(GeoAggregate):
    name = 'Extent'
    is_extent = '2D'

    def __init__(self, expression, **extra):
        super().__init__(expression, output_field=ExtentField(), **extra)

    def convert_value(self, value, expression, connection):
        return connection.ops.convert_extent(value)


class Extent3D(GeoAggregate):
    name = 'Extent3D'
    is_extent = '3D'

    def __init__(self, expression, **extra):
        super().__init__(expression, output_field=ExtentField(), **extra)

    def convert_value(self, value, expression, connection):
        return connection.ops.convert_extent3d(value)


class MakeLine(GeoAggregate):
    name = 'MakeLine'
    output_field_class = LineStringField


class Union(GeoAggregate):
    name = 'Union'
    output_field_class = GeometryField
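A hypothetical usage sketch: the `City` model and its `point` field are invented for illustration and require a configured Django project; only the aggregate classes come from this module.

from django.contrib.gis.db import models
from django.contrib.gis.db.models import Extent, Union


class City(models.Model):   # hypothetical example model
    name = models.CharField(max_length=50)
    point = models.PointField()

    class Meta:
        app_label = 'example'


# Extent resolves to a 4-tuple via convert_extent(); Union to one geometry:
# City.objects.aggregate(bbox=Extent('point'))   # {'bbox': (xmin, ymin, xmax, ymax)}
# City.objects.aggregate(u=Union('point'))       # {'u': <GEOSGeometry>}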
9c7fb798311a18d7f880d8a1305dfe0fd5afb4895e1271510a5ff5960a742683
from collections import defaultdict, namedtuple from django.contrib.gis import forms, gdal from django.contrib.gis.db.models.proxy import SpatialProxy from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.geos import ( GeometryCollection, GEOSException, GEOSGeometry, LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon, ) from django.core.exceptions import ImproperlyConfigured from django.db.models.fields import Field from django.utils.translation import gettext_lazy as _ # Local cache of the spatial_ref_sys table, which holds SRID data for each # spatial database alias. This cache exists so that the database isn't queried # for SRID info each time a distance query is constructed. _srid_cache = defaultdict(dict) SRIDCacheEntry = namedtuple('SRIDCacheEntry', ['units', 'units_name', 'spheroid', 'geodetic']) def get_srid_info(srid, connection): """ Return the units, unit name, and spheroid WKT associated with the given SRID from the `spatial_ref_sys` (or equivalent) spatial database table for the given database connection. These results are cached. """ from django.contrib.gis.gdal import SpatialReference global _srid_cache try: # The SpatialRefSys model for the spatial backend. SpatialRefSys = connection.ops.spatial_ref_sys() except NotImplementedError: SpatialRefSys = None alias, get_srs = ( (connection.alias, lambda srid: SpatialRefSys.objects.using(connection.alias).get(srid=srid).srs) if SpatialRefSys else (None, SpatialReference) ) if srid not in _srid_cache[alias]: srs = get_srs(srid) units, units_name = srs.units _srid_cache[alias][srid] = SRIDCacheEntry( units=units, units_name=units_name, spheroid='SPHEROID["%s",%s,%s]' % (srs['spheroid'], srs.semi_major, srs.inverse_flattening), geodetic=srs.geographic, ) return _srid_cache[alias][srid] class BaseSpatialField(Field): """ The Base GIS Field. It's used as a base class for GeometryField and RasterField. Defines properties that are common to all GIS fields such as the characteristics of the spatial reference system of the field. """ description = _("The base GIS field.") empty_strings_allowed = False def __init__(self, verbose_name=None, srid=4326, spatial_index=True, **kwargs): """ The initialization function for base spatial fields. Takes the following as keyword arguments: srid: The spatial reference system identifier, an OGC standard. Defaults to 4326 (WGS84). spatial_index: Indicates whether to create a spatial index. Defaults to True. Set this instead of 'db_index' for geographic fields since index creation is different for geometry columns. """ # Setting the index flag with the value of the `spatial_index` keyword. self.spatial_index = spatial_index # Setting the SRID and getting the units. Unit information must be # easily available in the field instance for distance queries. self.srid = srid # Setting the verbose_name keyword argument with the positional # first parameter, so this works like normal fields. kwargs['verbose_name'] = verbose_name super().__init__(**kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() # Always include SRID for less fragility; include spatial index if it's # not the default value. 
kwargs['srid'] = self.srid if self.spatial_index is not True: kwargs['spatial_index'] = self.spatial_index return name, path, args, kwargs def db_type(self, connection): return connection.ops.geo_db_type(self) def spheroid(self, connection): return get_srid_info(self.srid, connection).spheroid def units(self, connection): return get_srid_info(self.srid, connection).units def units_name(self, connection): return get_srid_info(self.srid, connection).units_name def geodetic(self, connection): """ Return true if this field's SRID corresponds with a coordinate system that uses non-projected units (e.g., latitude/longitude). """ return get_srid_info(self.srid, connection).geodetic def get_placeholder(self, value, compiler, connection): """ Return the placeholder for the spatial column for the given value. """ return connection.ops.get_geom_placeholder(self, value, compiler) def get_srid(self, obj): """ Return the default SRID for the given geometry or raster, taking into account the SRID set for the field. For example, if the input geometry or raster doesn't have an SRID, then the SRID of the field will be returned. """ srid = obj.srid # SRID of given geometry. if srid is None or self.srid == -1 or (srid == -1 and self.srid != -1): return self.srid else: return srid def get_db_prep_value(self, value, connection, *args, **kwargs): if value is None: return None return connection.ops.Adapter( super().get_db_prep_value(value, connection, *args, **kwargs), **({'geography': True} if self.geography and connection.ops.geography else {}) ) def get_raster_prep_value(self, value, is_candidate): """ Return a GDALRaster if conversion is successful, otherwise return None. """ if isinstance(value, gdal.GDALRaster): return value elif is_candidate: try: return gdal.GDALRaster(value) except GDALException: pass elif isinstance(value, dict): try: return gdal.GDALRaster(value) except GDALException: raise ValueError("Couldn't create spatial object from lookup value '%s'." % value) def get_prep_value(self, value): obj = super().get_prep_value(value) if obj is None: return None # When the input is not a geometry or raster, attempt to construct one # from the given string input. if isinstance(obj, GEOSGeometry): pass else: # Check if input is a candidate for conversion to raster or geometry. is_candidate = isinstance(obj, (bytes, str)) or hasattr(obj, '__geo_interface__') # Try to convert the input to raster. raster = self.get_raster_prep_value(obj, is_candidate) if raster: obj = raster elif is_candidate: try: obj = GEOSGeometry(obj) except (GEOSException, GDALException): raise ValueError("Couldn't create spatial object from lookup value '%s'." % obj) else: raise ValueError('Cannot use object with type %s for a spatial lookup parameter.' % type(obj).__name__) # Assigning the SRID value. obj.srid = self.get_srid(obj) return obj class GeometryField(BaseSpatialField): """ The base Geometry field -- maps to the OpenGIS Specification Geometry type. """ description = _("The base Geometry field -- maps to the OpenGIS Specification Geometry type.") form_class = forms.GeometryField # The OpenGIS Geometry name. geom_type = 'GEOMETRY' geom_class = None def __init__(self, verbose_name=None, dim=2, geography=False, *, extent=(-180.0, -90.0, 180.0, 90.0), tolerance=0.05, **kwargs): """ The initialization function for geometry fields. In addition to the parameters from BaseSpatialField, it takes the following as keyword arguments: dim: The number of dimensions for this geometry. Defaults to 2. 
extent: Customize the extent, in a 4-tuple of WGS 84 coordinates, for the geometry field entry in the `USER_SDO_GEOM_METADATA` table. Defaults to (-180.0, -90.0, 180.0, 90.0). tolerance: Define the tolerance, in meters, to use for the geometry field entry in the `USER_SDO_GEOM_METADATA` table. Defaults to 0.05. """ # Setting the dimension of the geometry field. self.dim = dim # Is this a geography rather than a geometry column? self.geography = geography # Oracle-specific private attributes for creating the entry in # `USER_SDO_GEOM_METADATA` self._extent = extent self._tolerance = tolerance super().__init__(verbose_name=verbose_name, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() # Include kwargs if they're not the default values. if self.dim != 2: kwargs['dim'] = self.dim if self.geography is not False: kwargs['geography'] = self.geography if self._extent != (-180.0, -90.0, 180.0, 90.0): kwargs['extent'] = self._extent if self._tolerance != 0.05: kwargs['tolerance'] = self._tolerance return name, path, args, kwargs def contribute_to_class(self, cls, name, **kwargs): super().contribute_to_class(cls, name, **kwargs) # Setup for lazy-instantiated Geometry object. setattr(cls, self.attname, SpatialProxy(self.geom_class or GEOSGeometry, self, load_func=GEOSGeometry)) def formfield(self, **kwargs): defaults = { 'form_class': self.form_class, 'geom_type': self.geom_type, 'srid': self.srid, **kwargs, } if self.dim > 2 and not getattr(defaults['form_class'].widget, 'supports_3d', False): defaults.setdefault('widget', forms.Textarea) return super().formfield(**defaults) def select_format(self, compiler, sql, params): """ Return the selection format string, depending on the requirements of the spatial backend. For example, Oracle and MySQL require custom selection formats in order to retrieve geometries in OGC WKB. """ return compiler.connection.ops.select % sql, params # The OpenGIS Geometry Type Fields class PointField(GeometryField): geom_type = 'POINT' geom_class = Point form_class = forms.PointField description = _("Point") class LineStringField(GeometryField): geom_type = 'LINESTRING' geom_class = LineString form_class = forms.LineStringField description = _("Line string") class PolygonField(GeometryField): geom_type = 'POLYGON' geom_class = Polygon form_class = forms.PolygonField description = _("Polygon") class MultiPointField(GeometryField): geom_type = 'MULTIPOINT' geom_class = MultiPoint form_class = forms.MultiPointField description = _("Multi-point") class MultiLineStringField(GeometryField): geom_type = 'MULTILINESTRING' geom_class = MultiLineString form_class = forms.MultiLineStringField description = _("Multi-line string") class MultiPolygonField(GeometryField): geom_type = 'MULTIPOLYGON' geom_class = MultiPolygon form_class = forms.MultiPolygonField description = _("Multi polygon") class GeometryCollectionField(GeometryField): geom_type = 'GEOMETRYCOLLECTION' geom_class = GeometryCollection form_class = forms.GeometryCollectionField description = _("Geometry collection") class ExtentField(Field): "Used as a return value from an extent aggregate" description = _("Extent Aggregate Field") def get_internal_type(self): return "ExtentField" def select_format(self, compiler, sql, params): select = compiler.connection.ops.select_extent return select % sql if select else sql, params class RasterField(BaseSpatialField): """ Raster field for GeoDjango -- evaluates into GDALRaster objects. 
""" description = _("Raster Field") geom_type = 'RASTER' geography = False def _check_connection(self, connection): # Make sure raster fields are used only on backends with raster support. if not connection.features.gis_enabled or not connection.features.supports_raster: raise ImproperlyConfigured('Raster fields require backends with raster support.') def db_type(self, connection): self._check_connection(connection) return super().db_type(connection) def from_db_value(self, value, expression, connection): return connection.ops.parse_raster(value) def contribute_to_class(self, cls, name, **kwargs): super().contribute_to_class(cls, name, **kwargs) # Setup for lazy-instantiated Raster object. For large querysets, the # instantiation of all GDALRasters can potentially be expensive. This # delays the instantiation of the objects to the moment of evaluation # of the raster attribute. setattr(cls, self.attname, SpatialProxy(gdal.GDALRaster, self)) def get_transform(self, name): from django.contrib.gis.db.models.lookups import RasterBandTransform try: band_index = int(name) return type( 'SpecificRasterBandTransform', (RasterBandTransform,), {'band_index': band_index} ) except ValueError: pass return super().get_transform(name)
fcf24b6a697cc8a6bfb69d55d8f9c65ed4f973a50648aa218f3c2ce68cf3802a
from decimal import Decimal from django.contrib.gis.db.models.fields import BaseSpatialField, GeometryField from django.contrib.gis.db.models.sql import AreaField, DistanceField from django.contrib.gis.geos import GEOSGeometry from django.core.exceptions import FieldError from django.db.models import ( BooleanField, FloatField, IntegerField, TextField, Transform, ) from django.db.models.expressions import Func, Value from django.db.models.functions import Cast from django.db.utils import NotSupportedError from django.utils.functional import cached_property NUMERIC_TYPES = (int, float, Decimal) class GeoFuncMixin: function = None geom_param_pos = (0,) def __init__(self, *expressions, **extra): super().__init__(*expressions, **extra) # Ensure that value expressions are geometric. for pos in self.geom_param_pos: expr = self.source_expressions[pos] if not isinstance(expr, Value): continue try: output_field = expr.output_field except FieldError: output_field = None geom = expr.value if not isinstance(geom, GEOSGeometry) or output_field and not isinstance(output_field, GeometryField): raise TypeError("%s function requires a geometric argument in position %d." % (self.name, pos + 1)) if not geom.srid and not output_field: raise ValueError("SRID is required for all geometries.") if not output_field: self.source_expressions[pos] = Value(geom, output_field=GeometryField(srid=geom.srid)) @property def name(self): return self.__class__.__name__ @cached_property def geo_field(self): return self.source_expressions[self.geom_param_pos[0]].field def as_sql(self, compiler, connection, function=None, **extra_context): if self.function is None and function is None: function = connection.ops.spatial_function_name(self.name) return super().as_sql(compiler, connection, function=function, **extra_context) def resolve_expression(self, *args, **kwargs): res = super().resolve_expression(*args, **kwargs) # Ensure that expressions are geometric. source_fields = res.get_source_fields() for pos in self.geom_param_pos: field = source_fields[pos] if not isinstance(field, GeometryField): raise TypeError( "%s function requires a GeometryField in position %s, got %s." % ( self.name, pos + 1, type(field).__name__, ) ) base_srid = res.geo_field.srid for pos in self.geom_param_pos[1:]: expr = res.source_expressions[pos] expr_srid = expr.output_field.srid if expr_srid != base_srid: # Automatic SRID conversion so objects are comparable. res.source_expressions[pos] = Transform(expr, base_srid).resolve_expression(*args, **kwargs) return res def _handle_param(self, value, param_name='', check_types=None): if not hasattr(value, 'resolve_expression'): if check_types and not isinstance(value, check_types): raise TypeError( "The %s parameter has the wrong type: should be %s." % ( param_name, check_types) ) return value class GeoFunc(GeoFuncMixin, Func): pass class GeomOutputGeoFunc(GeoFunc): @cached_property def output_field(self): return GeometryField(srid=self.geo_field.srid) class SQLiteDecimalToFloatMixin: """ By default, Decimal values are converted to str by the SQLite backend, which is not acceptable by the GIS functions expecting numeric values. 
""" def as_sqlite(self, compiler, connection, **extra_context): for expr in self.get_source_expressions(): if hasattr(expr, 'value') and isinstance(expr.value, Decimal): expr.value = float(expr.value) return super().as_sql(compiler, connection, **extra_context) class OracleToleranceMixin: tolerance = 0.05 def as_oracle(self, compiler, connection, **extra_context): tol = self.extra.get('tolerance', self.tolerance) return self.as_sql( compiler, connection, template="%%(function)s(%%(expressions)s, %s)" % tol, **extra_context ) class Area(OracleToleranceMixin, GeoFunc): arity = 1 @cached_property def output_field(self): return AreaField(self.geo_field) def as_sql(self, compiler, connection, **extra_context): if not connection.features.supports_area_geodetic and self.geo_field.geodetic(connection): raise NotSupportedError('Area on geodetic coordinate systems not supported.') return super().as_sql(compiler, connection, **extra_context) def as_sqlite(self, compiler, connection, **extra_context): if self.geo_field.geodetic(connection): extra_context['template'] = '%(function)s(%(expressions)s, %(spheroid)d)' extra_context['spheroid'] = True return self.as_sql(compiler, connection, **extra_context) class Azimuth(GeoFunc): output_field = FloatField() arity = 2 geom_param_pos = (0, 1) class AsGeoJSON(GeoFunc): output_field = TextField() def __init__(self, expression, bbox=False, crs=False, precision=8, **extra): expressions = [expression] if precision is not None: expressions.append(self._handle_param(precision, 'precision', int)) options = 0 if crs and bbox: options = 3 elif bbox: options = 1 elif crs: options = 2 if options: expressions.append(options) super().__init__(*expressions, **extra) class AsGML(GeoFunc): geom_param_pos = (1,) output_field = TextField() def __init__(self, expression, version=2, precision=8, **extra): expressions = [version, expression] if precision is not None: expressions.append(self._handle_param(precision, 'precision', int)) super().__init__(*expressions, **extra) def as_oracle(self, compiler, connection, **extra_context): source_expressions = self.get_source_expressions() version = source_expressions[0] clone = self.copy() clone.set_source_expressions([source_expressions[1]]) extra_context['function'] = 'SDO_UTIL.TO_GML311GEOMETRY' if version.value == 3 else 'SDO_UTIL.TO_GMLGEOMETRY' return super(AsGML, clone).as_sql(compiler, connection, **extra_context) class AsKML(AsGML): def as_sqlite(self, compiler, connection, **extra_context): # No version parameter clone = self.copy() clone.set_source_expressions(self.get_source_expressions()[1:]) return clone.as_sql(compiler, connection, **extra_context) class AsSVG(GeoFunc): output_field = TextField() def __init__(self, expression, relative=False, precision=8, **extra): relative = relative if hasattr(relative, 'resolve_expression') else int(relative) expressions = [ expression, relative, self._handle_param(precision, 'precision', int), ] super().__init__(*expressions, **extra) class BoundingCircle(OracleToleranceMixin, GeoFunc): def __init__(self, expression, num_seg=48, **extra): super().__init__(expression, num_seg, **extra) def as_oracle(self, compiler, connection, **extra_context): clone = self.copy() clone.set_source_expressions([self.get_source_expressions()[0]]) return super(BoundingCircle, clone).as_oracle(compiler, connection, **extra_context) class Centroid(OracleToleranceMixin, GeomOutputGeoFunc): arity = 1 class Difference(OracleToleranceMixin, GeomOutputGeoFunc): arity = 2 geom_param_pos = (0, 1) class 
DistanceResultMixin: @cached_property def output_field(self): return DistanceField(self.geo_field) def source_is_geography(self): return self.geo_field.geography and self.geo_field.srid == 4326 class Distance(DistanceResultMixin, OracleToleranceMixin, GeoFunc): geom_param_pos = (0, 1) spheroid = None def __init__(self, expr1, expr2, spheroid=None, **extra): expressions = [expr1, expr2] if spheroid is not None: self.spheroid = self._handle_param(spheroid, 'spheroid', bool) super().__init__(*expressions, **extra) def as_postgresql(self, compiler, connection, **extra_context): clone = self.copy() function = None expr2 = clone.source_expressions[1] geography = self.source_is_geography() if expr2.output_field.geography != geography: if isinstance(expr2, Value): expr2.output_field.geography = geography else: clone.source_expressions[1] = Cast( expr2, GeometryField(srid=expr2.output_field.srid, geography=geography), ) if not geography and self.geo_field.geodetic(connection): # Geometry fields with geodetic (lon/lat) coordinates need special distance functions if self.spheroid: # DistanceSpheroid is more accurate and resource intensive than DistanceSphere function = connection.ops.spatial_function_name('DistanceSpheroid') # Replace boolean param by the real spheroid of the base field clone.source_expressions.append(Value(self.geo_field.spheroid(connection))) else: function = connection.ops.spatial_function_name('DistanceSphere') return super(Distance, clone).as_sql(compiler, connection, function=function, **extra_context) def as_sqlite(self, compiler, connection, **extra_context): if self.geo_field.geodetic(connection): # SpatiaLite returns NULL instead of zero on geodetic coordinates extra_context['template'] = 'COALESCE(%(function)s(%(expressions)s, %(spheroid)s), 0)' extra_context['spheroid'] = int(bool(self.spheroid)) return super().as_sql(compiler, connection, **extra_context) class Envelope(GeomOutputGeoFunc): arity = 1 class ForcePolygonCW(GeomOutputGeoFunc): arity = 1 class GeoHash(GeoFunc): output_field = TextField() def __init__(self, expression, precision=None, **extra): expressions = [expression] if precision is not None: expressions.append(self._handle_param(precision, 'precision', int)) super().__init__(*expressions, **extra) def as_mysql(self, compiler, connection, **extra_context): clone = self.copy() # If no precision is provided, set it to the maximum. 
if len(clone.source_expressions) < 2: clone.source_expressions.append(Value(100)) return clone.as_sql(compiler, connection, **extra_context) class GeometryDistance(GeoFunc): output_field = FloatField() arity = 2 function = '' arg_joiner = ' <-> ' geom_param_pos = (0, 1) class Intersection(OracleToleranceMixin, GeomOutputGeoFunc): arity = 2 geom_param_pos = (0, 1) @BaseSpatialField.register_lookup class IsValid(OracleToleranceMixin, GeoFuncMixin, Transform): lookup_name = 'isvalid' output_field = BooleanField() def as_oracle(self, compiler, connection, **extra_context): sql, params = super().as_oracle(compiler, connection, **extra_context) return "CASE %s WHEN 'TRUE' THEN 1 ELSE 0 END" % sql, params class Length(DistanceResultMixin, OracleToleranceMixin, GeoFunc): def __init__(self, expr1, spheroid=True, **extra): self.spheroid = spheroid super().__init__(expr1, **extra) def as_sql(self, compiler, connection, **extra_context): if self.geo_field.geodetic(connection) and not connection.features.supports_length_geodetic: raise NotSupportedError("This backend doesn't support Length on geodetic fields") return super().as_sql(compiler, connection, **extra_context) def as_postgresql(self, compiler, connection, **extra_context): clone = self.copy() function = None if self.source_is_geography(): clone.source_expressions.append(Value(self.spheroid)) elif self.geo_field.geodetic(connection): # Geometry fields with geodetic (lon/lat) coordinates need length_spheroid function = connection.ops.spatial_function_name('LengthSpheroid') clone.source_expressions.append(Value(self.geo_field.spheroid(connection))) else: dim = min(f.dim for f in self.get_source_fields() if f) if dim > 2: function = connection.ops.length3d return super(Length, clone).as_sql(compiler, connection, function=function, **extra_context) def as_sqlite(self, compiler, connection, **extra_context): function = None if self.geo_field.geodetic(connection): function = 'GeodesicLength' if self.spheroid else 'GreatCircleLength' return super().as_sql(compiler, connection, function=function, **extra_context) class LineLocatePoint(GeoFunc): output_field = FloatField() arity = 2 geom_param_pos = (0, 1) class MakeValid(GeoFunc): pass class MemSize(GeoFunc): output_field = IntegerField() arity = 1 class NumGeometries(GeoFunc): output_field = IntegerField() arity = 1 class NumPoints(GeoFunc): output_field = IntegerField() arity = 1 class Perimeter(DistanceResultMixin, OracleToleranceMixin, GeoFunc): arity = 1 def as_postgresql(self, compiler, connection, **extra_context): function = None if self.geo_field.geodetic(connection) and not self.source_is_geography(): raise NotSupportedError("ST_Perimeter cannot use a non-projected non-geography field.") dim = min(f.dim for f in self.get_source_fields()) if dim > 2: function = connection.ops.perimeter3d return super().as_sql(compiler, connection, function=function, **extra_context) def as_sqlite(self, compiler, connection, **extra_context): if self.geo_field.geodetic(connection): raise NotSupportedError("Perimeter cannot use a non-projected field.") return super().as_sql(compiler, connection, **extra_context) class PointOnSurface(OracleToleranceMixin, GeomOutputGeoFunc): arity = 1 class Reverse(GeoFunc): arity = 1 class Scale(SQLiteDecimalToFloatMixin, GeomOutputGeoFunc): def __init__(self, expression, x, y, z=0.0, **extra): expressions = [ expression, self._handle_param(x, 'x', NUMERIC_TYPES), self._handle_param(y, 'y', NUMERIC_TYPES), ] if z != 0.0: expressions.append(self._handle_param(z, 'z', 
            NUMERIC_TYPES))
        super().__init__(*expressions, **extra)


class SnapToGrid(SQLiteDecimalToFloatMixin, GeomOutputGeoFunc):
    def __init__(self, expression, *args, **extra):
        nargs = len(args)
        expressions = [expression]
        if nargs in (1, 2):
            expressions.extend(
                [self._handle_param(arg, '', NUMERIC_TYPES) for arg in args]
            )
        elif nargs == 4:
            # Reverse origin and size param ordering
            expressions += [
                *(self._handle_param(arg, '', NUMERIC_TYPES) for arg in args[2:]),
                *(self._handle_param(arg, '', NUMERIC_TYPES) for arg in args[0:2]),
            ]
        else:
            raise ValueError('Must provide 1, 2, or 4 arguments to `SnapToGrid`.')
        super().__init__(*expressions, **extra)


class SymDifference(OracleToleranceMixin, GeomOutputGeoFunc):
    arity = 2
    geom_param_pos = (0, 1)


class Transform(GeomOutputGeoFunc):
    def __init__(self, expression, srid, **extra):
        expressions = [
            expression,
            self._handle_param(srid, 'srid', int),
        ]
        if 'output_field' not in extra:
            extra['output_field'] = GeometryField(srid=srid)
        super().__init__(*expressions, **extra)


class Translate(Scale):
    def as_sqlite(self, compiler, connection, **extra_context):
        clone = self.copy()
        if len(self.source_expressions) < 4:
            # Always provide the z parameter for ST_Translate
            clone.source_expressions.append(Value(0))
        return super(Translate, clone).as_sqlite(compiler, connection, **extra_context)


class Union(OracleToleranceMixin, GeomOutputGeoFunc):
    arity = 2
    geom_param_pos = (0, 1)
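A hypothetical annotation sketch reusing the invented `City` model from the aggregates sketch earlier, so the ORM calls are shown commented out:

from django.contrib.gis.db.models.functions import Distance, Transform
from django.contrib.gis.geos import Point

pnt = Point(0.0, 0.0, srid=4326)

# qs = City.objects.annotate(
#     d=Distance('point', pnt),       # geometry args are SRID-aligned in resolve_expression()
#     web=Transform('point', 3857),   # output_field becomes GeometryField(srid=3857)
# )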
0837e5f4cf4f7025eee4a7cb6d73a3c047ccbf1779b26708e324c54f92a75ec8
import cx_Oracle

from django.db.backends.oracle.introspection import DatabaseIntrospection


class OracleIntrospection(DatabaseIntrospection):
    # Associating any OBJECTVAR instances with GeometryField. Of course,
    # this won't work right on Oracle objects that aren't MDSYS.SDO_GEOMETRY,
    # but it is the only object type supported within Django anyways.
    data_types_reverse = DatabaseIntrospection.data_types_reverse.copy()
    data_types_reverse[cx_Oracle.OBJECT] = 'GeometryField'

    def get_geometry_type(self, table_name, description):
        with self.connection.cursor() as cursor:
            # Querying USER_SDO_GEOM_METADATA to get the SRID and dimension information.
            try:
                cursor.execute(
                    'SELECT "DIMINFO", "SRID" FROM "USER_SDO_GEOM_METADATA" '
                    'WHERE "TABLE_NAME"=%s AND "COLUMN_NAME"=%s',
                    (table_name.upper(), description.name.upper())
                )
                row = cursor.fetchone()
            except Exception as exc:
                raise Exception(
                    'Could not find entry in USER_SDO_GEOM_METADATA '
                    'corresponding to "%s"."%s"' % (table_name, description.name)
                ) from exc

            # TODO: Research way to find a more specific geometry field type for
            # the column's contents.
            field_type = 'GeometryField'

            # Getting the field parameters.
            field_params = {}
            dim, srid = row
            if srid != 4326:
                field_params['srid'] = srid
            # Size of object array (SDO_DIM_ARRAY) is number of dimensions.
            dim = dim.size()
            if dim != 2:
                field_params['dim'] = dim
        return field_type, field_params
a53df67ff0351516303b9856322817ecf53fa23a5198885d52584ea9dcf647d9
""" The GeometryColumns and SpatialRefSys models for the Oracle spatial backend. It should be noted that Oracle Spatial does not have database tables named according to the OGC standard, so the closest analogs are used. For example, the `USER_SDO_GEOM_METADATA` is used for the GeometryColumns model and the `SDO_COORD_REF_SYS` is used for the SpatialRefSys model. """ from django.contrib.gis.db import models from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin class OracleGeometryColumns(models.Model): "Maps to the Oracle USER_SDO_GEOM_METADATA table." table_name = models.CharField(max_length=32) column_name = models.CharField(max_length=1024) srid = models.IntegerField(primary_key=True) # TODO: Add support for `diminfo` column (type MDSYS.SDO_DIM_ARRAY). class Meta: app_label = 'gis' db_table = 'USER_SDO_GEOM_METADATA' managed = False def __str__(self): return '%s - %s (SRID: %s)' % (self.table_name, self.column_name, self.srid) @classmethod def table_name_col(cls): """ Return the name of the metadata column used to store the feature table name. """ return 'table_name' @classmethod def geom_col_name(cls): """ Return the name of the metadata column used to store the feature geometry column. """ return 'column_name' class OracleSpatialRefSys(models.Model, SpatialRefSysMixin): "Maps to the Oracle MDSYS.CS_SRS table." cs_name = models.CharField(max_length=68) srid = models.IntegerField(primary_key=True) auth_srid = models.IntegerField() auth_name = models.CharField(max_length=256) wktext = models.CharField(max_length=2046) # Optional geometry representing the bounds of this coordinate # system. By default, all are NULL in the table. cs_bounds = models.PolygonField(null=True) class Meta: app_label = 'gis' db_table = 'CS_SRS' managed = False @property def wkt(self): return self.wktext
68f2b6750da34d01c87f99a18b95472d572bff88b42b2146f7d20baac6d6d907
""" This module contains the spatial lookup types, and the `get_geo_where_clause` routine for Oracle Spatial. Please note that WKT support is broken on the XE version, and thus this backend will not work on such platforms. Specifically, XE lacks support for an internal JVM, and Java libraries are required to use the WKT constructors. """ import re from django.contrib.gis.db.backends.base.operations import ( BaseSpatialOperations, ) from django.contrib.gis.db.backends.oracle.adapter import OracleSpatialAdapter from django.contrib.gis.db.backends.utils import SpatialOperator from django.contrib.gis.db.models import aggregates from django.contrib.gis.geos.geometry import GEOSGeometry, GEOSGeometryBase from django.contrib.gis.geos.prototypes.io import wkb_r from django.contrib.gis.measure import Distance from django.db.backends.oracle.operations import DatabaseOperations DEFAULT_TOLERANCE = '0.05' class SDOOperator(SpatialOperator): sql_template = "%(func)s(%(lhs)s, %(rhs)s) = 'TRUE'" class SDODWithin(SpatialOperator): sql_template = "SDO_WITHIN_DISTANCE(%(lhs)s, %(rhs)s, %%s) = 'TRUE'" class SDODisjoint(SpatialOperator): sql_template = "SDO_GEOM.RELATE(%%(lhs)s, 'DISJOINT', %%(rhs)s, %s) = 'DISJOINT'" % DEFAULT_TOLERANCE class SDORelate(SpatialOperator): sql_template = "SDO_RELATE(%(lhs)s, %(rhs)s, 'mask=%(mask)s') = 'TRUE'" def check_relate_argument(self, arg): masks = 'TOUCH|OVERLAPBDYDISJOINT|OVERLAPBDYINTERSECT|EQUAL|INSIDE|COVEREDBY|CONTAINS|COVERS|ANYINTERACT|ON' mask_regex = re.compile(r'^(%s)(\+(%s))*$' % (masks, masks), re.I) if not isinstance(arg, str) or not mask_regex.match(arg): raise ValueError('Invalid SDO_RELATE mask: "%s"' % arg) def as_sql(self, connection, lookup, template_params, sql_params): template_params['mask'] = sql_params.pop() return super().as_sql(connection, lookup, template_params, sql_params) class OracleOperations(BaseSpatialOperations, DatabaseOperations): name = 'oracle' oracle = True disallowed_aggregates = (aggregates.Collect, aggregates.Extent3D, aggregates.MakeLine) Adapter = OracleSpatialAdapter extent = 'SDO_AGGR_MBR' unionagg = 'SDO_AGGR_UNION' from_text = 'SDO_GEOMETRY' function_names = { 'Area': 'SDO_GEOM.SDO_AREA', 'BoundingCircle': 'SDO_GEOM.SDO_MBC', 'Centroid': 'SDO_GEOM.SDO_CENTROID', 'Difference': 'SDO_GEOM.SDO_DIFFERENCE', 'Distance': 'SDO_GEOM.SDO_DISTANCE', 'Envelope': 'SDO_GEOM_MBR', 'Intersection': 'SDO_GEOM.SDO_INTERSECTION', 'IsValid': 'SDO_GEOM.VALIDATE_GEOMETRY_WITH_CONTEXT', 'Length': 'SDO_GEOM.SDO_LENGTH', 'NumGeometries': 'SDO_UTIL.GETNUMELEM', 'NumPoints': 'SDO_UTIL.GETNUMVERTICES', 'Perimeter': 'SDO_GEOM.SDO_LENGTH', 'PointOnSurface': 'SDO_GEOM.SDO_POINTONSURFACE', 'Reverse': 'SDO_UTIL.REVERSE_LINESTRING', 'SymDifference': 'SDO_GEOM.SDO_XOR', 'Transform': 'SDO_CS.TRANSFORM', 'Union': 'SDO_GEOM.SDO_UNION', } # We want to get SDO Geometries as WKT because it is much easier to # instantiate GEOS proxies from WKT than SDO_GEOMETRY(...) strings. # However, this adversely affects performance (i.e., Java is called # to convert to WKT on every query). If someone wishes to write a # SDO_GEOMETRY(...) parser in Python, let me know =) select = 'SDO_UTIL.TO_WKBGEOMETRY(%s)' gis_operators = { 'contains': SDOOperator(func='SDO_CONTAINS'), 'coveredby': SDOOperator(func='SDO_COVEREDBY'), 'covers': SDOOperator(func='SDO_COVERS'), 'disjoint': SDODisjoint(), 'intersects': SDOOperator(func='SDO_OVERLAPBDYINTERSECT'), # TODO: Is this really the same as ST_Intersects()? 
'equals': SDOOperator(func='SDO_EQUAL'), 'exact': SDOOperator(func='SDO_EQUAL'), 'overlaps': SDOOperator(func='SDO_OVERLAPS'), 'same_as': SDOOperator(func='SDO_EQUAL'), 'relate': SDORelate(), # Oracle uses a different syntax, e.g., 'mask=inside+touch' 'touches': SDOOperator(func='SDO_TOUCH'), 'within': SDOOperator(func='SDO_INSIDE'), 'dwithin': SDODWithin(), } unsupported_functions = { 'AsGeoJSON', 'AsKML', 'AsSVG', 'Azimuth', 'ForcePolygonCW', 'GeoHash', 'GeometryDistance', 'LineLocatePoint', 'MakeValid', 'MemSize', 'Scale', 'SnapToGrid', 'Translate', } def geo_quote_name(self, name): return super().geo_quote_name(name).upper() def convert_extent(self, clob): if clob: # Generally, Oracle returns a polygon for the extent -- however, # it can return a single point if there's only one Point in the # table. ext_geom = GEOSGeometry(memoryview(clob.read())) gtype = str(ext_geom.geom_type) if gtype == 'Polygon': # Construct the 4-tuple from the coordinates in the polygon. shell = ext_geom.shell ll, ur = shell[0][:2], shell[2][:2] elif gtype == 'Point': ll = ext_geom.coords[:2] ur = ll else: raise Exception('Unexpected geometry type returned for extent: %s' % gtype) xmin, ymin = ll xmax, ymax = ur return (xmin, ymin, xmax, ymax) else: return None def geo_db_type(self, f): """ Return the geometry database type for Oracle. Unlike other spatial backends, no stored procedure is necessary and it's the same for all geometry types. """ return 'MDSYS.SDO_GEOMETRY' def get_distance(self, f, value, lookup_type): """ Return the distance parameters given the value and the lookup type. On Oracle, geometry columns with a geodetic coordinate system behave implicitly like a geography column, and thus meters will be used as the distance parameter on them. """ if not value: return [] value = value[0] if isinstance(value, Distance): if f.geodetic(self.connection): dist_param = value.m else: dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection))) else: dist_param = value # dwithin lookups on Oracle require a special string parameter # that starts with "distance=". if lookup_type == 'dwithin': dist_param = 'distance=%s' % dist_param return [dist_param] def get_geom_placeholder(self, f, value, compiler): if value is None: return 'NULL' return super().get_geom_placeholder(f, value, compiler) def spatial_aggregate_name(self, agg_name): """ Return the spatial aggregate SQL name. """ agg_name = 'unionagg' if agg_name.lower() == 'union' else agg_name.lower() return getattr(self, agg_name) # Routines for getting the OGC-compliant models. def geometry_columns(self): from django.contrib.gis.db.backends.oracle.models import OracleGeometryColumns return OracleGeometryColumns def spatial_ref_sys(self): from django.contrib.gis.db.backends.oracle.models import OracleSpatialRefSys return OracleSpatialRefSys def modify_insert_params(self, placeholder, params): """Drop out insert parameters for NULL placeholder. Needed for Oracle Spatial backend due to #10888. """ if placeholder == 'NULL': return [] return super().modify_insert_params(placeholder, params) def get_geometry_converter(self, expression): read = wkb_r().read srid = expression.output_field.srid if srid == -1: srid = None geom_class = expression.output_field.geom_class def converter(value, expression, connection): if value is not None: geom = GEOSGeometryBase(read(memoryview(value.read())), geom_class) if srid: geom.srid = srid return geom return converter def get_area_att_for_field(self, field): return 'sq_m'
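A self-contained illustration of SDORelate.check_relate_argument() defined above: the same regex accepts compound masks joined with '+' and rejects anything else.

import re

masks = 'TOUCH|OVERLAPBDYDISJOINT|OVERLAPBDYINTERSECT|EQUAL|INSIDE|COVEREDBY|CONTAINS|COVERS|ANYINTERACT|ON'
mask_regex = re.compile(r'^(%s)(\+(%s))*$' % (masks, masks), re.I)

assert mask_regex.match('inside+touch')   # valid compound mask
assert not mask_regex.match('near')       # would raise ValueError in check_relate_argument()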
b1c55e47be588fcbecd2aff4a3bf065589fdf01c6c3822a62cedc5ea581c1877
import re

from django.contrib.gis.db.models import aggregates


class BaseSpatialFeatures:
    gis_enabled = True

    # Does the database contain a SpatialRefSys model to store SRID information?
    has_spatialrefsys_table = True

    # Does the backend support the django.contrib.gis.utils.add_srs_entry() utility?
    supports_add_srs_entry = True
    # Does the backend introspect GeometryField to its subtypes?
    supports_geometry_field_introspection = True

    # Does the backend support storing 3D geometries?
    supports_3d_storage = False
    # Reference implementation of 3D functions is:
    # https://postgis.net/docs/PostGIS_Special_Functions_Index.html#PostGIS_3D_Functions
    supports_3d_functions = False
    # Does the database support SRID transform operations?
    supports_transform = True
    # Can geometry fields be null?
    supports_null_geometries = True
    # Are empty geometries supported?
    supports_empty_geometries = False
    # Can the function be applied on geodetic coordinate systems?
    supports_distance_geodetic = True
    supports_length_geodetic = True
    supports_perimeter_geodetic = False
    supports_area_geodetic = True
    # Is the database able to count vertices on polygons (with `num_points`)?
    supports_num_points_poly = True

    # The following properties indicate if the database backend support
    # certain lookups (dwithin, left and right, relate, ...)
    supports_left_right_lookups = False

    # Does the database have raster support?
    supports_raster = False

    # Does the database support a unique index on geometry fields?
    supports_geometry_field_unique_index = True

    @property
    def supports_bbcontains_lookup(self):
        return 'bbcontains' in self.connection.ops.gis_operators

    @property
    def supports_contained_lookup(self):
        return 'contained' in self.connection.ops.gis_operators

    @property
    def supports_crosses_lookup(self):
        return 'crosses' in self.connection.ops.gis_operators

    @property
    def supports_distances_lookups(self):
        return self.has_Distance_function

    @property
    def supports_dwithin_lookup(self):
        return 'dwithin' in self.connection.ops.gis_operators

    @property
    def supports_relate_lookup(self):
        return 'relate' in self.connection.ops.gis_operators

    @property
    def supports_isvalid_lookup(self):
        return self.has_IsValid_function

    # Is the aggregate supported by the database?
    @property
    def supports_collect_aggr(self):
        return aggregates.Collect not in self.connection.ops.disallowed_aggregates

    @property
    def supports_extent_aggr(self):
        return aggregates.Extent not in self.connection.ops.disallowed_aggregates

    @property
    def supports_make_line_aggr(self):
        return aggregates.MakeLine not in self.connection.ops.disallowed_aggregates

    @property
    def supports_union_aggr(self):
        return aggregates.Union not in self.connection.ops.disallowed_aggregates

    def __getattr__(self, name):
        m = re.match(r'has_(\w*)_function$', name)
        if m:
            func_name = m.group(1)
            return func_name not in self.connection.ops.unsupported_functions
        raise AttributeError
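A standalone re-creation of the __getattr__ lookup above, showing how e.g. `features.has_MakeValid_function` is resolved:

import re

name = 'has_MakeValid_function'
m = re.match(r'has_(\w*)_function$', name)
assert m and m.group(1) == 'MakeValid'
# The attribute then evaluates to:
#     'MakeValid' not in connection.ops.unsupported_functions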
82b5a186100d9a2da3171bdfea158b1abb3659480dbfa7fb1aa5fc9495364bd1
from django.contrib.gis.db.models import GeometryField from django.contrib.gis.db.models.functions import Distance from django.contrib.gis.measure import ( Area as AreaMeasure, Distance as DistanceMeasure, ) from django.db.utils import NotSupportedError from django.utils.functional import cached_property class BaseSpatialOperations: # Quick booleans for the type of this spatial backend, and # an attribute for the spatial database version tuple (if applicable) postgis = False spatialite = False mysql = False oracle = False spatial_version = None # How the geometry column should be selected. select = '%s' @cached_property def select_extent(self): return self.select # Does the spatial database have a geometry or geography type? geography = False geometry = False # Aggregates disallowed_aggregates = () geom_func_prefix = '' # Mapping between Django function names and backend names, when names do not # match; used in spatial_function_name(). function_names = {} # Blacklist/set of known unsupported functions of the backend unsupported_functions = { 'Area', 'AsGeoJSON', 'AsGML', 'AsKML', 'AsSVG', 'Azimuth', 'BoundingCircle', 'Centroid', 'Difference', 'Distance', 'Envelope', 'GeoHash', 'GeometryDistance', 'Intersection', 'IsValid', 'Length', 'LineLocatePoint', 'MakeValid', 'MemSize', 'NumGeometries', 'NumPoints', 'Perimeter', 'PointOnSurface', 'Reverse', 'Scale', 'SnapToGrid', 'SymDifference', 'Transform', 'Translate', 'Union', } # Constructors from_text = False # Default conversion functions for aggregates; will be overridden if implemented # for the spatial backend. def convert_extent(self, box, srid): raise NotImplementedError('Aggregate extent not implemented for this spatial backend.') def convert_extent3d(self, box, srid): raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.') # For quoting column values, rather than columns. def geo_quote_name(self, name): return "'%s'" % name # GeometryField operations def geo_db_type(self, f): """ Return the database column type for the geometry field on the spatial backend. """ raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_type() method') def get_distance(self, f, value, lookup_type): """ Return the distance parameters for the given geometry field, lookup value, and lookup type. """ raise NotImplementedError('Distance operations not available on this spatial backend.') def get_geom_placeholder(self, f, value, compiler): """ Return the placeholder for the given geometry field with the given value. Depending on the spatial backend, the placeholder may contain a stored procedure call to the transformation function of the spatial backend. """ def transform_value(value, field): return value is not None and value.srid != field.srid if hasattr(value, 'as_sql'): return ( '%s(%%s, %s)' % (self.spatial_function_name('Transform'), f.srid) if transform_value(value.output_field, f) else '%s' ) if transform_value(value, f): # Add Transform() to the SQL placeholder. return '%s(%s(%%s,%s), %s)' % ( self.spatial_function_name('Transform'), self.from_text, value.srid, f.srid, ) elif self.connection.features.has_spatialrefsys_table: return '%s(%%s,%s)' % (self.from_text, f.srid) else: # For backwards compatibility on MySQL (#27464). return '%s(%%s)' % self.from_text def check_expression_support(self, expression): if isinstance(expression, self.disallowed_aggregates): raise NotSupportedError( "%s spatial aggregation is not supported by this database backend." 
                % expression.name
            )
        super().check_expression_support(expression)

    def spatial_aggregate_name(self, agg_name):
        raise NotImplementedError('Aggregate support not implemented for this spatial backend.')

    def spatial_function_name(self, func_name):
        if func_name in self.unsupported_functions:
            raise NotSupportedError("This backend doesn't support the %s function." % func_name)
        return self.function_names.get(func_name, self.geom_func_prefix + func_name)

    # Routines for getting the OGC-compliant models.
    def geometry_columns(self):
        raise NotImplementedError('Subclasses of BaseSpatialOperations must provide a geometry_columns() method.')

    def spatial_ref_sys(self):
        raise NotImplementedError('Subclasses of BaseSpatialOperations must provide a spatial_ref_sys() method.')

    distance_expr_for_lookup = staticmethod(Distance)

    def get_db_converters(self, expression):
        converters = super().get_db_converters(expression)
        if isinstance(expression.output_field, GeometryField):
            converters.append(self.get_geometry_converter(expression))
        return converters

    def get_geometry_converter(self, expression):
        raise NotImplementedError(
            'Subclasses of BaseSpatialOperations must provide a '
            'get_geometry_converter() method.'
        )

    def get_area_att_for_field(self, field):
        if field.geodetic(self.connection):
            if self.connection.features.supports_area_geodetic:
                return 'sq_m'
            raise NotImplementedError('Area on geodetic coordinate systems not supported.')
        else:
            units_name = field.units_name(self.connection)
            if units_name:
                return AreaMeasure.unit_attname(units_name)

    def get_distance_att_for_field(self, field):
        dist_att = None
        if field.geodetic(self.connection):
            if self.connection.features.supports_distance_geodetic:
                dist_att = 'm'
        else:
            units = field.units_name(self.connection)
            if units:
                dist_att = DistanceMeasure.unit_attname(units)
        return dist_att
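A worked illustration of the two placeholder shapes get_geom_placeholder() can produce. The PostGIS-style names below are assumptions for the sketch, and the inner '%s' is left for the database adapter:

from_text, field_srid, value_srid = 'ST_GeomFromText', 4326, 3857

print('%s(%%s,%s)' % (from_text, field_srid))
# -> ST_GeomFromText(%s,4326)                        (value SRID matches the field)
print('%s(%s(%%s,%s), %s)' % ('ST_Transform', from_text, value_srid, field_srid))
# -> ST_Transform(ST_GeomFromText(%s,3857), 4326)    (value must be transformed)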
08365b2de32070645dab046dcaea7b57d70bfe3fd8a720b339d201cb40a7d59d
from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.db.backends.mysql.features import (
    DatabaseFeatures as MySQLDatabaseFeatures,
)
from django.utils.functional import cached_property


class DatabaseFeatures(BaseSpatialFeatures, MySQLDatabaseFeatures):
    has_spatialrefsys_table = False
    supports_add_srs_entry = False
    supports_distance_geodetic = False
    supports_length_geodetic = False
    supports_area_geodetic = False
    supports_transform = False
    supports_null_geometries = False
    supports_num_points_poly = False

    @cached_property
    def supports_empty_geometry_collection(self):
        return self.connection.mysql_version >= (5, 7, 5)

    @cached_property
    def supports_geometry_field_unique_index(self):
        # Not supported in MySQL since https://dev.mysql.com/worklog/task/?id=11808
        return self.connection.mysql_is_mariadb
42ea0939a1de4f1aabd1e8eef07580e409b3198a42d792adf54e6e08dc495870
from MySQLdb.constants import FIELD_TYPE

from django.contrib.gis.gdal import OGRGeomType
from django.db.backends.mysql.introspection import DatabaseIntrospection


class MySQLIntrospection(DatabaseIntrospection):
    # Updating the data_types_reverse dictionary with the appropriate
    # type for Geometry fields.
    data_types_reverse = DatabaseIntrospection.data_types_reverse.copy()
    data_types_reverse[FIELD_TYPE.GEOMETRY] = 'GeometryField'

    def get_geometry_type(self, table_name, description):
        with self.connection.cursor() as cursor:
            # In order to get the specific geometry type of the field,
            # we introspect on the table definition using `DESCRIBE`.
            cursor.execute('DESCRIBE %s' % self.connection.ops.quote_name(table_name))
            # Increment over description info until we get to the geometry
            # column.
            for column, typ, null, key, default, extra in cursor.fetchall():
                if column == description.name:
                    # Using OGRGeomType to convert from OGC name to Django field.
                    # MySQL does not support 3D or SRIDs, so the field params
                    # are empty.
                    field_type = OGRGeomType(typ).django
                    field_params = {}
                    break
        return field_type, field_params

    def supports_spatial_index(self, cursor, table_name):
        # Supported with MyISAM/Aria, or InnoDB on MySQL 5.7.5+/MariaDB 10.2.2+
        storage_engine = self.get_storage_engine(cursor, table_name)
        if storage_engine == 'InnoDB':
            return self.connection.mysql_version >= (
                (10, 2, 2) if self.connection.mysql_is_mariadb else (5, 7, 5)
            )
        return storage_engine in ('MyISAM', 'Aria')
c7a50e5d0ce7bfd8d503ee356f74249593cce91a604e1e858d58c9085126f9df
from django.contrib.gis.db.backends.base.adapter import WKTAdapter from django.contrib.gis.db.backends.base.operations import ( BaseSpatialOperations, ) from django.contrib.gis.db.backends.utils import SpatialOperator from django.contrib.gis.db.models import aggregates from django.contrib.gis.geos.geometry import GEOSGeometryBase from django.contrib.gis.geos.prototypes.io import wkb_r from django.contrib.gis.measure import Distance from django.db.backends.mysql.operations import DatabaseOperations from django.utils.functional import cached_property class MySQLOperations(BaseSpatialOperations, DatabaseOperations): mysql = True name = 'mysql' geom_func_prefix = 'ST_' Adapter = WKTAdapter @cached_property def select(self): return self.geom_func_prefix + 'AsBinary(%s)' @cached_property def from_text(self): return self.geom_func_prefix + 'GeomFromText' @cached_property def gis_operators(self): return { 'bbcontains': SpatialOperator(func='MBRContains'), # For consistency w/PostGIS API 'bboverlaps': SpatialOperator(func='MBROverlaps'), # ... 'contained': SpatialOperator(func='MBRWithin'), # ... 'contains': SpatialOperator(func='ST_Contains'), 'crosses': SpatialOperator(func='ST_Crosses'), 'disjoint': SpatialOperator(func='ST_Disjoint'), 'equals': SpatialOperator(func='ST_Equals'), 'exact': SpatialOperator(func='ST_Equals'), 'intersects': SpatialOperator(func='ST_Intersects'), 'overlaps': SpatialOperator(func='ST_Overlaps'), 'same_as': SpatialOperator(func='ST_Equals'), 'touches': SpatialOperator(func='ST_Touches'), 'within': SpatialOperator(func='ST_Within'), } disallowed_aggregates = ( aggregates.Collect, aggregates.Extent, aggregates.Extent3D, aggregates.MakeLine, aggregates.Union, ) @cached_property def unsupported_functions(self): unsupported = { 'AsGML', 'AsKML', 'AsSVG', 'Azimuth', 'BoundingCircle', 'ForcePolygonCW', 'GeometryDistance', 'LineLocatePoint', 'MakeValid', 'MemSize', 'Perimeter', 'PointOnSurface', 'Reverse', 'Scale', 'SnapToGrid', 'Transform', 'Translate', } if self.connection.mysql_is_mariadb: unsupported.remove('PointOnSurface') unsupported.update({'GeoHash', 'IsValid'}) if self.connection.mysql_version < (10, 2, 4): unsupported.add('AsGeoJSON') elif self.connection.mysql_version < (5, 7, 5): unsupported.update({'AsGeoJSON', 'GeoHash', 'IsValid'}) return unsupported def geo_db_type(self, f): return f.geom_type def get_distance(self, f, value, lookup_type): value = value[0] if isinstance(value, Distance): if f.geodetic(self.connection): raise ValueError( 'Only numeric values of degree units are allowed on ' 'geodetic distance queries.' ) dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection))) else: dist_param = value return [dist_param] def get_geometry_converter(self, expression): read = wkb_r().read srid = expression.output_field.srid if srid == -1: srid = None geom_class = expression.output_field.geom_class def converter(value, expression, connection): if value is not None: geom = GEOSGeometryBase(read(memoryview(value)), geom_class) if srid: geom.srid = srid return geom return converter
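A standalone re-creation of the version gating in unsupported_functions above (the base set is abridged), which also makes the nesting of the flattened source explicit:

def mysql_unsupported(mysql_version, is_mariadb):
    unsupported = {'AsGML', 'AsKML', 'PointOnSurface'}  # abridged base set
    if is_mariadb:
        unsupported.remove('PointOnSurface')
        unsupported.update({'GeoHash', 'IsValid'})
        if mysql_version < (10, 2, 4):
            unsupported.add('AsGeoJSON')
    elif mysql_version < (5, 7, 5):
        unsupported.update({'AsGeoJSON', 'GeoHash', 'IsValid'})
    return unsupported

assert 'GeoHash' in mysql_unsupported((10, 3, 0), True)      # MariaDB
assert 'AsGeoJSON' in mysql_unsupported((5, 6, 40), False)   # old MySQL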
499a5efcef2107e88628ff5de918904318e4f2995196acf8024f45ac9d7f0187
from django.contrib.gis.gdal import OGRGeomType
from django.db.backends.sqlite3.introspection import (
    DatabaseIntrospection, FlexibleFieldLookupDict,
)


class GeoFlexibleFieldLookupDict(FlexibleFieldLookupDict):
    """
    Subclass that updates the `base_data_types_reverse` dict
    for geometry field types.
    """
    base_data_types_reverse = {
        **FlexibleFieldLookupDict.base_data_types_reverse,
        'point': 'GeometryField',
        'linestring': 'GeometryField',
        'polygon': 'GeometryField',
        'multipoint': 'GeometryField',
        'multilinestring': 'GeometryField',
        'multipolygon': 'GeometryField',
        'geometrycollection': 'GeometryField',
    }


class SpatiaLiteIntrospection(DatabaseIntrospection):
    data_types_reverse = GeoFlexibleFieldLookupDict()

    def get_geometry_type(self, table_name, description):
        with self.connection.cursor() as cursor:
            # Querying the `geometry_columns` table to get additional metadata.
            cursor.execute('SELECT coord_dimension, srid, geometry_type '
                           'FROM geometry_columns '
                           'WHERE f_table_name=%s AND f_geometry_column=%s',
                           (table_name, description.name))
            row = cursor.fetchone()
            if not row:
                raise Exception('Could not find a geometry column for "%s"."%s"' %
                                (table_name, description.name))

            # OGRGeomType does not require GDAL and makes it easy to convert
            # from OGC geom type name to Django field.
            ogr_type = row[2]
            if isinstance(ogr_type, int) and ogr_type > 1000:
                # SpatiaLite uses SFSQL 1.2 offsets 1000 (Z), 2000 (M), and
                # 3000 (ZM) to indicate the presence of higher dimensional
                # coordinates (M not yet supported by Django).
                ogr_type = ogr_type % 1000 + OGRGeomType.wkb25bit
            field_type = OGRGeomType(ogr_type).django

            # Getting any GeometryField keyword arguments that are not the default.
            dim = row[0]
            srid = row[1]
            field_params = {}
            if srid != 4326:
                field_params['srid'] = srid
            if (isinstance(dim, str) and 'Z' in dim) or dim == 3:
                field_params['dim'] = 3
        return field_type, field_params

    def get_constraints(self, cursor, table_name):
        constraints = super().get_constraints(cursor, table_name)
        cursor.execute('SELECT f_geometry_column '
                       'FROM geometry_columns '
                       'WHERE f_table_name=%s AND spatial_index_enabled=1',
                       (table_name,))
        for row in cursor.fetchall():
            constraints['%s__spatial__index' % row[0]] = {
                "columns": [row[0]],
                "primary_key": False,
                "unique": False,
                "foreign_key": None,
                "check": False,
                "index": True,
            }
        return constraints
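# Editor's note: an illustrative sketch, not part of Django, showing the
# SFSQL 1.2 offset handling above in numbers. SpatiaLite stores a 3D point
# type as 1001; stripping the thousands digit and setting OGR's 2.5D bit
# yields a type code that OGRGeomType maps back to a Django field name.
from django.contrib.gis.gdal import OGRGeomType

ogr_type = 1001 % 1000 + OGRGeomType.wkb25bit  # Point25D
assert OGRGeomType(ogr_type).django == 'PointField'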
8a2a1d70a61600c233ff1ac96a0afed676e288ed98258fd0d2fb7fd2ec9a0f9e
""" The GeometryColumns and SpatialRefSys models for the SpatiaLite backend. """ from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin from django.db import models class SpatialiteGeometryColumns(models.Model): """ The 'geometry_columns' table from SpatiaLite. """ f_table_name = models.CharField(max_length=256) f_geometry_column = models.CharField(max_length=256) coord_dimension = models.IntegerField() srid = models.IntegerField(primary_key=True) spatial_index_enabled = models.IntegerField() type = models.IntegerField(db_column='geometry_type') class Meta: app_label = 'gis' db_table = 'geometry_columns' managed = False def __str__(self): return '%s.%s - %dD %s field (SRID: %d)' % ( self.f_table_name, self.f_geometry_column, self.coord_dimension, self.type, self.srid, ) @classmethod def table_name_col(cls): """ Return the name of the metadata column used to store the feature table name. """ return 'f_table_name' @classmethod def geom_col_name(cls): """ Return the name of the metadata column used to store the feature geometry column. """ return 'f_geometry_column' class SpatialiteSpatialRefSys(models.Model, SpatialRefSysMixin): """ The 'spatial_ref_sys' table from SpatiaLite. """ srid = models.IntegerField(primary_key=True) auth_name = models.CharField(max_length=256) auth_srid = models.IntegerField() ref_sys_name = models.CharField(max_length=256) proj4text = models.CharField(max_length=2048) srtext = models.CharField(max_length=2048) class Meta: app_label = 'gis' db_table = 'spatial_ref_sys' managed = False @property def wkt(self): return self.srtext
b8a3a24efb51c4eb8356a722c649999c07651d6e8973e82208a32e63c6e6f3a1
""" SQL functions reference lists: https://www.gaia-gis.it/gaia-sins/spatialite-sql-4.3.0.html """ from django.contrib.gis.db.backends.base.operations import ( BaseSpatialOperations, ) from django.contrib.gis.db.backends.spatialite.adapter import SpatiaLiteAdapter from django.contrib.gis.db.backends.utils import SpatialOperator from django.contrib.gis.db.models import aggregates from django.contrib.gis.geos.geometry import GEOSGeometry, GEOSGeometryBase from django.contrib.gis.geos.prototypes.io import wkb_r from django.contrib.gis.measure import Distance from django.core.exceptions import ImproperlyConfigured from django.db.backends.sqlite3.operations import DatabaseOperations from django.utils.functional import cached_property from django.utils.version import get_version_tuple class SpatialiteNullCheckOperator(SpatialOperator): def as_sql(self, connection, lookup, template_params, sql_params): sql, params = super().as_sql(connection, lookup, template_params, sql_params) return '%s > 0' % sql, params class SpatiaLiteOperations(BaseSpatialOperations, DatabaseOperations): name = 'spatialite' spatialite = True Adapter = SpatiaLiteAdapter collect = 'Collect' extent = 'Extent' makeline = 'MakeLine' unionagg = 'GUnion' from_text = 'GeomFromText' gis_operators = { # Binary predicates 'equals': SpatialiteNullCheckOperator(func='Equals'), 'disjoint': SpatialiteNullCheckOperator(func='Disjoint'), 'touches': SpatialiteNullCheckOperator(func='Touches'), 'crosses': SpatialiteNullCheckOperator(func='Crosses'), 'within': SpatialiteNullCheckOperator(func='Within'), 'overlaps': SpatialiteNullCheckOperator(func='Overlaps'), 'contains': SpatialiteNullCheckOperator(func='Contains'), 'intersects': SpatialiteNullCheckOperator(func='Intersects'), 'relate': SpatialiteNullCheckOperator(func='Relate'), 'coveredby': SpatialiteNullCheckOperator(func='CoveredBy'), 'covers': SpatialiteNullCheckOperator(func='Covers'), # Returns true if B's bounding box completely contains A's bounding box. 'contained': SpatialOperator(func='MbrWithin'), # Returns true if A's bounding box completely contains B's bounding box. 'bbcontains': SpatialOperator(func='MbrContains'), # Returns true if A's bounding box overlaps B's bounding box. 'bboverlaps': SpatialOperator(func='MbrOverlaps'), # These are implemented here as synonyms for Equals 'same_as': SpatialiteNullCheckOperator(func='Equals'), 'exact': SpatialiteNullCheckOperator(func='Equals'), # Distance predicates 'dwithin': SpatialOperator(func='PtDistWithin'), } disallowed_aggregates = (aggregates.Extent3D,) select = 'CAST (AsEWKB(%s) AS BLOB)' function_names = { 'ForcePolygonCW': 'ST_ForceLHR', 'Length': 'ST_Length', 'LineLocatePoint': 'ST_Line_Locate_Point', 'NumPoints': 'ST_NPoints', 'Reverse': 'ST_Reverse', 'Scale': 'ScaleCoords', 'Translate': 'ST_Translate', 'Union': 'ST_Union', } @cached_property def unsupported_functions(self): unsupported = {'BoundingCircle', 'GeometryDistance', 'MemSize'} if not self.lwgeom_version(): unsupported |= {'Azimuth', 'GeoHash', 'IsValid', 'MakeValid'} return unsupported @cached_property def spatial_version(self): """Determine the version of the SpatiaLite library.""" try: version = self.spatialite_version_tuple()[1:] except Exception as exc: raise ImproperlyConfigured( 'Cannot determine the SpatiaLite version for the "%s" database. ' 'Was the SpatiaLite initialization SQL loaded on this database?' 
% ( self.connection.settings_dict['NAME'], ) ) from exc if version < (4, 3, 0): raise ImproperlyConfigured('GeoDjango supports SpatiaLite 4.3.0 and above.') return version def convert_extent(self, box): """ Convert the polygon data received from SpatiaLite to min/max values. """ if box is None: return None shell = GEOSGeometry(box).shell xmin, ymin = shell[0][:2] xmax, ymax = shell[2][:2] return (xmin, ymin, xmax, ymax) def geo_db_type(self, f): """ Return None because geometry columns are added via the `AddGeometryColumn` stored procedure on SpatiaLite. """ return None def get_distance(self, f, value, lookup_type): """ Return the distance parameters for the given geometry field, lookup value, and lookup type. """ if not value: return [] value = value[0] if isinstance(value, Distance): if f.geodetic(self.connection): if lookup_type == 'dwithin': raise ValueError( 'Only numeric values of degree units are allowed on ' 'geographic DWithin queries.' ) dist_param = value.m else: dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection))) else: dist_param = value return [dist_param] def _get_spatialite_func(self, func): """ Helper routine for calling SpatiaLite functions and returning their result. Any error occurring in this method should be handled by the caller. """ cursor = self.connection._cursor() try: cursor.execute('SELECT %s' % func) row = cursor.fetchone() finally: cursor.close() return row[0] def geos_version(self): "Return the version of GEOS used by SpatiaLite as a string." return self._get_spatialite_func('geos_version()') def proj4_version(self): "Return the version of the PROJ.4 library used by SpatiaLite." return self._get_spatialite_func('proj4_version()') def lwgeom_version(self): """Return the version of LWGEOM library used by SpatiaLite.""" return self._get_spatialite_func('lwgeom_version()') def spatialite_version(self): "Return the SpatiaLite library version as a string." return self._get_spatialite_func('spatialite_version()') def spatialite_version_tuple(self): """ Return the SpatiaLite version as a tuple (version string, major, minor, subminor). """ version = self.spatialite_version() return (version,) + get_version_tuple(version) def spatial_aggregate_name(self, agg_name): """ Return the spatial aggregate SQL template and function for the given Aggregate instance. """ agg_name = 'unionagg' if agg_name.lower() == 'union' else agg_name.lower() return getattr(self, agg_name) # Routines for getting the OGC-compliant models. def geometry_columns(self): from django.contrib.gis.db.backends.spatialite.models import SpatialiteGeometryColumns return SpatialiteGeometryColumns def spatial_ref_sys(self): from django.contrib.gis.db.backends.spatialite.models import SpatialiteSpatialRefSys return SpatialiteSpatialRefSys def get_geometry_converter(self, expression): geom_class = expression.output_field.geom_class read = wkb_r().read def converter(value, expression, connection): return None if value is None else GEOSGeometryBase(read(value), geom_class) return converter
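# Editor's note: an illustrative sketch, not part of Django, of the version
# parsing used by `spatialite_version_tuple` above. `get_version_tuple`
# extracts the numeric (major, minor, subminor) prefix from a version string
# such as the one returned by spatialite_version().
from django.utils.version import get_version_tuple

version = '4.3.0a'
assert get_version_tuple(version) == (4, 3, 0)
assert (version,) + get_version_tuple(version) == ('4.3.0a', 4, 3, 0)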
86dcf8e4fa273150ec76248bb10260e313a5cac69775a5ddca3883349c66e962
from django.contrib.gis.gdal import OGRGeomType from django.db.backends.postgresql.introspection import DatabaseIntrospection class PostGISIntrospection(DatabaseIntrospection): postgis_oid_lookup = {} # Populated when introspection is performed. ignored_tables = DatabaseIntrospection.ignored_tables + [ 'geography_columns', 'geometry_columns', 'raster_columns', 'spatial_ref_sys', 'raster_overviews', ] def get_field_type(self, data_type, description): if not self.postgis_oid_lookup: # Query PostgreSQL's pg_type table to determine the OID integers # for the PostGIS data types used in reverse lookup (the integers # may be different across versions). To prevent unnecessary # requests upon connection initialization, the `data_types_reverse` # dictionary isn't updated until introspection is performed here. with self.connection.cursor() as cursor: cursor.execute("SELECT oid, typname FROM pg_type WHERE typname IN ('geometry', 'geography')") self.postgis_oid_lookup = dict(cursor.fetchall()) self.data_types_reverse.update((oid, 'GeometryField') for oid in self.postgis_oid_lookup) return super().get_field_type(data_type, description) def get_geometry_type(self, table_name, description): """ The geometry type OID used by PostGIS does not indicate the particular type of field that a geometry column is (e.g., whether it's a PointField or a PolygonField). Thus, this routine queries the PostGIS metadata tables to determine the geometry type. """ with self.connection.cursor() as cursor: cursor.execute(""" SELECT t.coord_dimension, t.srid, t.type FROM ( SELECT * FROM geometry_columns UNION ALL SELECT * FROM geography_columns ) AS t WHERE t.f_table_name = %s AND t.f_geometry_column = %s """, (table_name, description.name)) row = cursor.fetchone() if not row: raise Exception('Could not find a geometry or geography column for "%s"."%s"' % (table_name, description.name)) dim, srid, field_type = row # OGRGeomType does not require GDAL and makes it easy to convert # from OGC geom type name to Django field. field_type = OGRGeomType(field_type).django # Getting any GeometryField keyword arguments that are not the default. field_params = {} if self.postgis_oid_lookup.get(description.type_code) == 'geography': field_params['geography'] = True if srid != 4326: field_params['srid'] = srid if dim != 2: field_params['dim'] = dim return field_type, field_params
b4a8916733baa766099cf6cf5d178c9457008858a3f82fc7ffac3c14784bab19
""" The GeometryColumns and SpatialRefSys models for the PostGIS backend. """ from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin from django.db import models class PostGISGeometryColumns(models.Model): """ The 'geometry_columns' view from PostGIS. See the PostGIS documentation at Ch. 4.3.2. """ f_table_catalog = models.CharField(max_length=256) f_table_schema = models.CharField(max_length=256) f_table_name = models.CharField(max_length=256) f_geometry_column = models.CharField(max_length=256) coord_dimension = models.IntegerField() srid = models.IntegerField(primary_key=True) type = models.CharField(max_length=30) class Meta: app_label = 'gis' db_table = 'geometry_columns' managed = False def __str__(self): return '%s.%s - %dD %s field (SRID: %d)' % ( self.f_table_name, self.f_geometry_column, self.coord_dimension, self.type, self.srid, ) @classmethod def table_name_col(cls): """ Return the name of the metadata column used to store the feature table name. """ return 'f_table_name' @classmethod def geom_col_name(cls): """ Return the name of the metadata column used to store the feature geometry column. """ return 'f_geometry_column' class PostGISSpatialRefSys(models.Model, SpatialRefSysMixin): """ The 'spatial_ref_sys' table from PostGIS. See the PostGIS documentation at Ch. 4.2.1. """ srid = models.IntegerField(primary_key=True) auth_name = models.CharField(max_length=256) auth_srid = models.IntegerField() srtext = models.CharField(max_length=2048) proj4text = models.CharField(max_length=2048) class Meta: app_label = 'gis' db_table = 'spatial_ref_sys' managed = False @property def wkt(self): return self.srtext
1deee656926b25d6cc1a33c08074579269b6db90ae57155cfaf8a22524ca3a29
""" PostGIS to GDAL conversion constant definitions """ # Lookup to convert pixel type values from GDAL to PostGIS GDAL_TO_POSTGIS = [None, 4, 6, 5, 8, 7, 10, 11, None, None, None, None] # Lookup to convert pixel type values from PostGIS to GDAL POSTGIS_TO_GDAL = [1, 1, 1, 3, 1, 3, 2, 5, 4, None, 6, 7, None, None] # Struct pack structure for raster header, the raster header has the # following structure: # # Endianness, PostGIS raster version, number of bands, scale, origin, # skew, srid, width, and height. # # Scale, origin, and skew have x and y values. PostGIS currently uses # a fixed endianness (1) and there is only one version (0). POSTGIS_HEADER_STRUCTURE = 'B H H d d d d d d i H H' # Lookup values to convert GDAL pixel types to struct characters. This is # used to pack and unpack the pixel values of PostGIS raster bands. GDAL_TO_STRUCT = [ None, 'B', 'H', 'h', 'L', 'l', 'f', 'd', None, None, None, None, ] # Size of the packed value in bytes for different numerical types. # This is needed to cut chunks of band data out of PostGIS raster strings # when decomposing them into GDALRasters. # See https://docs.python.org/library/struct.html#format-characters STRUCT_SIZE = { 'b': 1, # Signed char 'B': 1, # Unsigned char '?': 1, # _Bool 'h': 2, # Short 'H': 2, # Unsigned short 'i': 4, # Integer 'I': 4, # Unsigned Integer 'l': 4, # Long 'L': 4, # Unsigned Long 'f': 4, # Float 'd': 8, # Double }
4d727f90d34909feb73c9f6525eea3abf8470ab21b72e7ff554229ceab8537b1
import re from django.conf import settings from django.contrib.gis.db.backends.base.operations import ( BaseSpatialOperations, ) from django.contrib.gis.db.backends.utils import SpatialOperator from django.contrib.gis.db.models import GeometryField, RasterField from django.contrib.gis.gdal import GDALRaster from django.contrib.gis.geos.geometry import GEOSGeometryBase from django.contrib.gis.geos.prototypes.io import wkb_r from django.contrib.gis.measure import Distance from django.core.exceptions import ImproperlyConfigured from django.db.backends.postgresql.operations import DatabaseOperations from django.db.models import Func, Value from django.db.utils import NotSupportedError, ProgrammingError from django.utils.functional import cached_property from django.utils.version import get_version_tuple from .adapter import PostGISAdapter from .models import PostGISGeometryColumns, PostGISSpatialRefSys from .pgraster import from_pgraster # Identifier to mark raster lookups as bilateral. BILATERAL = 'bilateral' class PostGISOperator(SpatialOperator): def __init__(self, geography=False, raster=False, **kwargs): # Only a subset of the operators and functions are available for the # geography type. self.geography = geography # Only a subset of the operators and functions are available for the # raster type. Lookups that don't support raster will be converted to # polygons. If the raster argument is set to BILATERAL, then the # operator cannot handle mixed geom-raster lookups. self.raster = raster super().__init__(**kwargs) def as_sql(self, connection, lookup, template_params, *args): if lookup.lhs.output_field.geography and not self.geography: raise ValueError('PostGIS geography does not support the "%s" ' 'function/operator.' % (self.func or self.op,)) template_params = self.check_raster(lookup, template_params) return super().as_sql(connection, lookup, template_params, *args) def check_raster(self, lookup, template_params): spheroid = lookup.rhs_params and lookup.rhs_params[-1] == 'spheroid' # Check which input is a raster. lhs_is_raster = lookup.lhs.field.geom_type == 'RASTER' rhs_is_raster = isinstance(lookup.rhs, GDALRaster) # Look for band indices and inject them if provided. if lookup.band_lhs is not None and lhs_is_raster: if not self.func: raise ValueError('Band indices are not allowed for this operator, it works on bbox only.') template_params['lhs'] = '%s, %s' % (template_params['lhs'], lookup.band_lhs) if lookup.band_rhs is not None and rhs_is_raster: if not self.func: raise ValueError('Band indices are not allowed for this operator, it works on bbox only.') template_params['rhs'] = '%s, %s' % (template_params['rhs'], lookup.band_rhs) # Convert rasters to polygons if necessary. if not self.raster or spheroid: # Operators without raster support. if lhs_is_raster: template_params['lhs'] = 'ST_Polygon(%s)' % template_params['lhs'] if rhs_is_raster: template_params['rhs'] = 'ST_Polygon(%s)' % template_params['rhs'] elif self.raster == BILATERAL: # Operators with raster support but don't support mixed (rast-geom) # lookups. 
if lhs_is_raster and not rhs_is_raster: template_params['lhs'] = 'ST_Polygon(%s)' % template_params['lhs'] elif rhs_is_raster and not lhs_is_raster: template_params['rhs'] = 'ST_Polygon(%s)' % template_params['rhs'] return template_params class ST_Polygon(Func): function = 'ST_Polygon' def __init__(self, expr): super().__init__(expr) expr = self.source_expressions[0] if isinstance(expr, Value) and not expr._output_field_or_none: self.source_expressions[0] = Value(expr.value, output_field=RasterField(srid=expr.value.srid)) @cached_property def output_field(self): return GeometryField(srid=self.source_expressions[0].field.srid) class PostGISOperations(BaseSpatialOperations, DatabaseOperations): name = 'postgis' postgis = True geography = True geom_func_prefix = 'ST_' Adapter = PostGISAdapter collect = geom_func_prefix + 'Collect' extent = geom_func_prefix + 'Extent' extent3d = geom_func_prefix + '3DExtent' length3d = geom_func_prefix + '3DLength' makeline = geom_func_prefix + 'MakeLine' perimeter3d = geom_func_prefix + '3DPerimeter' unionagg = geom_func_prefix + 'Union' gis_operators = { 'bbcontains': PostGISOperator(op='~', raster=True), 'bboverlaps': PostGISOperator(op='&&', geography=True, raster=True), 'contained': PostGISOperator(op='@', raster=True), 'overlaps_left': PostGISOperator(op='&<', raster=BILATERAL), 'overlaps_right': PostGISOperator(op='&>', raster=BILATERAL), 'overlaps_below': PostGISOperator(op='&<|'), 'overlaps_above': PostGISOperator(op='|&>'), 'left': PostGISOperator(op='<<'), 'right': PostGISOperator(op='>>'), 'strictly_below': PostGISOperator(op='<<|'), 'strictly_above': PostGISOperator(op='|>>'), 'same_as': PostGISOperator(op='~=', raster=BILATERAL), 'exact': PostGISOperator(op='~=', raster=BILATERAL), # alias of same_as 'contains': PostGISOperator(func='ST_Contains', raster=BILATERAL), 'contains_properly': PostGISOperator(func='ST_ContainsProperly', raster=BILATERAL), 'coveredby': PostGISOperator(func='ST_CoveredBy', geography=True, raster=BILATERAL), 'covers': PostGISOperator(func='ST_Covers', geography=True, raster=BILATERAL), 'crosses': PostGISOperator(func='ST_Crosses'), 'disjoint': PostGISOperator(func='ST_Disjoint', raster=BILATERAL), 'equals': PostGISOperator(func='ST_Equals'), 'intersects': PostGISOperator(func='ST_Intersects', geography=True, raster=BILATERAL), 'overlaps': PostGISOperator(func='ST_Overlaps', raster=BILATERAL), 'relate': PostGISOperator(func='ST_Relate'), 'touches': PostGISOperator(func='ST_Touches', raster=BILATERAL), 'within': PostGISOperator(func='ST_Within', raster=BILATERAL), 'dwithin': PostGISOperator(func='ST_DWithin', geography=True, raster=BILATERAL), } unsupported_functions = set() select = '%s::bytea' select_extent = None @cached_property def function_names(self): function_names = { 'BoundingCircle': 'ST_MinimumBoundingCircle', 'NumPoints': 'ST_NPoints', } if self.spatial_version < (2, 4, 0): function_names['ForcePolygonCW'] = 'ST_ForceRHR' return function_names @cached_property def spatial_version(self): """Determine the version of the PostGIS library.""" # Trying to get the PostGIS version because the function # signatures will depend on the version used. The cost # here is a database query to determine the version, which # can be mitigated by setting `POSTGIS_VERSION` with a 3-tuple # comprising user-supplied values for the major, minor, and # subminor revision of PostGIS. 
if hasattr(settings, 'POSTGIS_VERSION'): version = settings.POSTGIS_VERSION else: # Run a basic query to check the status of the connection so we're # sure we only raise the error below if the problem comes from # PostGIS and not from PostgreSQL itself (see #24862). self._get_postgis_func('version') try: vtup = self.postgis_version_tuple() except ProgrammingError: raise ImproperlyConfigured( 'Cannot determine PostGIS version for database "%s" ' 'using command "SELECT postgis_lib_version()". ' 'GeoDjango requires at least PostGIS version 2.2. ' 'Was the database created from a spatial database ' 'template?' % self.connection.settings_dict['NAME'] ) version = vtup[1:] return version def convert_extent(self, box): """ Return a 4-tuple extent for the `Extent` aggregate by converting the bounding box text returned by PostGIS (`box` argument), for example: "BOX(-90.0 30.0, -85.0 40.0)". """ if box is None: return None ll, ur = box[4:-1].split(',') xmin, ymin = map(float, ll.split()) xmax, ymax = map(float, ur.split()) return (xmin, ymin, xmax, ymax) def convert_extent3d(self, box3d): """ Return a 6-tuple extent for the `Extent3D` aggregate by converting the 3d bounding-box text returned by PostGIS (`box3d` argument), for example: "BOX3D(-90.0 30.0 1, -85.0 40.0 2)". """ if box3d is None: return None ll, ur = box3d[6:-1].split(',') xmin, ymin, zmin = map(float, ll.split()) xmax, ymax, zmax = map(float, ur.split()) return (xmin, ymin, zmin, xmax, ymax, zmax) def geo_db_type(self, f): """ Return the database field type for the given spatial field. """ if f.geom_type == 'RASTER': return 'raster' # Type-based geometries. # TODO: Support 'M' extension. if f.dim == 3: geom_type = f.geom_type + 'Z' else: geom_type = f.geom_type if f.geography: if f.srid != 4326: raise NotSupportedError('PostGIS only supports geography columns with an SRID of 4326.') return 'geography(%s,%d)' % (geom_type, f.srid) else: return 'geometry(%s,%d)' % (geom_type, f.srid) def get_distance(self, f, dist_val, lookup_type): """ Retrieve the distance parameters for the given geometry field, distance lookup value, and the distance lookup type. This is the most complex implementation of the spatial backends due to what is supported on geodetic geometry columns vs. what's available on projected geometry columns. In addition, it has to take into account the geography column type. """ # Getting the distance parameter value = dist_val[0] # Shorthand boolean flags. geodetic = f.geodetic(self.connection) geography = f.geography if isinstance(value, Distance): if geography: dist_param = value.m elif geodetic: if lookup_type == 'dwithin': raise ValueError('Only numeric values of degree units are ' 'allowed on geographic DWithin queries.') dist_param = value.m else: dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection))) else: # Assuming the distance is in the units of the field. dist_param = value return [dist_param] def get_geom_placeholder(self, f, value, compiler): """ Provide a proper substitution value for Geometries or rasters that are not in the SRID of the field. Specifically, this routine will substitute in the ST_Transform() function call. 
""" transform_func = self.spatial_function_name('Transform') if hasattr(value, 'as_sql'): if value.field.srid == f.srid: placeholder = '%s' else: placeholder = '%s(%%s, %s)' % (transform_func, f.srid) return placeholder # Get the srid for this object if value is None: value_srid = None else: value_srid = value.srid # Adding Transform() to the SQL placeholder if the value srid # is not equal to the field srid. if value_srid is None or value_srid == f.srid: placeholder = '%s' else: placeholder = '%s(%%s, %s)' % (transform_func, f.srid) return placeholder def _get_postgis_func(self, func): """ Helper routine for calling PostGIS functions and returning their result. """ # Close out the connection. See #9437. with self.connection.temporary_connection() as cursor: cursor.execute('SELECT %s()' % func) return cursor.fetchone()[0] def postgis_geos_version(self): "Return the version of the GEOS library used with PostGIS." return self._get_postgis_func('postgis_geos_version') def postgis_lib_version(self): "Return the version number of the PostGIS library used with PostgreSQL." return self._get_postgis_func('postgis_lib_version') def postgis_proj_version(self): "Return the version of the PROJ.4 library used with PostGIS." return self._get_postgis_func('postgis_proj_version') def postgis_version(self): "Return PostGIS version number and compile-time options." return self._get_postgis_func('postgis_version') def postgis_full_version(self): "Return PostGIS version number and compile-time options." return self._get_postgis_func('postgis_full_version') def postgis_version_tuple(self): """ Return the PostGIS version as a tuple (version string, major, minor, subminor). """ version = self.postgis_lib_version() return (version,) + get_version_tuple(version) def proj_version_tuple(self): """ Return the version of PROJ.4 used by PostGIS as a tuple of the major, minor, and subminor release numbers. """ proj_regex = re.compile(r'(\d+)\.(\d+)\.(\d+)') proj_ver_str = self.postgis_proj_version() m = proj_regex.search(proj_ver_str) if m: return tuple(map(int, [m.group(1), m.group(2), m.group(3)])) else: raise Exception('Could not determine PROJ.4 version from PostGIS.') def spatial_aggregate_name(self, agg_name): if agg_name == 'Extent3D': return self.extent3d else: return self.geom_func_prefix + agg_name # Routines for getting the OGC-compliant models. def geometry_columns(self): return PostGISGeometryColumns def spatial_ref_sys(self): return PostGISSpatialRefSys def parse_raster(self, value): """Convert a PostGIS HEX String into a dict readable by GDALRaster.""" return from_pgraster(value) def distance_expr_for_lookup(self, lhs, rhs, **kwargs): return super().distance_expr_for_lookup( self._normalize_distance_lookup_arg(lhs), self._normalize_distance_lookup_arg(rhs), **kwargs ) @staticmethod def _normalize_distance_lookup_arg(arg): is_raster = ( arg.field.geom_type == 'RASTER' if hasattr(arg, 'field') else isinstance(arg, GDALRaster) ) return ST_Polygon(arg) if is_raster else arg def get_geometry_converter(self, expression): read = wkb_r().read geom_class = expression.output_field.geom_class def converter(value, expression, connection): return None if value is None else GEOSGeometryBase(read(value), geom_class) return converter def get_area_att_for_field(self, field): return 'sq_m'
1862b73d2a215f04568681dac460571e70e465d5128b3f13c4e6a581d0c570d4
from django.db.backends.postgresql.schema import DatabaseSchemaEditor class PostGISSchemaEditor(DatabaseSchemaEditor): geom_index_type = 'GIST' geom_index_ops_nd = 'GIST_GEOMETRY_OPS_ND' rast_index_wrapper = 'ST_ConvexHull(%s)' sql_alter_column_to_3d = "ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force3D(%(column)s)::%(type)s" sql_alter_column_to_2d = "ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force2D(%(column)s)::%(type)s" def geo_quote_name(self, name): return self.connection.ops.geo_quote_name(name) def _field_should_be_indexed(self, model, field): if getattr(field, 'spatial_index', False): return True return super()._field_should_be_indexed(model, field) def _create_index_sql(self, model, fields, **kwargs): if len(fields) != 1 or not hasattr(fields[0], 'geodetic'): return super()._create_index_sql(model, fields, **kwargs) field = fields[0] field_column = self.quote_name(field.column) if field.geom_type == 'RASTER': # For raster fields, wrap index creation SQL statement with ST_ConvexHull. # Indexes on raster columns are based on the convex hull of the raster. field_column = self.rast_index_wrapper % field_column elif field.dim > 2 and not field.geography: # Use "nd" ops which are fast on multidimensional cases field_column = "%s %s" % (field_column, self.geom_index_ops_nd) return self.sql_create_index % { "name": self.quote_name('%s_%s_id' % (model._meta.db_table, field.column)), "table": self.quote_name(model._meta.db_table), "using": "USING %s" % self.geom_index_type, "columns": field_column, "extra": '', "condition": '', } def _alter_column_type_sql(self, table, old_field, new_field, new_type): """ Special case when dimension changed. """ if not hasattr(old_field, 'dim') or not hasattr(new_field, 'dim'): return super()._alter_column_type_sql(table, old_field, new_field, new_type) if old_field.dim == 2 and new_field.dim == 3: sql_alter = self.sql_alter_column_to_3d elif old_field.dim == 3 and new_field.dim == 2: sql_alter = self.sql_alter_column_to_2d else: sql_alter = self.sql_alter_column_type return ( ( sql_alter % { "column": self.quote_name(new_field.column), "type": new_type, }, [], ), [], )
b34ed28579cba030e1b6fce0bb555e47397193fa3af917057e5cec6e32990859
import argparse

from django.contrib.gis import gdal
from django.core.management.base import BaseCommand, CommandError
from django.utils.inspect import get_func_args


class LayerOptionAction(argparse.Action):
    """
    Custom argparse action for the `ogrinspect` `layer_key` keyword option
    which may be an integer or a string.
    """
    def __call__(self, parser, namespace, value, option_string=None):
        try:
            setattr(namespace, self.dest, int(value))
        except ValueError:
            setattr(namespace, self.dest, value)


class ListOptionAction(argparse.Action):
    """
    Custom argparse action for `ogrinspect` keywords that require
    a string list. If the string is 'True'/'true' then the option
    value will be a boolean instead.
    """
    def __call__(self, parser, namespace, value, option_string=None):
        if value.lower() == 'true':
            setattr(namespace, self.dest, True)
        else:
            setattr(namespace, self.dest, value.split(','))


class Command(BaseCommand):
    help = (
        'Inspects the given OGR-compatible data source (e.g., a shapefile) and outputs\n'
        'a GeoDjango model with the given model name. For example:\n'
        ' ./manage.py ogrinspect zipcode.shp Zipcode'
    )

    requires_system_checks = False

    def add_arguments(self, parser):
        parser.add_argument('data_source', help='Path to the data source.')
        parser.add_argument('model_name', help='Name of the model to create.')
        parser.add_argument(
            '--blank',
            action=ListOptionAction, default=False,
            help='Use a comma separated list of OGR field names to add '
                 'the `blank=True` option to the field definition. Set to `true` '
                 'to apply to all applicable fields.',
        )
        parser.add_argument(
            '--decimal',
            action=ListOptionAction, default=False,
            help='Use a comma separated list of OGR float fields to '
                 'generate `DecimalField` instead of the default '
                 '`FloatField`. Set to `true` to apply to all OGR float fields.',
        )
        parser.add_argument(
            '--geom-name', default='geom',
            help='Specifies the attribute name to use for the Geometry Field (defaults to `geom`)',
        )
        parser.add_argument(
            '--layer', dest='layer_key',
            action=LayerOptionAction, default=0,
            help='The key for specifying which layer in the OGR data '
                 'source to use. Defaults to 0 (the first layer). May be '
                 'an integer or a string identifier for the layer.',
        )
        parser.add_argument(
            '--multi-geom', action='store_true',
            help='Treat the geometry in the data source as a geometry collection.',
        )
        parser.add_argument(
            '--name-field',
            help='Specifies a field name to return for the __str__() method.',
        )
        parser.add_argument(
            '--no-imports', action='store_false', dest='imports',
            help='Do not include `from django.contrib.gis.db import models` statement.',
        )
        parser.add_argument(
            '--null', action=ListOptionAction, default=False,
            help='Use a comma separated list of OGR field names to add '
                 'the `null=True` option to the field definition. Set to `true` '
                 'to apply to all applicable fields.',
        )
        parser.add_argument(
            '--srid',
            help='The SRID to use for the Geometry Field. If it can be '
                 'determined, the SRID of the data source is used.',
        )
        parser.add_argument(
            '--mapping', action='store_true',
            help='Generate mapping dictionary for use with `LayerMapping`.',
        )

    def handle(self, *args, **options):
        data_source, model_name = options.pop('data_source'), options.pop('model_name')

        # Getting the OGR DataSource from the string parameter.
        try:
            ds = gdal.DataSource(data_source)
        except gdal.GDALException as msg:
            raise CommandError(msg)

        # Returning the output of ogrinspect with the given arguments
        # and options.
        from django.contrib.gis.utils.ogrinspect import _ogrinspect, mapping
        # Filter options to params accepted by `_ogrinspect`
        ogr_options = {k: v for k, v in options.items()
                       if k in get_func_args(_ogrinspect) and v is not None}
        output = list(_ogrinspect(ds, model_name, **ogr_options))
        if options['mapping']:
            # Constructing the keyword arguments for `mapping`, and
            # calling it on the data source.
            kwargs = {
                'geom_name': options['geom_name'],
                'layer_key': options['layer_key'],
                'multi_geom': options['multi_geom'],
            }
            mapping_dict = mapping(ds, **kwargs)
            # This extra legwork is so that the dictionary definition comes
            # out in the same order as the fields in the model definition.
            rev_mapping = {v: k for k, v in mapping_dict.items()}
            output.extend(['', '', '# Auto-generated `LayerMapping` dictionary for %s model' % model_name,
                           '%s_mapping = {' % model_name.lower()])
            output.extend("    '%s': '%s'," % (
                rev_mapping[ogr_fld], ogr_fld)
                for ogr_fld in ds[options['layer_key']].fields
            )
            output.extend(["    '%s': '%s'," % (options['geom_name'], mapping_dict[options['geom_name']]), '}'])
        return '\n'.join(output) + '\n'
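# Editor's note: a usage sketch, not part of Django. The same inspection is
# available from Python through the public `ogrinspect` helper, bypassing
# the management command; 'zipcode.shp' is a placeholder path and GDAL/OGR
# must be installed for this to run.
from django.contrib.gis.utils import ogrinspect

print(ogrinspect('zipcode.shp', 'Zipcode', multi_geom=True, srid=4326))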
b69c9916872379e44dea1135c977e9d4200dcf269062a42923c44b84ab46cc10
from django.core.management.commands.inspectdb import ( Command as InspectDBCommand, ) class Command(InspectDBCommand): db_module = 'django.contrib.gis.db' def get_field_type(self, connection, table_name, row): field_type, field_params, field_notes = super().get_field_type(connection, table_name, row) if field_type == 'GeometryField': # Getting a more specific field type and any additional parameters # from the `get_geometry_type` routine for the spatial backend. field_type, geo_params = connection.introspection.get_geometry_type(table_name, row) field_params.update(geo_params) return field_type, field_params, field_notes
d12e27d99d726e0c0c8dc79956318e52493d775bf008b91e381a6c4293394090
import json import os import sys import uuid from ctypes import ( addressof, byref, c_buffer, c_char_p, c_double, c_int, c_void_p, string_at, ) from django.contrib.gis.gdal.driver import Driver from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.gdal.prototypes import raster as capi from django.contrib.gis.gdal.raster.band import BandList from django.contrib.gis.gdal.raster.base import GDALRasterBase from django.contrib.gis.gdal.raster.const import ( GDAL_RESAMPLE_ALGORITHMS, VSI_DELETE_BUFFER_ON_READ, VSI_FILESYSTEM_BASE_PATH, VSI_TAKE_BUFFER_OWNERSHIP, ) from django.contrib.gis.gdal.srs import SpatialReference, SRSException from django.contrib.gis.geometry import json_regex from django.utils.encoding import force_bytes, force_str from django.utils.functional import cached_property class TransformPoint(list): indices = { 'origin': (0, 3), 'scale': (1, 5), 'skew': (2, 4), } def __init__(self, raster, prop): x = raster.geotransform[self.indices[prop][0]] y = raster.geotransform[self.indices[prop][1]] super().__init__([x, y]) self._raster = raster self._prop = prop @property def x(self): return self[0] @x.setter def x(self, value): gtf = self._raster.geotransform gtf[self.indices[self._prop][0]] = value self._raster.geotransform = gtf @property def y(self): return self[1] @y.setter def y(self, value): gtf = self._raster.geotransform gtf[self.indices[self._prop][1]] = value self._raster.geotransform = gtf class GDALRaster(GDALRasterBase): """ Wrap a raster GDAL Data Source object. """ destructor = capi.close_ds def __init__(self, ds_input, write=False): self._write = 1 if write else 0 Driver.ensure_registered() # Preprocess json inputs. This converts json strings to dictionaries, # which are parsed below the same way as direct dictionary inputs. if isinstance(ds_input, str) and json_regex.match(ds_input): ds_input = json.loads(ds_input) # If input is a valid file path, try setting file as source. if isinstance(ds_input, str): try: # GDALOpen will auto-detect the data source type. self._ptr = capi.open_ds(force_bytes(ds_input), self._write) except GDALException as err: raise GDALException('Could not open the datasource at "{}" ({}).'.format(ds_input, err)) elif isinstance(ds_input, bytes): # Create a new raster in write mode. self._write = 1 # Get size of buffer. size = sys.getsizeof(ds_input) # Pass data to ctypes, keeping a reference to the ctypes object so # that the vsimem file remains available until the GDALRaster is # deleted. self._ds_input = c_buffer(ds_input) # Create random name to reference in vsimem filesystem. vsi_path = os.path.join(VSI_FILESYSTEM_BASE_PATH, str(uuid.uuid4())) # Create vsimem file from buffer. capi.create_vsi_file_from_mem_buffer( force_bytes(vsi_path), byref(self._ds_input), size, VSI_TAKE_BUFFER_OWNERSHIP, ) # Open the new vsimem file as a GDALRaster. try: self._ptr = capi.open_ds(force_bytes(vsi_path), self._write) except GDALException: # Remove the broken file from the VSI filesystem. 
                capi.unlink_vsi_file(force_bytes(vsi_path))
                raise GDALException('Failed creating VSI raster from the input buffer.')
        elif isinstance(ds_input, dict):
            # A new raster needs to be created in write mode
            self._write = 1

            # Create driver (in memory by default)
            driver = Driver(ds_input.get('driver', 'MEM'))

            # For out of memory drivers, check filename argument
            if driver.name != 'MEM' and 'name' not in ds_input:
                raise GDALException('Specify name for creation of raster with driver "{}".'.format(driver.name))

            # Check if width and height were specified
            if 'width' not in ds_input or 'height' not in ds_input:
                raise GDALException('Specify width and height attributes for JSON or dict input.')

            # Check if srid was specified
            if 'srid' not in ds_input:
                raise GDALException('Specify srid for JSON or dict input.')

            # Create null terminated gdal options array.
            papsz_options = []
            for key, val in ds_input.get('papsz_options', {}).items():
                option = '{}={}'.format(key, val)
                papsz_options.append(option.upper().encode())
            papsz_options.append(None)

            # Convert papszlist to ctypes array.
            papsz_options = (c_char_p * len(papsz_options))(*papsz_options)

            # Create GDAL Raster
            self._ptr = capi.create_ds(
                driver._ptr,
                force_bytes(ds_input.get('name', '')),
                ds_input['width'],
                ds_input['height'],
                ds_input.get('nr_of_bands', len(ds_input.get('bands', []))),
                ds_input.get('datatype', 6),
                byref(papsz_options),
            )

            # Set band data if provided
            for i, band_input in enumerate(ds_input.get('bands', [])):
                band = self.bands[i]

                if 'nodata_value' in band_input:
                    band.nodata_value = band_input['nodata_value']
                    # Instantiate band filled with nodata values if only
                    # partial input data has been provided.
                    if band.nodata_value is not None and (
                            'data' not in band_input or
                            'size' in band_input or
                            'shape' in band_input):
                        band.data(data=(band.nodata_value,), shape=(1, 1))

                # Set band data values from input.
                band.data(
                    data=band_input.get('data'),
                    size=band_input.get('size'),
                    shape=band_input.get('shape'),
                    offset=band_input.get('offset'),
                )

            # Set SRID
            self.srs = ds_input.get('srid')

            # Set additional properties if provided
            if 'origin' in ds_input:
                self.origin.x, self.origin.y = ds_input['origin']

            if 'scale' in ds_input:
                self.scale.x, self.scale.y = ds_input['scale']

            if 'skew' in ds_input:
                self.skew.x, self.skew.y = ds_input['skew']
        elif isinstance(ds_input, c_void_p):
            # Instantiate the object using an existing pointer to a gdal raster.
            self._ptr = ds_input
        else:
            raise GDALException('Invalid data source input type: "{}".'.format(type(ds_input)))

    def __del__(self):
        if self.is_vsi_based:
            # Remove the temporary file from the VSI in-memory filesystem.
            capi.unlink_vsi_file(force_bytes(self.name))
        super().__del__()

    def __str__(self):
        return self.name

    def __repr__(self):
        """
        Short-hand representation because WKB may be very large.
        """
        return '<Raster object at %s>' % hex(addressof(self._ptr))

    def _flush(self):
        """
        Flush all data from memory into the source file if it exists.
        The data that needs flushing are geotransforms, coordinate systems,
        nodata_values and pixel values. This function will be called
        automatically wherever it is needed.
        """
        # Raise an Exception if the value is being changed in read mode.
        if not self._write:
            raise GDALException('Raster needs to be opened in write mode to change values.')
        capi.flush_ds(self._ptr)

    @property
    def vsi_buffer(self):
        if not self.is_vsi_based:
            return None
        # Prepare an integer that will contain the buffer length.
        out_length = c_int()
        # Get the data using the vsi file name.
dat = capi.get_mem_buffer_from_vsi_file( force_bytes(self.name), byref(out_length), VSI_DELETE_BUFFER_ON_READ, ) # Read the full buffer pointer. return string_at(dat, out_length.value) @cached_property def is_vsi_based(self): return self.name.startswith(VSI_FILESYSTEM_BASE_PATH) @property def name(self): """ Return the name of this raster. Corresponds to filename for file-based rasters. """ return force_str(capi.get_ds_description(self._ptr)) @cached_property def driver(self): """ Return the GDAL Driver used for this raster. """ ds_driver = capi.get_ds_driver(self._ptr) return Driver(ds_driver) @property def width(self): """ Width (X axis) in pixels. """ return capi.get_ds_xsize(self._ptr) @property def height(self): """ Height (Y axis) in pixels. """ return capi.get_ds_ysize(self._ptr) @property def srs(self): """ Return the SpatialReference used in this GDALRaster. """ try: wkt = capi.get_ds_projection_ref(self._ptr) if not wkt: return None return SpatialReference(wkt, srs_type='wkt') except SRSException: return None @srs.setter def srs(self, value): """ Set the spatial reference used in this GDALRaster. The input can be a SpatialReference or any parameter accepted by the SpatialReference constructor. """ if isinstance(value, SpatialReference): srs = value elif isinstance(value, (int, str)): srs = SpatialReference(value) else: raise ValueError('Could not create a SpatialReference from input.') capi.set_ds_projection_ref(self._ptr, srs.wkt.encode()) self._flush() @property def srid(self): """ Shortcut to access the srid of this GDALRaster. """ return self.srs.srid @srid.setter def srid(self, value): """ Shortcut to set this GDALRaster's srs from an srid. """ self.srs = value @property def geotransform(self): """ Return the geotransform of the data source. Return the default geotransform if it does not exist or has not been set previously. The default is [0.0, 1.0, 0.0, 0.0, 0.0, -1.0]. """ # Create empty ctypes double array for data gtf = (c_double * 6)() capi.get_ds_geotransform(self._ptr, byref(gtf)) return list(gtf) @geotransform.setter def geotransform(self, values): "Set the geotransform for the data source." if len(values) != 6 or not all(isinstance(x, (int, float)) for x in values): raise ValueError('Geotransform must consist of 6 numeric values.') # Create ctypes double array with input and write data values = (c_double * 6)(*values) capi.set_ds_geotransform(self._ptr, byref(values)) self._flush() @property def origin(self): """ Coordinates of the raster origin. """ return TransformPoint(self, 'origin') @property def scale(self): """ Pixel scale in units of the raster projection. """ return TransformPoint(self, 'scale') @property def skew(self): """ Skew of pixels (rotation parameters). """ return TransformPoint(self, 'skew') @property def extent(self): """ Return the extent as a 4-tuple (xmin, ymin, xmax, ymax). """ # Calculate boundary values based on scale and size xval = self.origin.x + self.scale.x * self.width yval = self.origin.y + self.scale.y * self.height # Calculate min and max values xmin = min(xval, self.origin.x) xmax = max(xval, self.origin.x) ymin = min(yval, self.origin.y) ymax = max(yval, self.origin.y) return xmin, ymin, xmax, ymax @property def bands(self): return BandList(self) def warp(self, ds_input, resampling='NearestNeighbour', max_error=0.0): """ Return a warped GDALRaster with the given input characteristics. The input is expected to be a dictionary containing the parameters of the target raster. 
        Allowed values are width, height, SRID, origin, scale, skew, datatype,
        driver, and name (filename).

        By default, the warp function keeps all parameters equal to the values
        of the original source raster. For the name of the target raster, the
        name of the source raster is used, with '_copy.' and the driver name
        of the source raster appended.

        In addition, the resampling algorithm can be specified with the
        "resampling" input parameter. The default is 'NearestNeighbour'. For a
        list of all options consult the GDAL_RESAMPLE_ALGORITHMS constant.
        """
        # Get the parameters defining the geotransform, srid, and size of the raster
        ds_input.setdefault('width', self.width)
        ds_input.setdefault('height', self.height)
        ds_input.setdefault('srid', self.srs.srid)
        ds_input.setdefault('origin', self.origin)
        ds_input.setdefault('scale', self.scale)
        ds_input.setdefault('skew', self.skew)
        # Get the driver, name, and datatype of the target raster
        ds_input.setdefault('driver', self.driver.name)

        if 'name' not in ds_input:
            ds_input['name'] = self.name + '_copy.' + self.driver.name

        if 'datatype' not in ds_input:
            ds_input['datatype'] = self.bands[0].datatype()

        # Instantiate raster bands filled with nodata values.
        ds_input['bands'] = [{'nodata_value': bnd.nodata_value} for bnd in self.bands]

        # Create target raster
        target = GDALRaster(ds_input, write=True)

        # Select resampling algorithm
        algorithm = GDAL_RESAMPLE_ALGORITHMS[resampling]

        # Reproject image
        capi.reproject_image(
            self._ptr, self.srs.wkt.encode(), target._ptr, target.srs.wkt.encode(),
            algorithm, 0.0, max_error, c_void_p(), c_void_p(), c_void_p()
        )

        # Make sure all data is written to file
        target._flush()

        return target

    def transform(self, srid, driver=None, name=None, resampling='NearestNeighbour',
                  max_error=0.0):
        """
        Return a copy of this raster reprojected into the given SRID.
        """
        # Convert the resampling algorithm name into an algorithm id
        algorithm = GDAL_RESAMPLE_ALGORITHMS[resampling]

        # Instantiate target spatial reference system
        target_srs = SpatialReference(srid)

        # Create warped virtual dataset in the target reference system
        target = capi.auto_create_warped_vrt(
            self._ptr, self.srs.wkt.encode(), target_srs.wkt.encode(),
            algorithm, max_error, c_void_p()
        )
        target = GDALRaster(target)

        # Construct the target warp dictionary from the virtual raster
        data = {
            'srid': srid,
            'width': target.width,
            'height': target.height,
            'origin': [target.origin.x, target.origin.y],
            'scale': [target.scale.x, target.scale.y],
            'skew': [target.skew.x, target.skew.y],
        }

        # Set the driver and filepath if provided
        if driver:
            data['driver'] = driver

        if name:
            data['name'] = name

        # Warp the raster into new srid
        return self.warp(data, resampling=resampling, max_error=max_error)

    @property
    def info(self):
        """
        Return information about this raster in a string format equivalent
        to the output of the gdalinfo command line utility.
        """
        if not capi.get_ds_info:
            raise ValueError('GDAL ≥ 2.1 is required for using the info property.')
        return capi.get_ds_info(self.ptr, None).decode()
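# Editor's note: a usage sketch, not part of Django; it requires the GDAL
# library. This exercises the dict branch of GDALRaster.__init__ above by
# building a small in-memory (MEM driver) raster and reading it back.
rst = GDALRaster({
    'srid': 4326,
    'width': 4,
    'height': 4,
    'datatype': 1,  # GDT_Byte
    'bands': [{'data': range(16), 'nodata_value': 255}],
})
assert (rst.width, rst.height, rst.srs.srid) == (4, 4, 4326)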
b8f93cf39f5849110cb6242d5c69153a12129a0b05ea05cfec951fb9f4035c82
""" GDAL - Constant definitions """ from ctypes import ( c_double, c_float, c_int16, c_int32, c_ubyte, c_uint16, c_uint32, ) # See https://www.gdal.org/gdal_8h.html#a22e22ce0a55036a96f652765793fb7a4 GDAL_PIXEL_TYPES = { 0: 'GDT_Unknown', # Unknown or unspecified type 1: 'GDT_Byte', # Eight bit unsigned integer 2: 'GDT_UInt16', # Sixteen bit unsigned integer 3: 'GDT_Int16', # Sixteen bit signed integer 4: 'GDT_UInt32', # Thirty-two bit unsigned integer 5: 'GDT_Int32', # Thirty-two bit signed integer 6: 'GDT_Float32', # Thirty-two bit floating point 7: 'GDT_Float64', # Sixty-four bit floating point 8: 'GDT_CInt16', # Complex Int16 9: 'GDT_CInt32', # Complex Int32 10: 'GDT_CFloat32', # Complex Float32 11: 'GDT_CFloat64', # Complex Float64 } # A list of gdal datatypes that are integers. GDAL_INTEGER_TYPES = [1, 2, 3, 4, 5] # Lookup values to convert GDAL pixel type indices into ctypes objects. # The GDAL band-io works with ctypes arrays to hold data to be written # or to hold the space for data to be read into. The lookup below helps # selecting the right ctypes object for a given gdal pixel type. GDAL_TO_CTYPES = [ None, c_ubyte, c_uint16, c_int16, c_uint32, c_int32, c_float, c_double, None, None, None, None ] # List of resampling algorithms that can be used to warp a GDALRaster. GDAL_RESAMPLE_ALGORITHMS = { 'NearestNeighbour': 0, 'Bilinear': 1, 'Cubic': 2, 'CubicSpline': 3, 'Lanczos': 4, 'Average': 5, 'Mode': 6, } # See https://www.gdal.org/gdal_8h.html#ace76452d94514561fffa8ea1d2a5968c GDAL_COLOR_TYPES = { 0: 'GCI_Undefined', # Undefined, default value, i.e. not known 1: 'GCI_GrayIndex', # Greyscale 2: 'GCI_PaletteIndex', # Paletted 3: 'GCI_RedBand', # Red band of RGBA image 4: 'GCI_GreenBand', # Green band of RGBA image 5: 'GCI_BlueBand', # Blue band of RGBA image 6: 'GCI_AlphaBand', # Alpha (0=transparent, 255=opaque) 7: 'GCI_HueBand', # Hue band of HLS image 8: 'GCI_SaturationBand', # Saturation band of HLS image 9: 'GCI_LightnessBand', # Lightness band of HLS image 10: 'GCI_CyanBand', # Cyan band of CMYK image 11: 'GCI_MagentaBand', # Magenta band of CMYK image 12: 'GCI_YellowBand', # Yellow band of CMYK image 13: 'GCI_BlackBand', # Black band of CMLY image 14: 'GCI_YCbCr_YBand', # Y Luminance 15: 'GCI_YCbCr_CbBand', # Cb Chroma 16: 'GCI_YCbCr_CrBand', # Cr Chroma, also GCI_Max } # Fixed base path for buffer-based GDAL in-memory files. VSI_FILESYSTEM_BASE_PATH = '/vsimem/' # Should the memory file system take ownership of the buffer, freeing it when # the file is deleted? (No, GDALRaster.__del__() will delete the buffer.) VSI_TAKE_BUFFER_OWNERSHIP = False # Should a VSI file be removed when retrieving its buffer? VSI_DELETE_BUFFER_ON_READ = False
7518a4190ebe70a0a03a3ddb8d14a720a775f7a14644633611ef41b4f406564d
from ctypes import byref, c_double, c_int, c_void_p

from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.prototypes import raster as capi
from django.contrib.gis.gdal.raster.base import GDALRasterBase
from django.contrib.gis.shortcuts import numpy
from django.utils.encoding import force_str

from .const import (
    GDAL_COLOR_TYPES, GDAL_INTEGER_TYPES, GDAL_PIXEL_TYPES, GDAL_TO_CTYPES,
)


class GDALBand(GDALRasterBase):
    """
    Wrap a GDAL raster band; it must be obtained from a GDALRaster object.
    """
    def __init__(self, source, index):
        self.source = source
        self._ptr = capi.get_ds_raster_band(source._ptr, index)

    def _flush(self):
        """
        Call the flush method on the Band's parent raster and force a refresh
        of the statistics attribute the next time it is requested.
        """
        self.source._flush()
        self._stats_refresh = True

    @property
    def description(self):
        """
        Return the description string of the band.
        """
        return force_str(capi.get_band_description(self._ptr))

    @property
    def width(self):
        """
        Width (X axis) in pixels of the band.
        """
        return capi.get_band_xsize(self._ptr)

    @property
    def height(self):
        """
        Height (Y axis) in pixels of the band.
        """
        return capi.get_band_ysize(self._ptr)

    @property
    def pixel_count(self):
        """
        Return the total number of pixels in this band.
        """
        return self.width * self.height

    _stats_refresh = False

    def statistics(self, refresh=False, approximate=False):
        """
        Compute statistics on the pixel values of this band.

        The return value is a tuple with the following structure:
        (minimum, maximum, mean, standard deviation).

        If approximate=True, the statistics may be computed based on overviews
        or a subset of image tiles.

        If refresh=True, the statistics will be computed from the data directly,
        and the cache will be updated where applicable.

        For empty bands (where all pixel values are nodata), all statistics
        values are returned as None.

        For raster formats using Persistent Auxiliary Metadata (PAM) services,
        the statistics might be cached in an auxiliary file.
        """
        # Prepare array with arguments for capi function
        smin, smax, smean, sstd = c_double(), c_double(), c_double(), c_double()
        stats_args = [
            self._ptr, c_int(approximate), byref(smin), byref(smax),
            byref(smean), byref(sstd), c_void_p(), c_void_p(),
        ]

        if refresh or self._stats_refresh:
            func = capi.compute_band_statistics
        else:
            # Add additional argument to force computation if there is no
            # existing PAM file to take the values from.
            force = True
            stats_args.insert(2, c_int(force))
            func = capi.get_band_statistics

        # Computation of statistics fails for empty bands.
        try:
            func(*stats_args)
            result = smin.value, smax.value, smean.value, sstd.value
        except GDALException:
            result = (None, None, None, None)

        self._stats_refresh = False

        return result

    @property
    def min(self):
        """
        Return the minimum pixel value for this band.
        """
        return self.statistics()[0]

    @property
    def max(self):
        """
        Return the maximum pixel value for this band.
        """
        return self.statistics()[1]

    @property
    def mean(self):
        """
        Return the mean of all pixel values of this band.
        """
        return self.statistics()[2]

    @property
    def std(self):
        """
        Return the standard deviation of all pixel values of this band.
        """
        return self.statistics()[3]

    @property
    def nodata_value(self):
        """
        Return the nodata value for this band, or None if it isn't set.
""" # Get value and nodata exists flag nodata_exists = c_int() value = capi.get_band_nodata_value(self._ptr, nodata_exists) if not nodata_exists: value = None # If the pixeltype is an integer, convert to int elif self.datatype() in GDAL_INTEGER_TYPES: value = int(value) return value @nodata_value.setter def nodata_value(self, value): """ Set the nodata value for this band. """ if value is None: if not capi.delete_band_nodata_value: raise ValueError('GDAL >= 2.1 required to delete nodata values.') capi.delete_band_nodata_value(self._ptr) elif not isinstance(value, (int, float)): raise ValueError('Nodata value must be numeric or None.') else: capi.set_band_nodata_value(self._ptr, value) self._flush() def datatype(self, as_string=False): """ Return the GDAL Pixel Datatype for this band. """ dtype = capi.get_band_datatype(self._ptr) if as_string: dtype = GDAL_PIXEL_TYPES[dtype] return dtype def color_interp(self, as_string=False): """Return the GDAL color interpretation for this band.""" color = capi.get_band_color_interp(self._ptr) if as_string: color = GDAL_COLOR_TYPES[color] return color def data(self, data=None, offset=None, size=None, shape=None, as_memoryview=False): """ Read or writes pixel values for this band. Blocks of data can be accessed by specifying the width, height and offset of the desired block. The same specification can be used to update parts of a raster by providing an array of values. Allowed input data types are bytes, memoryview, list, tuple, and array. """ offset = offset or (0, 0) size = size or (self.width - offset[0], self.height - offset[1]) shape = shape or size if any(x <= 0 for x in size): raise ValueError('Offset too big for this raster.') if size[0] > self.width or size[1] > self.height: raise ValueError('Size is larger than raster.') # Create ctypes type array generator ctypes_array = GDAL_TO_CTYPES[self.datatype()] * (shape[0] * shape[1]) if data is None: # Set read mode access_flag = 0 # Prepare empty ctypes array data_array = ctypes_array() else: # Set write mode access_flag = 1 # Instantiate ctypes array holding the input data if isinstance(data, (bytes, memoryview)) or (numpy and isinstance(data, numpy.ndarray)): data_array = ctypes_array.from_buffer_copy(data) else: data_array = ctypes_array(*data) # Access band capi.band_io(self._ptr, access_flag, offset[0], offset[1], size[0], size[1], byref(data_array), shape[0], shape[1], self.datatype(), 0, 0) # Return data as numpy array if possible, otherwise as list if data is None: if as_memoryview: return memoryview(data_array) elif numpy: # reshape() needs a reshape parameter with the height first. return numpy.frombuffer( data_array, dtype=numpy.dtype(data_array) ).reshape(tuple(reversed(size))) else: return list(data_array) else: self._flush() class BandList(list): def __init__(self, source): self.source = source super().__init__() def __iter__(self): for idx in range(1, len(self) + 1): yield GDALBand(self.source, idx) def __len__(self): return capi.get_ds_raster_count(self.source._ptr) def __getitem__(self, index): try: return GDALBand(self.source, index + 1) except GDALException: raise GDALException('Unable to get band index %d' % index)
f5474f4aa591dbc02c506ec70dd1cc446da7235fa22abd6bbb557df7cba27cff
"""
This module contains functions that generate ctypes prototypes for the
GDAL routines.
"""
from ctypes import (
    POINTER, c_bool, c_char_p, c_double, c_int, c_int64, c_void_p,
)
from functools import partial

from django.contrib.gis.gdal.prototypes.errcheck import (
    check_arg_errcode, check_const_string, check_errcode, check_geom,
    check_geom_offset, check_pointer, check_srs, check_str_arg, check_string,
)


class gdal_char_p(c_char_p):
    pass


def bool_output(func, argtypes, errcheck=None):
    """Generate a ctypes function that returns a boolean value."""
    func.argtypes = argtypes
    func.restype = c_bool
    if errcheck:
        func.errcheck = errcheck
    return func


def double_output(func, argtypes, errcheck=False, strarg=False, cpl=False):
    "Generate a ctypes function that returns a double value."
    func.argtypes = argtypes
    func.restype = c_double
    if errcheck:
        func.errcheck = partial(check_arg_errcode, cpl=cpl)
    if strarg:
        func.errcheck = check_str_arg
    return func


def geom_output(func, argtypes, offset=None):
    """
    Generate a function that returns a Geometry either directly (when
    `offset` is None) or by reference at the given offset in the C
    argument list.
    """
    # Setting the argument types
    func.argtypes = argtypes

    if not offset:
        # When a geometry pointer is directly returned.
        func.restype = c_void_p
        func.errcheck = check_geom
    else:
        # Error code returned, geometry is returned by-reference.
        func.restype = c_int

        def geomerrcheck(result, func, cargs):
            return check_geom_offset(result, func, cargs, offset)
        func.errcheck = geomerrcheck

    return func


def int_output(func, argtypes, errcheck=None):
    "Generate a ctypes function that returns an integer value."
    func.argtypes = argtypes
    func.restype = c_int
    if errcheck:
        func.errcheck = errcheck
    return func


def int64_output(func, argtypes):
    "Generate a ctypes function that returns a 64-bit integer value."
    func.argtypes = argtypes
    func.restype = c_int64
    return func


def srs_output(func, argtypes):
    """
    Generate a ctypes prototype for the given function with the given C
    arguments that returns a pointer to an OGR Spatial Reference System.
    """
    func.argtypes = argtypes
    func.restype = c_void_p
    func.errcheck = check_srs
    return func


def const_string_output(func, argtypes, offset=None, decoding=None, cpl=False):
    func.argtypes = argtypes
    if offset:
        func.restype = c_int
    else:
        func.restype = c_char_p

    def _check_const(result, func, cargs):
        res = check_const_string(result, func, cargs, offset=offset, cpl=cpl)
        if res and decoding:
            res = res.decode(decoding)
        return res
    func.errcheck = _check_const

    return func


def string_output(func, argtypes, offset=-1, str_result=False, decoding=None):
    """
    Generate a ctypes prototype for the given function with the given
    argument types that returns a string from a GDAL pointer. The pointer
    allocated by GDAL is freed via the library routine VSIFree, but only
    when `str_result` is True.
    """
    func.argtypes = argtypes
    if str_result:
        # Use subclass of c_char_p so the error checking routine
        # can free the memory at the pointer's address.
        func.restype = gdal_char_p
    else:
        # Error code is returned
        func.restype = c_int

    # Dynamically defining our error-checking function with the
    # given offset.
    def _check_str(result, func, cargs):
        res = check_string(result, func, cargs, offset=offset, str_result=str_result)
        if res and decoding:
            res = res.decode(decoding)
        return res
    func.errcheck = _check_str

    return func


def void_output(func, argtypes, errcheck=True, cpl=False):
    """
    For functions that return either nothing or an error code that needs
    to be examined.
""" if argtypes: func.argtypes = argtypes if errcheck: # `errcheck` keyword may be set to False for routines that # return void, rather than a status code. func.restype = c_int func.errcheck = partial(check_errcode, cpl=cpl) else: func.restype = None return func def voidptr_output(func, argtypes, errcheck=True): "For functions that return c_void_p." func.argtypes = argtypes func.restype = c_void_p if errcheck: func.errcheck = check_pointer return func def chararray_output(func, argtypes, errcheck=True): """For functions that return a c_char_p array.""" func.argtypes = argtypes func.restype = POINTER(c_char_p) if errcheck: func.errcheck = check_pointer return func
7248f2a9c66dad565cb6bc3e1ef41bd62b036af85401b94ba8aba7a3d538ba9c
""" This module houses the error-checking routines used by the GDAL ctypes prototypes. """ from ctypes import c_void_p, string_at from django.contrib.gis.gdal.error import ( GDALException, SRSException, check_err, ) from django.contrib.gis.gdal.libgdal import lgdal # Helper routines for retrieving pointers and/or values from # arguments passed in by reference. def arg_byref(args, offset=-1): "Return the pointer argument's by-reference value." return args[offset]._obj.value def ptr_byref(args, offset=-1): "Return the pointer argument passed in by-reference." return args[offset]._obj # ### String checking Routines ### def check_const_string(result, func, cargs, offset=None, cpl=False): """ Similar functionality to `check_string`, but does not free the pointer. """ if offset: check_err(result, cpl=cpl) ptr = ptr_byref(cargs, offset) return ptr.value else: return result def check_string(result, func, cargs, offset=-1, str_result=False): """ Check the string output returned from the given function, and free the string pointer allocated by OGR. The `str_result` keyword may be used when the result is the string pointer, otherwise the OGR error code is assumed. The `offset` keyword may be used to extract the string pointer passed in by-reference at the given slice offset in the function arguments. """ if str_result: # For routines that return a string. ptr = result if not ptr: s = None else: s = string_at(result) else: # Error-code return specified. check_err(result) ptr = ptr_byref(cargs, offset) # Getting the string value s = ptr.value # Correctly freeing the allocated memory behind GDAL pointer # with the VSIFree routine. if ptr: lgdal.VSIFree(ptr) return s # ### DataSource, Layer error-checking ### # ### Envelope checking ### def check_envelope(result, func, cargs, offset=-1): "Check a function that returns an OGR Envelope by reference." return ptr_byref(cargs, offset) # ### Geometry error-checking routines ### def check_geom(result, func, cargs): "Check a function that returns a geometry." # OGR_G_Clone may return an integer, even though the # restype is set to c_void_p if isinstance(result, int): result = c_void_p(result) if not result: raise GDALException('Invalid geometry pointer returned from "%s".' % func.__name__) return result def check_geom_offset(result, func, cargs, offset=-1): "Check the geometry at the given offset in the C parameter list." check_err(result) geom = ptr_byref(cargs, offset=offset) return check_geom(geom, func, cargs) # ### Spatial Reference error-checking routines ### def check_srs(result, func, cargs): if isinstance(result, int): result = c_void_p(result) if not result: raise SRSException('Invalid spatial reference pointer returned from "%s".' % func.__name__) return result # ### Other error-checking routines ### def check_arg_errcode(result, func, cargs, cpl=False): """ The error code is returned in the last argument, by reference. Check its value with `check_err` before returning the result. """ check_err(arg_byref(cargs), cpl=cpl) return result def check_errcode(result, func, cargs, cpl=False): """ Check the error code returned (c_int). """ check_err(result, cpl=cpl) def check_pointer(result, func, cargs): "Make sure the result pointer is valid." if isinstance(result, int): result = c_void_p(result) if result: return result else: raise GDALException('Invalid pointer returned from "%s"' % func.__name__) def check_str_arg(result, func, cargs): """ This is for the OSRGet[Angular|Linear]Units functions, which require that the returned string pointer not be freed. 
This returns both the double and string values. """ dbl = result ptr = cargs[-1]._obj return dbl, ptr.value.decode()
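# A self-contained sketch of the ctypes errcheck protocol these helpers plug
# into: ctypes invokes errcheck(result, func, cargs) after every foreign call,
# and `arg_byref`/`ptr_byref` read out-parameters back out of `cargs`. The
# fake "C" function below is purely illustrative (this module itself needs
# the GDAL library importable, since it loads `lgdal` at the top).
if __name__ == '__main__':
    from ctypes import byref, c_double

    def fake_c_call(out_arg):
        out_arg._obj.value = 42.0  # simulate the C side filling the out-param
        return 0  # CE_None, i.e. success

    out = c_double()
    cargs = (byref(out),)
    check_err(fake_c_call(*cargs))  # raises GDALException on failure codes
    print(arg_byref(cargs))  # -> 42.0, read back through the helper above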
ccf21cf956a1b5abb55d04c00eac5036dcdc02fe8bba76e154791690c38458aa
""" This module houses the ctypes function prototypes for GDAL DataSource (raster) related data structures. """ from ctypes import POINTER, c_bool, c_char_p, c_double, c_int, c_void_p from functools import partial from django.contrib.gis.gdal.libgdal import GDAL_VERSION, std_call from django.contrib.gis.gdal.prototypes.generation import ( chararray_output, const_string_output, double_output, int_output, void_output, voidptr_output, ) # For more detail about c function names and definitions see # https://gdal.org/gdal_8h.html # https://gdal.org/gdalwarper_8h.html # https://www.gdal.org/gdal__utils_8h.html # Prepare partial functions that use cpl error codes void_output = partial(void_output, cpl=True) const_string_output = partial(const_string_output, cpl=True) double_output = partial(double_output, cpl=True) # Raster Driver Routines register_all = void_output(std_call('GDALAllRegister'), [], errcheck=False) get_driver = voidptr_output(std_call('GDALGetDriver'), [c_int]) get_driver_by_name = voidptr_output(std_call('GDALGetDriverByName'), [c_char_p], errcheck=False) get_driver_count = int_output(std_call('GDALGetDriverCount'), []) get_driver_description = const_string_output(std_call('GDALGetDescription'), [c_void_p]) # Raster Data Source Routines create_ds = voidptr_output(std_call('GDALCreate'), [c_void_p, c_char_p, c_int, c_int, c_int, c_int, c_void_p]) open_ds = voidptr_output(std_call('GDALOpen'), [c_char_p, c_int]) close_ds = void_output(std_call('GDALClose'), [c_void_p], errcheck=False) flush_ds = int_output(std_call('GDALFlushCache'), [c_void_p]) copy_ds = voidptr_output( std_call('GDALCreateCopy'), [c_void_p, c_char_p, c_void_p, c_int, POINTER(c_char_p), c_void_p, c_void_p] ) add_band_ds = void_output(std_call('GDALAddBand'), [c_void_p, c_int]) get_ds_description = const_string_output(std_call('GDALGetDescription'), [c_void_p]) get_ds_driver = voidptr_output(std_call('GDALGetDatasetDriver'), [c_void_p]) get_ds_xsize = int_output(std_call('GDALGetRasterXSize'), [c_void_p]) get_ds_ysize = int_output(std_call('GDALGetRasterYSize'), [c_void_p]) get_ds_raster_count = int_output(std_call('GDALGetRasterCount'), [c_void_p]) get_ds_raster_band = voidptr_output(std_call('GDALGetRasterBand'), [c_void_p, c_int]) get_ds_projection_ref = const_string_output(std_call('GDALGetProjectionRef'), [c_void_p]) set_ds_projection_ref = void_output(std_call('GDALSetProjection'), [c_void_p, c_char_p]) get_ds_geotransform = void_output(std_call('GDALGetGeoTransform'), [c_void_p, POINTER(c_double * 6)], errcheck=False) set_ds_geotransform = void_output(std_call('GDALSetGeoTransform'), [c_void_p, POINTER(c_double * 6)]) get_ds_metadata = chararray_output(std_call('GDALGetMetadata'), [c_void_p, c_char_p], errcheck=False) set_ds_metadata = void_output(std_call('GDALSetMetadata'), [c_void_p, POINTER(c_char_p), c_char_p]) get_ds_metadata_domain_list = chararray_output(std_call('GDALGetMetadataDomainList'), [c_void_p], errcheck=False) get_ds_metadata_item = const_string_output(std_call('GDALGetMetadataItem'), [c_void_p, c_char_p, c_char_p]) set_ds_metadata_item = const_string_output(std_call('GDALSetMetadataItem'), [c_void_p, c_char_p, c_char_p, c_char_p]) free_dsl = void_output(std_call('CSLDestroy'), [POINTER(c_char_p)], errcheck=False) if GDAL_VERSION >= (2, 1): get_ds_info = const_string_output(std_call('GDALInfo'), [c_void_p, c_void_p]) else: get_ds_info = None # Raster Band Routines band_io = void_output( std_call('GDALRasterIO'), [c_void_p, c_int, c_int, c_int, c_int, c_int, c_void_p, c_int, c_int, c_int, 
c_int, c_int] ) get_band_xsize = int_output(std_call('GDALGetRasterBandXSize'), [c_void_p]) get_band_ysize = int_output(std_call('GDALGetRasterBandYSize'), [c_void_p]) get_band_index = int_output(std_call('GDALGetBandNumber'), [c_void_p]) get_band_description = const_string_output(std_call('GDALGetDescription'), [c_void_p]) get_band_ds = voidptr_output(std_call('GDALGetBandDataset'), [c_void_p]) get_band_datatype = int_output(std_call('GDALGetRasterDataType'), [c_void_p]) get_band_color_interp = int_output(std_call('GDALGetRasterColorInterpretation'), [c_void_p]) get_band_nodata_value = double_output(std_call('GDALGetRasterNoDataValue'), [c_void_p, POINTER(c_int)]) set_band_nodata_value = void_output(std_call('GDALSetRasterNoDataValue'), [c_void_p, c_double]) if GDAL_VERSION >= (2, 1): delete_band_nodata_value = void_output(std_call('GDALDeleteRasterNoDataValue'), [c_void_p]) else: delete_band_nodata_value = None get_band_statistics = void_output( std_call('GDALGetRasterStatistics'), [ c_void_p, c_int, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double), POINTER(c_double), c_void_p, c_void_p, ], ) compute_band_statistics = void_output( std_call('GDALComputeRasterStatistics'), [c_void_p, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double), POINTER(c_double), c_void_p, c_void_p], ) # Reprojection routine reproject_image = void_output( std_call('GDALReprojectImage'), [c_void_p, c_char_p, c_void_p, c_char_p, c_int, c_double, c_double, c_void_p, c_void_p, c_void_p] ) auto_create_warped_vrt = voidptr_output( std_call('GDALAutoCreateWarpedVRT'), [c_void_p, c_char_p, c_char_p, c_int, c_double, c_void_p] ) # Create VSI gdal raster files from in-memory buffers. # https://gdal.org/cpl__vsi_8h.html create_vsi_file_from_mem_buffer = voidptr_output(std_call('VSIFileFromMemBuffer'), [c_char_p, c_void_p, c_int, c_int]) get_mem_buffer_from_vsi_file = voidptr_output(std_call('VSIGetMemFileBuffer'), [c_char_p, POINTER(c_int), c_bool]) unlink_vsi_file = int_output(std_call('VSIUnlink'), [c_char_p])
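# A minimal sketch of the raw C workflow these prototypes wrap (GDALRaster
# and GDALBand normally drive them for you). Assumes GDAL is installed and
# 'raster.tif' is a hypothetical readable raster file.
if __name__ == '__main__':
    register_all()
    ds = open_ds(b'raster.tif', 0)  # 0 == GA_ReadOnly
    print(get_ds_xsize(ds), get_ds_ysize(ds), get_ds_raster_count(ds))
    band = get_ds_raster_band(ds, 1)  # GDAL band indices are 1-based
    print(get_band_datatype(band))
    close_ds(ds)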
1a7c50e36dbd30e6793508c94ed98272cb7118f1fa1e151df40b025ac8a1ebfb
""" This module houses the ctypes function prototypes for OGR DataSource related data structures. OGR_Dr_*, OGR_DS_*, OGR_L_*, OGR_F_*, OGR_Fld_* routines are relevant here. """ from ctypes import POINTER, c_char_p, c_double, c_int, c_long, c_void_p from django.contrib.gis.gdal.envelope import OGREnvelope from django.contrib.gis.gdal.libgdal import GDAL_VERSION, lgdal from django.contrib.gis.gdal.prototypes.generation import ( bool_output, const_string_output, double_output, geom_output, int64_output, int_output, srs_output, void_output, voidptr_output, ) c_int_p = POINTER(c_int) # shortcut type # Driver Routines register_all = void_output(lgdal.OGRRegisterAll, [], errcheck=False) cleanup_all = void_output(lgdal.OGRCleanupAll, [], errcheck=False) get_driver = voidptr_output(lgdal.OGRGetDriver, [c_int]) get_driver_by_name = voidptr_output(lgdal.OGRGetDriverByName, [c_char_p], errcheck=False) get_driver_count = int_output(lgdal.OGRGetDriverCount, []) get_driver_name = const_string_output(lgdal.OGR_Dr_GetName, [c_void_p], decoding='ascii') # DataSource open_ds = voidptr_output(lgdal.OGROpen, [c_char_p, c_int, POINTER(c_void_p)]) destroy_ds = void_output(lgdal.OGR_DS_Destroy, [c_void_p], errcheck=False) release_ds = void_output(lgdal.OGRReleaseDataSource, [c_void_p]) get_ds_name = const_string_output(lgdal.OGR_DS_GetName, [c_void_p]) get_layer = voidptr_output(lgdal.OGR_DS_GetLayer, [c_void_p, c_int]) get_layer_by_name = voidptr_output(lgdal.OGR_DS_GetLayerByName, [c_void_p, c_char_p]) get_layer_count = int_output(lgdal.OGR_DS_GetLayerCount, [c_void_p]) # Layer Routines get_extent = void_output(lgdal.OGR_L_GetExtent, [c_void_p, POINTER(OGREnvelope), c_int]) get_feature = voidptr_output(lgdal.OGR_L_GetFeature, [c_void_p, c_long]) get_feature_count = int_output(lgdal.OGR_L_GetFeatureCount, [c_void_p, c_int]) get_layer_defn = voidptr_output(lgdal.OGR_L_GetLayerDefn, [c_void_p]) get_layer_srs = srs_output(lgdal.OGR_L_GetSpatialRef, [c_void_p]) get_next_feature = voidptr_output(lgdal.OGR_L_GetNextFeature, [c_void_p]) reset_reading = void_output(lgdal.OGR_L_ResetReading, [c_void_p], errcheck=False) test_capability = int_output(lgdal.OGR_L_TestCapability, [c_void_p, c_char_p]) get_spatial_filter = geom_output(lgdal.OGR_L_GetSpatialFilter, [c_void_p]) set_spatial_filter = void_output(lgdal.OGR_L_SetSpatialFilter, [c_void_p, c_void_p], errcheck=False) set_spatial_filter_rect = void_output( lgdal.OGR_L_SetSpatialFilterRect, [c_void_p, c_double, c_double, c_double, c_double], errcheck=False ) # Feature Definition Routines get_fd_geom_type = int_output(lgdal.OGR_FD_GetGeomType, [c_void_p]) get_fd_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p]) get_feat_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p]) get_field_count = int_output(lgdal.OGR_FD_GetFieldCount, [c_void_p]) get_field_defn = voidptr_output(lgdal.OGR_FD_GetFieldDefn, [c_void_p, c_int]) # Feature Routines clone_feature = voidptr_output(lgdal.OGR_F_Clone, [c_void_p]) destroy_feature = void_output(lgdal.OGR_F_Destroy, [c_void_p], errcheck=False) feature_equal = int_output(lgdal.OGR_F_Equal, [c_void_p, c_void_p]) get_feat_geom_ref = geom_output(lgdal.OGR_F_GetGeometryRef, [c_void_p]) get_feat_field_count = int_output(lgdal.OGR_F_GetFieldCount, [c_void_p]) get_feat_field_defn = voidptr_output(lgdal.OGR_F_GetFieldDefnRef, [c_void_p, c_int]) get_fid = int_output(lgdal.OGR_F_GetFID, [c_void_p]) get_field_as_datetime = int_output( lgdal.OGR_F_GetFieldAsDateTime, [c_void_p, c_int, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p, 
c_int_p] ) get_field_as_double = double_output(lgdal.OGR_F_GetFieldAsDouble, [c_void_p, c_int]) get_field_as_integer = int_output(lgdal.OGR_F_GetFieldAsInteger, [c_void_p, c_int]) get_field_as_integer64 = int64_output(lgdal.OGR_F_GetFieldAsInteger64, [c_void_p, c_int]) if GDAL_VERSION >= (2, 2): is_field_set = bool_output(lgdal.OGR_F_IsFieldSetAndNotNull, [c_void_p, c_int]) else: is_field_set = bool_output(lgdal.OGR_F_IsFieldSet, [c_void_p, c_int]) get_field_as_string = const_string_output(lgdal.OGR_F_GetFieldAsString, [c_void_p, c_int]) get_field_index = int_output(lgdal.OGR_F_GetFieldIndex, [c_void_p, c_char_p]) # Field Routines get_field_name = const_string_output(lgdal.OGR_Fld_GetNameRef, [c_void_p]) get_field_precision = int_output(lgdal.OGR_Fld_GetPrecision, [c_void_p]) get_field_type = int_output(lgdal.OGR_Fld_GetType, [c_void_p]) get_field_type_name = const_string_output(lgdal.OGR_GetFieldTypeName, [c_int]) get_field_width = int_output(lgdal.OGR_Fld_GetWidth, [c_void_p])
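# A minimal sketch of reading a vector source with the raw OGR prototypes
# (the DataSource/Layer wrappers in django.contrib.gis.gdal drive exactly
# these). Assumes GDAL/OGR is installed; 'data.shp' is a hypothetical file.
if __name__ == '__main__':
    from ctypes import byref

    register_all()
    ds = open_ds(b'data.shp', 0, byref(c_void_p()))  # 0 == read-only
    layer = get_layer(ds, 0)
    print(get_feature_count(layer, 1))  # 1 forces an exact count
    destroy_ds(ds)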
5764a55049e26467d59e3ffdaf5ed79de4fbc3e3a8094a642ffb2e98a2212db5
import threading
from ctypes import POINTER, Structure, byref, c_byte, c_char_p, c_int, c_size_t

from django.contrib.gis.geos.base import GEOSBase
from django.contrib.gis.geos.libgeos import (
    GEOM_PTR, GEOSFuncFactory, geos_version_tuple,
)
from django.contrib.gis.geos.prototypes.errcheck import (
    check_geom, check_sized_string, check_string,
)
from django.contrib.gis.geos.prototypes.geom import c_uchar_p, geos_char_p
from django.utils.encoding import force_bytes


# ### The WKB/WKT Reader/Writer structures and pointers ###
class WKTReader_st(Structure):
    pass


class WKTWriter_st(Structure):
    pass


class WKBReader_st(Structure):
    pass


class WKBWriter_st(Structure):
    pass


WKT_READ_PTR = POINTER(WKTReader_st)
WKT_WRITE_PTR = POINTER(WKTWriter_st)
WKB_READ_PTR = POINTER(WKBReader_st)
WKB_WRITE_PTR = POINTER(WKBWriter_st)

# WKTReader routines
wkt_reader_create = GEOSFuncFactory('GEOSWKTReader_create', restype=WKT_READ_PTR)
wkt_reader_destroy = GEOSFuncFactory('GEOSWKTReader_destroy', argtypes=[WKT_READ_PTR])

wkt_reader_read = GEOSFuncFactory(
    'GEOSWKTReader_read', argtypes=[WKT_READ_PTR, c_char_p], restype=GEOM_PTR, errcheck=check_geom
)

# WKTWriter routines
wkt_writer_create = GEOSFuncFactory('GEOSWKTWriter_create', restype=WKT_WRITE_PTR)
wkt_writer_destroy = GEOSFuncFactory('GEOSWKTWriter_destroy', argtypes=[WKT_WRITE_PTR])

wkt_writer_write = GEOSFuncFactory(
    'GEOSWKTWriter_write', argtypes=[WKT_WRITE_PTR, GEOM_PTR], restype=geos_char_p, errcheck=check_string
)

wkt_writer_get_outdim = GEOSFuncFactory(
    'GEOSWKTWriter_getOutputDimension', argtypes=[WKT_WRITE_PTR], restype=c_int
)
wkt_writer_set_outdim = GEOSFuncFactory(
    'GEOSWKTWriter_setOutputDimension', argtypes=[WKT_WRITE_PTR, c_int]
)

wkt_writer_set_trim = GEOSFuncFactory('GEOSWKTWriter_setTrim', argtypes=[WKT_WRITE_PTR, c_byte])
wkt_writer_set_precision = GEOSFuncFactory('GEOSWKTWriter_setRoundingPrecision', argtypes=[WKT_WRITE_PTR, c_int])

# WKBReader routines
wkb_reader_create = GEOSFuncFactory('GEOSWKBReader_create', restype=WKB_READ_PTR)
wkb_reader_destroy = GEOSFuncFactory('GEOSWKBReader_destroy', argtypes=[WKB_READ_PTR])


class WKBReadFunc(GEOSFuncFactory):
    # Although the function definitions take `const unsigned char *`
    # as their parameter, we use c_char_p here so the function may
    # take Python strings directly as parameters. Inside Python there
    # is no difference between signed and unsigned characters, so
    # it is not a problem.
    argtypes = [WKB_READ_PTR, c_char_p, c_size_t]
    restype = GEOM_PTR
    errcheck = staticmethod(check_geom)


wkb_reader_read = WKBReadFunc('GEOSWKBReader_read')
wkb_reader_read_hex = WKBReadFunc('GEOSWKBReader_readHEX')

# WKBWriter routines
wkb_writer_create = GEOSFuncFactory('GEOSWKBWriter_create', restype=WKB_WRITE_PTR)
wkb_writer_destroy = GEOSFuncFactory('GEOSWKBWriter_destroy', argtypes=[WKB_WRITE_PTR])


# WKB Writing prototypes.
class WKBWriteFunc(GEOSFuncFactory):
    argtypes = [WKB_WRITE_PTR, GEOM_PTR, POINTER(c_size_t)]
    restype = c_uchar_p
    errcheck = staticmethod(check_sized_string)


wkb_writer_write = WKBWriteFunc('GEOSWKBWriter_write')
wkb_writer_write_hex = WKBWriteFunc('GEOSWKBWriter_writeHEX')


# WKBWriter property getter/setter prototypes.
class WKBWriterGet(GEOSFuncFactory): argtypes = [WKB_WRITE_PTR] restype = c_int class WKBWriterSet(GEOSFuncFactory): argtypes = [WKB_WRITE_PTR, c_int] wkb_writer_get_byteorder = WKBWriterGet('GEOSWKBWriter_getByteOrder') wkb_writer_set_byteorder = WKBWriterSet('GEOSWKBWriter_setByteOrder') wkb_writer_get_outdim = WKBWriterGet('GEOSWKBWriter_getOutputDimension') wkb_writer_set_outdim = WKBWriterSet('GEOSWKBWriter_setOutputDimension') wkb_writer_get_include_srid = WKBWriterGet('GEOSWKBWriter_getIncludeSRID', restype=c_byte) wkb_writer_set_include_srid = WKBWriterSet('GEOSWKBWriter_setIncludeSRID', argtypes=[WKB_WRITE_PTR, c_byte]) # ### Base I/O Class ### class IOBase(GEOSBase): "Base class for GEOS I/O objects." def __init__(self): # Getting the pointer with the constructor. self.ptr = self._constructor() # Loading the real destructor function at this point as doing it in # __del__ is too late (import error). self.destructor.func # ### Base WKB/WKT Reading and Writing objects ### # Non-public WKB/WKT reader classes for internal use because # their `read` methods return _pointers_ instead of GEOSGeometry # objects. class _WKTReader(IOBase): _constructor = wkt_reader_create ptr_type = WKT_READ_PTR destructor = wkt_reader_destroy def read(self, wkt): if not isinstance(wkt, (bytes, str)): raise TypeError return wkt_reader_read(self.ptr, force_bytes(wkt)) class _WKBReader(IOBase): _constructor = wkb_reader_create ptr_type = WKB_READ_PTR destructor = wkb_reader_destroy def read(self, wkb): "Return a _pointer_ to C GEOS Geometry object from the given WKB." if isinstance(wkb, memoryview): wkb_s = bytes(wkb) return wkb_reader_read(self.ptr, wkb_s, len(wkb_s)) elif isinstance(wkb, (bytes, str)): return wkb_reader_read_hex(self.ptr, wkb, len(wkb)) else: raise TypeError # ### WKB/WKT Writer Classes ### class WKTWriter(IOBase): _constructor = wkt_writer_create ptr_type = WKT_WRITE_PTR destructor = wkt_writer_destroy _trim = False _precision = None def __init__(self, dim=2, trim=False, precision=None): super().__init__() if bool(trim) != self._trim: self.trim = trim if precision is not None: self.precision = precision self.outdim = dim def write(self, geom): "Return the WKT representation of the given geometry." return wkt_writer_write(self.ptr, geom.ptr) @property def outdim(self): return wkt_writer_get_outdim(self.ptr) @outdim.setter def outdim(self, new_dim): if new_dim not in (2, 3): raise ValueError('WKT output dimension must be 2 or 3') wkt_writer_set_outdim(self.ptr, new_dim) @property def trim(self): return self._trim @trim.setter def trim(self, flag): if bool(flag) != self._trim: self._trim = bool(flag) wkt_writer_set_trim(self.ptr, self._trim) @property def precision(self): return self._precision @precision.setter def precision(self, precision): if (not isinstance(precision, int) or precision < 0) and precision is not None: raise AttributeError('WKT output rounding precision must be non-negative integer or None.') if precision != self._precision: self._precision = precision wkt_writer_set_precision(self.ptr, -1 if precision is None else precision) class WKBWriter(IOBase): _constructor = wkb_writer_create ptr_type = WKB_WRITE_PTR destructor = wkb_writer_destroy geos_version = geos_version_tuple() def __init__(self, dim=2): super().__init__() self.outdim = dim def _handle_empty_point(self, geom): from django.contrib.gis.geos import Point if isinstance(geom, Point) and geom.empty: if self.srid: # PostGIS uses POINT(NaN NaN) for WKB representation of empty # points. 
Use it for EWKB as it's a PostGIS specific format. # https://trac.osgeo.org/postgis/ticket/3181 geom = Point(float('NaN'), float('NaN'), srid=geom.srid) else: raise ValueError('Empty point is not representable in WKB.') return geom def write(self, geom): "Return the WKB representation of the given geometry." from django.contrib.gis.geos import Polygon geom = self._handle_empty_point(geom) wkb = wkb_writer_write(self.ptr, geom.ptr, byref(c_size_t())) if self.geos_version < (3, 6, 1) and isinstance(geom, Polygon) and geom.empty: # Fix GEOS output for empty polygon. # See https://trac.osgeo.org/geos/ticket/680. wkb = wkb[:-8] + b'\0' * 4 return memoryview(wkb) def write_hex(self, geom): "Return the HEXEWKB representation of the given geometry." from django.contrib.gis.geos.polygon import Polygon geom = self._handle_empty_point(geom) wkb = wkb_writer_write_hex(self.ptr, geom.ptr, byref(c_size_t())) if self.geos_version < (3, 6, 1) and isinstance(geom, Polygon) and geom.empty: wkb = wkb[:-16] + b'0' * 8 return wkb # ### WKBWriter Properties ### # Property for getting/setting the byteorder. def _get_byteorder(self): return wkb_writer_get_byteorder(self.ptr) def _set_byteorder(self, order): if order not in (0, 1): raise ValueError('Byte order parameter must be 0 (Big Endian) or 1 (Little Endian).') wkb_writer_set_byteorder(self.ptr, order) byteorder = property(_get_byteorder, _set_byteorder) # Property for getting/setting the output dimension. @property def outdim(self): return wkb_writer_get_outdim(self.ptr) @outdim.setter def outdim(self, new_dim): if new_dim not in (2, 3): raise ValueError('WKB output dimension must be 2 or 3') wkb_writer_set_outdim(self.ptr, new_dim) # Property for getting/setting the include srid flag. @property def srid(self): return bool(wkb_writer_get_include_srid(self.ptr)) @srid.setter def srid(self, include): wkb_writer_set_include_srid(self.ptr, bool(include)) # `ThreadLocalIO` object holds instances of the WKT and WKB reader/writer # objects that are local to the thread. The `GEOSGeometry` internals # access these instances by calling the module-level functions, defined # below. class ThreadLocalIO(threading.local): wkt_r = None wkt_w = None wkb_r = None wkb_w = None ewkb_w = None thread_context = ThreadLocalIO() # These module-level routines return the I/O object that is local to the # thread. If the I/O object does not exist yet it will be initialized. def wkt_r(): thread_context.wkt_r = thread_context.wkt_r or _WKTReader() return thread_context.wkt_r def wkt_w(dim=2, trim=False, precision=None): if not thread_context.wkt_w: thread_context.wkt_w = WKTWriter(dim=dim, trim=trim, precision=precision) else: thread_context.wkt_w.outdim = dim thread_context.wkt_w.trim = trim thread_context.wkt_w.precision = precision return thread_context.wkt_w def wkb_r(): thread_context.wkb_r = thread_context.wkb_r or _WKBReader() return thread_context.wkb_r def wkb_w(dim=2): if not thread_context.wkb_w: thread_context.wkb_w = WKBWriter(dim=dim) else: thread_context.wkb_w.outdim = dim return thread_context.wkb_w def ewkb_w(dim=2): if not thread_context.ewkb_w: thread_context.ewkb_w = WKBWriter(dim=dim) thread_context.ewkb_w.srid = True else: thread_context.ewkb_w.outdim = dim return thread_context.ewkb_w
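# A minimal usage sketch for the writer classes above, which back
# GEOSGeometry's wkt/wkb/hexewkb properties. Assumes the GEOS library is
# installed; Point and WKTWriter are re-exported by django.contrib.gis.geos.
if __name__ == '__main__':
    from django.contrib.gis.geos import Point, WKTWriter

    pnt = Point(1.2345, 6.789, srid=4326)
    writer = WKTWriter(trim=True, precision=2)
    print(writer.write(pnt))  # b'POINT (1.23 6.79)'
    print(ewkb_w().write_hex(pnt))  # HEXEWKB, with the SRID embedded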
c27aeec001c42eeabdc9f7f97720d1d65409d1c275a98b365b29eb9577f6e317
"""
Tests for django.core.servers.
"""
import errno
import os
import socket
from http.client import HTTPConnection
from urllib.error import HTTPError
from urllib.parse import urlencode
from urllib.request import urlopen

from django.test import LiveServerTestCase, override_settings

from .models import Person

TEST_ROOT = os.path.dirname(__file__)
TEST_SETTINGS = {
    'MEDIA_URL': '/media/',
    'MEDIA_ROOT': os.path.join(TEST_ROOT, 'media'),
    'STATIC_URL': '/static/',
    'STATIC_ROOT': os.path.join(TEST_ROOT, 'static'),
}


@override_settings(ROOT_URLCONF='servers.urls', **TEST_SETTINGS)
class LiveServerBase(LiveServerTestCase):
    available_apps = [
        'servers',
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sessions',
    ]
    fixtures = ['testdata.json']

    def urlopen(self, url):
        return urlopen(self.live_server_url + url)


class LiveServerAddress(LiveServerBase):

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Put it in a list to prevent descriptor lookups in the test.
        cls.live_server_url_test = [cls.live_server_url]

    def test_live_server_url_is_class_property(self):
        self.assertIsInstance(self.live_server_url_test[0], str)
        self.assertEqual(self.live_server_url_test[0], self.live_server_url)


class LiveServerViews(LiveServerBase):
    def test_protocol(self):
        """Launched server serves with HTTP 1.1."""
        with self.urlopen('/example_view/') as f:
            self.assertEqual(f.version, 11)

    def test_closes_connection_without_content_length(self):
        """
        An HTTP 1.1 server is supposed to support keep-alive. Since our
        development server is rather simple, we support it only in cases
        where we can detect a content length from the response. This should
        be doable for all simple views and streaming responses where an
        iterable with length of one is passed. The latter follows as a result
        of `set_content_length` from
        https://github.com/python/cpython/blob/master/Lib/wsgiref/handlers.py.

        If we cannot detect a content length, we explicitly set the
        `Connection` header to `close` to notify the client that we do not
        actually support it.
        """
        conn = HTTPConnection(LiveServerViews.server_thread.host, LiveServerViews.server_thread.port, timeout=1)
        try:
            conn.request('GET', '/streaming_example_view/', headers={'Connection': 'keep-alive'})
            response = conn.getresponse()
            self.assertTrue(response.will_close)
            self.assertEqual(response.read(), b'Iamastream')
            self.assertEqual(response.status, 200)
            self.assertEqual(response.getheader('Connection'), 'close')

            conn.request('GET', '/streaming_example_view/', headers={'Connection': 'close'})
            response = conn.getresponse()
            self.assertTrue(response.will_close)
            self.assertEqual(response.read(), b'Iamastream')
            self.assertEqual(response.status, 200)
            self.assertEqual(response.getheader('Connection'), 'close')
        finally:
            conn.close()

    def test_keep_alive_on_connection_with_content_length(self):
        """
        See `test_closes_connection_without_content_length` for details.
        This is a follow-up test, which ensures that we do not close the
        connection if not needed, hence allowing us to take advantage of
        keep-alive.
        """
        conn = HTTPConnection(LiveServerViews.server_thread.host, LiveServerViews.server_thread.port)
        try:
            conn.request('GET', '/example_view/', headers={"Connection": "keep-alive"})
            response = conn.getresponse()
            self.assertFalse(response.will_close)
            self.assertEqual(response.read(), b'example view')
            self.assertEqual(response.status, 200)
            self.assertIsNone(response.getheader('Connection'))

            conn.request('GET', '/example_view/', headers={"Connection": "close"})
            response = conn.getresponse()
            self.assertFalse(response.will_close)
            self.assertEqual(response.read(), b'example view')
            self.assertEqual(response.status, 200)
            self.assertIsNone(response.getheader('Connection'))
        finally:
            conn.close()

    def test_keep_alive_connection_clears_previous_request_data(self):
        conn = HTTPConnection(LiveServerViews.server_thread.host, LiveServerViews.server_thread.port)
        try:
            conn.request('POST', '/method_view/', b'{}', headers={"Connection": "keep-alive"})
            response = conn.getresponse()
            self.assertFalse(response.will_close)
            self.assertEqual(response.status, 200)
            self.assertEqual(response.read(), b'POST')

            conn.request('POST', '/method_view/', b'{}', headers={"Connection": "close"})
            response = conn.getresponse()
            self.assertFalse(response.will_close)
            self.assertEqual(response.status, 200)
            self.assertEqual(response.read(), b'POST')
        finally:
            conn.close()

    def test_404(self):
        with self.assertRaises(HTTPError) as err:
            self.urlopen('/')
        err.exception.close()
        self.assertEqual(err.exception.code, 404, 'Expected 404 response')

    def test_view(self):
        with self.urlopen('/example_view/') as f:
            self.assertEqual(f.read(), b'example view')

    def test_static_files(self):
        with self.urlopen('/static/example_static_file.txt') as f:
            self.assertEqual(f.read().rstrip(b'\r\n'), b'example static file')

    def test_no_collectstatic_emulation(self):
        """
        LiveServerTestCase reports a 404 status code when an HTTP client
        tries to access a static file that isn't explicitly put under
        STATIC_ROOT.
        """
        with self.assertRaises(HTTPError) as err:
            self.urlopen('/static/another_app/another_app_static_file.txt')
        err.exception.close()
        self.assertEqual(err.exception.code, 404, 'Expected 404 response')

    def test_media_files(self):
        with self.urlopen('/media/example_media_file.txt') as f:
            self.assertEqual(f.read().rstrip(b'\r\n'), b'example media file')

    def test_environ(self):
        with self.urlopen('/environ_view/?%s' % urlencode({'q': 'тест'})) as f:
            self.assertIn(b"QUERY_STRING: 'q=%D1%82%D0%B5%D1%81%D1%82'", f.read())


class LiveServerDatabase(LiveServerBase):

    def test_fixtures_loaded(self):
        """
        Fixtures are properly loaded and visible to the live server thread.
        """
        with self.urlopen('/model_view/') as f:
            self.assertEqual(f.read().splitlines(), [b'jane', b'robert'])

    def test_database_writes(self):
        """
        Data written to the database by a view can be read.
        """
        with self.urlopen('/create_model_instance/'):
            pass
        self.assertQuerysetEqual(
            Person.objects.all().order_by('pk'),
            ['jane', 'robert', 'emily'],
            lambda b: b.name
        )


class LiveServerPort(LiveServerBase):

    def test_port_bind(self):
        """
        Each LiveServerTestCase binds to a unique port or fails to start a
        server thread when run concurrently (#26011).
        """
        TestCase = type("TestCase", (LiveServerBase,), {})
        try:
            TestCase.setUpClass()
        except OSError as e:
            if e.errno == errno.EADDRINUSE:
                # We're out of ports, LiveServerTestCase correctly fails with
                # an OSError.
                return
            # Unexpected error.
            raise
        try:
            # We've acquired a port; ensure our server threads acquired
            # different addresses.
            self.assertNotEqual(
                self.live_server_url, TestCase.live_server_url,
                "Acquired duplicate server addresses for server threads: %s" % self.live_server_url
            )
        finally:
            TestCase.tearDownClass()

    def test_specified_port_bind(self):
        """LiveServerTestCase.port customizes the server's port."""
        TestCase = type('TestCase', (LiveServerBase,), {})
        # Find an open port and tell TestCase to use it.
        s = socket.socket()
        s.bind(('', 0))
        TestCase.port = s.getsockname()[1]
        s.close()
        TestCase.setUpClass()
        try:
            self.assertEqual(
                TestCase.port, TestCase.server_thread.port,
                'Did not use specified port for LiveServerTestCase thread: %s' % TestCase.port
            )
        finally:
            TestCase.tearDownClass()


class LiveServerThreadedTests(LiveServerBase):
    """If LiveServerTestCase isn't threaded, these tests will hang."""

    def test_view_calls_subview(self):
        url = '/subview_calling_view/?%s' % urlencode({'url': self.live_server_url})
        with self.urlopen(url) as f:
            self.assertEqual(f.read(), b'subview calling view: subview')

    def test_check_model_instance_from_subview(self):
        url = '/check_model_instance_from_subview/?%s' % urlencode({
            'url': self.live_server_url,
        })
        with self.urlopen(url) as f:
            self.assertIn(b'emily', f.read())
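# A minimal sketch of the feature test_specified_port_bind exercises above:
# a project's test case can pin the live server to a fixed port instead of an
# OS-assigned ephemeral one. The port value here is hypothetical;
# '/example_view/' is this test app's own URL.
class FixedPortExample(LiveServerBase):
    port = 8081  # hypothetical fixed port

    def test_fixed_port(self):
        self.assertTrue(self.live_server_url.endswith(':8081'))
        with self.urlopen('/example_view/') as f:
            self.assertEqual(f.read(), b'example view')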
9809af6d6239e50ed0b87cd4ffb3af03af789e8f4d324564e0d3125beccf71f5
from django.urls import path from . import views urlpatterns = [ path('example_view/', views.example_view), path('streaming_example_view/', views.streaming_example_view), path('model_view/', views.model_view), path('create_model_instance/', views.create_model_instance), path('environ_view/', views.environ_view), path('subview_calling_view/', views.subview_calling_view), path('subview/', views.subview), path('check_model_instance_from_subview/', views.check_model_instance_from_subview), path('method_view/', views.method_view), ]
525c04ef31ab6e366a2fe328ab59e7303ff2868f2e6591dda8a0366bb6035e23
from io import BytesIO

from django.core.handlers.wsgi import WSGIRequest
from django.core.servers.basehttp import WSGIRequestHandler
from django.test import SimpleTestCase
from django.test.client import RequestFactory


class Stub:
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)

    def sendall(self, data):
        self.makefile('wb').write(data)


class WSGIRequestHandlerTestCase(SimpleTestCase):
    request_factory = RequestFactory()

    def test_log_message(self):
        request = WSGIRequest(self.request_factory.get('/').environ)
        request.makefile = lambda *args, **kwargs: BytesIO()
        handler = WSGIRequestHandler(request, '192.168.0.2', None)
        level_status_codes = {
            'info': [200, 301, 304],
            'warning': [400, 403, 404],
            'error': [500, 503],
        }
        for level, status_codes in level_status_codes.items():
            for status_code in status_codes:
                # The correct level gets the message.
                with self.assertLogs('django.server', level.upper()) as cm:
                    handler.log_message('GET %s %s', 'A', str(status_code))
                self.assertIn('GET A %d' % status_code, cm.output[0])

                # Incorrect levels don't have any messages.
                for wrong_level in level_status_codes:
                    if wrong_level != level:
                        with self.assertLogs('django.server', 'INFO') as cm:
                            handler.log_message('GET %s %s', 'A', str(status_code))
                        self.assertNotEqual(cm.records[0].levelname, wrong_level.upper())

    def test_https(self):
        request = WSGIRequest(self.request_factory.get('/').environ)
        request.makefile = lambda *args, **kwargs: BytesIO()
        handler = WSGIRequestHandler(request, '192.168.0.2', None)

        with self.assertLogs('django.server', 'ERROR') as cm:
            handler.log_message("GET %s %s", '\x16\x03', "4")
        self.assertIn(
            "You're accessing the development server over HTTPS, "
            "but it only supports HTTP.",
            cm.records[0].getMessage()
        )

    def test_strips_underscore_headers(self):
        """WSGIRequestHandler ignores headers containing underscores.

        This follows the lead of nginx and Apache 2.4, and is to avoid
        ambiguity between dashes and underscores in mapping to WSGI environ,
        which can have security implications.
        """
        def test_app(environ, start_response):
            """A WSGI app that just reflects its HTTP environ."""
            start_response('200 OK', [])
            http_environ_items = sorted(
                '%s:%s' % (k, v) for k, v in environ.items()
                if k.startswith('HTTP_')
            )
            yield (','.join(http_environ_items)).encode()

        rfile = BytesIO()
        rfile.write(b"GET / HTTP/1.0\r\n")
        rfile.write(b"Some-Header: good\r\n")
        rfile.write(b"Some_Header: bad\r\n")
        rfile.write(b"Other_Header: bad\r\n")
        rfile.seek(0)

        # WSGIRequestHandler closes the output file; we need to make this a
        # no-op so we can still read its contents.
        class UnclosableBytesIO(BytesIO):
            def close(self):
                pass

        wfile = UnclosableBytesIO()

        def makefile(mode, *a, **kw):
            if mode == 'rb':
                return rfile
            elif mode == 'wb':
                return wfile

        request = Stub(makefile=makefile)
        server = Stub(base_environ={}, get_app=lambda: test_app)

        # Prevent logging from appearing in test output.
        with self.assertLogs('django.server', 'INFO'):
            # Instantiating a handler runs the request as a side effect.
            WSGIRequestHandler(request, '192.168.0.2', server)

        wfile.seek(0)
        body = list(wfile.readlines())[-1]

        self.assertEqual(body, b'HTTP_SOME_HEADER:good')
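# An illustration of why underscores are stripped (see
# test_strips_underscore_headers above): the CGI/WSGI convention maps both
# '-' and '_' in a header name to '_' in the environ key, so two distinct
# headers would collide on the same key.
if __name__ == '__main__':
    for header in ('Some-Header', 'Some_Header'):
        print('HTTP_' + header.upper().replace('-', '_'))
    # Both lines print 'HTTP_SOME_HEADER', hence Django drops the one
    # spelled with an underscore.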
6cea017ab0f3ad355bbeec4454dde81022cd15565aa5b9ee653e7e27871de302
from django.db import DEFAULT_DB_ALIAS, connections from django.test import LiveServerTestCase, TestCase class LiveServerThreadTest(TestCase): def run_live_server_thread(self, connections_override=None): thread = LiveServerTestCase._create_server_thread(connections_override) thread.daemon = True thread.start() thread.is_ready.wait() thread.terminate() def test_closes_connections(self): conn = connections[DEFAULT_DB_ALIAS] if conn.vendor == 'sqlite' and conn.is_in_memory_db(): self.skipTest("the sqlite backend's close() method is a no-op when using an in-memory database") # Pass a connection to the thread to check they are being closed. connections_override = {DEFAULT_DB_ALIAS: conn} conn.inc_thread_sharing() try: self.assertTrue(conn.is_usable()) self.run_live_server_thread(connections_override) self.assertFalse(conn.is_usable()) finally: conn.dec_thread_sharing()
f3ff06a8aefcfb1c569c639e3cad0897624d1c26538dfbf9ef6791b1f5771900
from urllib.request import urlopen from django.http import HttpResponse, StreamingHttpResponse from django.views.decorators.csrf import csrf_exempt from .models import Person def example_view(request): return HttpResponse('example view') def streaming_example_view(request): return StreamingHttpResponse((b'I', b'am', b'a', b'stream')) def model_view(request): people = Person.objects.all() return HttpResponse('\n'.join(person.name for person in people)) def create_model_instance(request): person = Person(name='emily') person.save() return HttpResponse() def environ_view(request): return HttpResponse("\n".join("%s: %r" % (k, v) for k, v in request.environ.items())) def subview(request): return HttpResponse('subview') def subview_calling_view(request): with urlopen(request.GET['url'] + '/subview/') as response: return HttpResponse('subview calling view: {}'.format(response.read().decode())) def check_model_instance_from_subview(request): with urlopen(request.GET['url'] + '/create_model_instance/'): pass with urlopen(request.GET['url'] + '/model_view/') as response: return HttpResponse('subview calling view: {}'.format(response.read().decode())) @csrf_exempt def method_view(request): return HttpResponse(request.method)
ebbb8654be75e1567f480973f7a3542f0f7c2c04e900c81e2fe1eb8b9d981fd0
from django.db import migrations, models
from django.db.migrations import operations
from django.db.migrations.optimizer import MigrationOptimizer
from django.db.migrations.serializer import serializer_factory
from django.test import SimpleTestCase

from .models import EmptyManager, UnicodeModel


class OptimizerTests(SimpleTestCase):
    """
    Tests the migration optimizer.
    """

    def optimize(self, operations, app_label):
        """
        Handy shortcut for getting results + number of loops
        """
        optimizer = MigrationOptimizer()
        return optimizer.optimize(operations, app_label), optimizer._iterations

    def serialize(self, value):
        return serializer_factory(value).serialize()[0]

    def assertOptimizesTo(self, operations, expected, exact=None, less_than=None, app_label=None):
        result, iterations = self.optimize(operations, app_label)
        result = [self.serialize(f) for f in result]
        expected = [self.serialize(f) for f in expected]
        self.assertEqual(expected, result)
        if exact is not None and iterations != exact:
            raise self.failureException(
                "Optimization did not take exactly %s iterations (it took %s)" % (exact, iterations)
            )
        if less_than is not None and iterations >= less_than:
            raise self.failureException(
                "Optimization did not take less than %s iterations (it took %s)" % (less_than, iterations)
            )

    def assertDoesNotOptimize(self, operations, **kwargs):
        self.assertOptimizesTo(operations, operations, **kwargs)

    def test_single(self):
        """
        The optimizer does nothing on a single operation,
        and it does so in just one pass.
        """
        self.assertOptimizesTo(
            [migrations.DeleteModel("Foo")],
            [migrations.DeleteModel("Foo")],
            exact=1,
        )

    def test_create_delete_model(self):
        """
        CreateModel and DeleteModel should collapse into nothing.
        """
        self.assertOptimizesTo(
            [
                migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
                migrations.DeleteModel("Foo"),
            ],
            [],
        )

    def test_create_rename_model(self):
        """
        CreateModel should absorb RenameModels.
        """
        managers = [('objects', EmptyManager())]
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    name="Foo",
                    fields=[("name", models.CharField(max_length=255))],
                    options={'verbose_name': 'Foo'},
                    bases=(UnicodeModel,),
                    managers=managers,
                ),
                migrations.RenameModel("Foo", "Bar"),
            ],
            [
                migrations.CreateModel(
                    "Bar",
                    [("name", models.CharField(max_length=255))],
                    options={'verbose_name': 'Foo'},
                    bases=(UnicodeModel,),
                    managers=managers,
                )
            ],
        )

    def test_rename_model_self(self):
        """
        RenameModels should absorb themselves.
        """
        self.assertOptimizesTo(
            [
                migrations.RenameModel("Foo", "Baa"),
                migrations.RenameModel("Baa", "Bar"),
            ],
            [
                migrations.RenameModel("Foo", "Bar"),
            ],
        )

    def test_create_alter_model_options(self):
        self.assertOptimizesTo(
            [
                migrations.CreateModel('Foo', fields=[]),
                migrations.AlterModelOptions(name='Foo', options={'verbose_name_plural': 'Foozes'}),
            ],
            [
                migrations.CreateModel('Foo', fields=[], options={'verbose_name_plural': 'Foozes'}),
            ]
        )

    def _test_create_alter_foo_delete_model(self, alter_foo):
        """
        CreateModel, AlterModelTable, AlterUniqueTogether/AlterIndexTogether/
        AlterOrderWithRespectTo, and DeleteModel should collapse into nothing.
""" self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.AlterModelTable("Foo", "woohoo"), alter_foo, migrations.DeleteModel("Foo"), ], [], ) def test_create_alter_unique_delete_model(self): self._test_create_alter_foo_delete_model(migrations.AlterUniqueTogether("Foo", [["a", "b"]])) def test_create_alter_index_delete_model(self): self._test_create_alter_foo_delete_model(migrations.AlterIndexTogether("Foo", [["a", "b"]])) def test_create_alter_owrt_delete_model(self): self._test_create_alter_foo_delete_model(migrations.AlterOrderWithRespectTo("Foo", "a")) def _test_alter_alter_model(self, alter_foo, alter_bar): """ Two AlterUniqueTogether/AlterIndexTogether/AlterOrderWithRespectTo should collapse into the second. """ self.assertOptimizesTo( [ alter_foo, alter_bar, ], [ alter_bar, ], ) def test_alter_alter_table_model(self): self._test_alter_alter_model( migrations.AlterModelTable("Foo", "a"), migrations.AlterModelTable("Foo", "b"), ) def test_alter_alter_unique_model(self): self._test_alter_alter_model( migrations.AlterUniqueTogether("Foo", [["a", "b"]]), migrations.AlterUniqueTogether("Foo", [["a", "c"]]), ) def test_alter_alter_index_model(self): self._test_alter_alter_model( migrations.AlterIndexTogether("Foo", [["a", "b"]]), migrations.AlterIndexTogether("Foo", [["a", "c"]]), ) def test_alter_alter_owrt_model(self): self._test_alter_alter_model( migrations.AlterOrderWithRespectTo("Foo", "a"), migrations.AlterOrderWithRespectTo("Foo", "b"), ) def test_optimize_through_create(self): """ We should be able to optimize away create/delete through a create or delete of a different model, but only if the create operation does not mention the model at all. """ # These should work self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())]), migrations.DeleteModel("Foo"), ], [ migrations.CreateModel("Bar", [("size", models.IntegerField())]), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())]), migrations.DeleteModel("Bar"), migrations.DeleteModel("Foo"), ], [], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())]), migrations.DeleteModel("Foo"), migrations.DeleteModel("Bar"), ], [], ) # This should not work - FK should block it self.assertDoesNotOptimize( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]), migrations.DeleteModel("Foo"), ], ) # The same operations should be optimized if app_label is specified and # a FK references a model from the other app. self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]), migrations.DeleteModel("Foo"), ], [ migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]), ], app_label="otherapp", ) # But it shouldn't work if a FK references a model with the same # app_label. 
self.assertDoesNotOptimize( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]), migrations.DeleteModel("Foo"), ], app_label="testapp", ) # This should not work - bases should block it self.assertDoesNotOptimize( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)), migrations.DeleteModel("Foo"), ], ) # The same operations should be optimized if app_label and none of # bases belong to that app. self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)), migrations.DeleteModel("Foo"), ], [ migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)), ], app_label="otherapp", ) # But it shouldn't work if some of bases belongs to the specified app. self.assertDoesNotOptimize( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)), migrations.DeleteModel("Foo"), ], app_label="testapp", ) self.assertOptimizesTo( [ migrations.CreateModel('Book', [('name', models.CharField(max_length=255))]), migrations.CreateModel('Person', [('name', models.CharField(max_length=255))]), migrations.AddField('book', 'author', models.ForeignKey('test_app.Person', models.CASCADE)), migrations.CreateModel('Review', [('book', models.ForeignKey('test_app.Book', models.CASCADE))]), migrations.CreateModel('Reviewer', [('name', models.CharField(max_length=255))]), migrations.AddField('review', 'reviewer', models.ForeignKey('test_app.Reviewer', models.CASCADE)), migrations.RemoveField('book', 'author'), migrations.DeleteModel('Person'), ], [ migrations.CreateModel('Book', [('name', models.CharField(max_length=255))]), migrations.CreateModel('Reviewer', [('name', models.CharField(max_length=255))]), migrations.CreateModel('Review', [ ('book', models.ForeignKey('test_app.Book', models.CASCADE)), ('reviewer', models.ForeignKey('test_app.Reviewer', models.CASCADE)), ]), ], ) def test_create_model_add_field(self): """ AddField should optimize into CreateModel. """ managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[("name", models.CharField(max_length=255))], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), migrations.AddField("Foo", "age", models.IntegerField()), ], [ migrations.CreateModel( name="Foo", fields=[ ("name", models.CharField(max_length=255)), ("age", models.IntegerField()), ], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), ], ) def test_create_model_reordering(self): """ AddField optimizes into CreateModel if it's a FK to a model that's between them (and there's no FK in the other direction), by changing the order of the CreateModel operations. 
""" self.assertOptimizesTo( [ migrations.CreateModel('Foo', [('name', models.CharField(max_length=255))]), migrations.CreateModel('Link', [('url', models.TextField())]), migrations.AddField('Foo', 'link', models.ForeignKey('migrations.Link', models.CASCADE)), ], [ migrations.CreateModel('Link', [('url', models.TextField())]), migrations.CreateModel('Foo', [ ('name', models.CharField(max_length=255)), ('link', models.ForeignKey('migrations.Link', models.CASCADE)) ]), ], ) def test_create_model_reordering_circular_fk(self): """ CreateModel reordering behavior doesn't result in an infinite loop if there are FKs in both directions. """ self.assertOptimizesTo( [ migrations.CreateModel('Bar', [('url', models.TextField())]), migrations.CreateModel('Foo', [('name', models.CharField(max_length=255))]), migrations.AddField('Bar', 'foo_fk', models.ForeignKey('migrations.Foo', models.CASCADE)), migrations.AddField('Foo', 'bar_fk', models.ForeignKey('migrations.Bar', models.CASCADE)), ], [ migrations.CreateModel('Foo', [('name', models.CharField(max_length=255))]), migrations.CreateModel('Bar', [ ('url', models.TextField()), ('foo_fk', models.ForeignKey('migrations.Foo', models.CASCADE)), ]), migrations.AddField('Foo', 'bar_fk', models.ForeignKey('migrations.Bar', models.CASCADE)), ], ) def test_create_model_no_reordering_for_unrelated_fk(self): """ CreateModel order remains unchanged if the later AddField operation isn't a FK between them. """ self.assertDoesNotOptimize( [ migrations.CreateModel('Foo', [('name', models.CharField(max_length=255))]), migrations.CreateModel('Link', [('url', models.TextField())]), migrations.AddField('Other', 'link', models.ForeignKey('migrations.Link', models.CASCADE)), ], ) def test_create_model_no_reordering_of_inherited_model(self): """ A CreateModel that inherits from another isn't reordered to avoid moving it earlier than its parent CreateModel operation. """ self.assertOptimizesTo( [ migrations.CreateModel('Other', [('foo', models.CharField(max_length=255))]), migrations.CreateModel('ParentModel', [('bar', models.CharField(max_length=255))]), migrations.CreateModel( 'ChildModel', [('baz', models.CharField(max_length=255))], bases=('migrations.parentmodel',), ), migrations.AddField('Other', 'fk', models.ForeignKey('migrations.ChildModel', models.CASCADE)), ], [ migrations.CreateModel('ParentModel', [('bar', models.CharField(max_length=255))]), migrations.CreateModel( 'ChildModel', [('baz', models.CharField(max_length=255))], bases=('migrations.parentmodel',), ), migrations.CreateModel( 'Other', [ ('foo', models.CharField(max_length=255)), ('fk', models.ForeignKey('migrations.ChildModel', models.CASCADE)), ] ), ], ) def test_create_model_add_field_not_through_m2m_through(self): """ AddField should NOT optimize into CreateModel if it's an M2M using a through that's created between them. """ self.assertDoesNotOptimize( [ migrations.CreateModel('Employee', []), migrations.CreateModel('Employer', []), migrations.CreateModel('Employment', [ ('employee', models.ForeignKey('migrations.Employee', models.CASCADE)), ('employment', models.ForeignKey('migrations.Employer', models.CASCADE)), ]), migrations.AddField( 'Employer', 'employees', models.ManyToManyField( 'migrations.Employee', through='migrations.Employment', ) ), ], ) def test_create_model_alter_field(self): """ AlterField should optimize into CreateModel. 
""" managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[("name", models.CharField(max_length=255))], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), migrations.AlterField("Foo", "name", models.IntegerField()), ], [ migrations.CreateModel( name="Foo", fields=[ ("name", models.IntegerField()), ], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), ], ) def test_create_model_rename_field(self): """ RenameField should optimize into CreateModel. """ managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[("name", models.CharField(max_length=255))], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), migrations.RenameField("Foo", "name", "title"), ], [ migrations.CreateModel( name="Foo", fields=[ ("title", models.CharField(max_length=255)), ], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), ], ) def test_add_field_rename_field(self): """ RenameField should optimize into AddField """ self.assertOptimizesTo( [ migrations.AddField("Foo", "name", models.CharField(max_length=255)), migrations.RenameField("Foo", "name", "title"), ], [ migrations.AddField("Foo", "title", models.CharField(max_length=255)), ], ) def test_alter_field_rename_field(self): """ RenameField should optimize to the other side of AlterField, and into itself. """ self.assertOptimizesTo( [ migrations.AlterField("Foo", "name", models.CharField(max_length=255)), migrations.RenameField("Foo", "name", "title"), migrations.RenameField("Foo", "title", "nom"), ], [ migrations.RenameField("Foo", "name", "nom"), migrations.AlterField("Foo", "nom", models.CharField(max_length=255)), ], ) def test_create_model_remove_field(self): """ RemoveField should optimize into CreateModel. """ managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[ ("name", models.CharField(max_length=255)), ("age", models.IntegerField()), ], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), migrations.RemoveField("Foo", "age"), ], [ migrations.CreateModel( name="Foo", fields=[ ("name", models.CharField(max_length=255)), ], options={'verbose_name': 'Foo'}, bases=(UnicodeModel,), managers=managers, ), ], ) def test_add_field_alter_field(self): """ AlterField should optimize into AddField. """ self.assertOptimizesTo( [ migrations.AddField("Foo", "age", models.IntegerField()), migrations.AlterField("Foo", "age", models.FloatField(default=2.4)), ], [ migrations.AddField("Foo", name="age", field=models.FloatField(default=2.4)), ], ) def test_add_field_delete_field(self): """ RemoveField should cancel AddField """ self.assertOptimizesTo( [ migrations.AddField("Foo", "age", models.IntegerField()), migrations.RemoveField("Foo", "age"), ], [], ) def test_alter_field_delete_field(self): """ RemoveField should absorb AlterField """ self.assertOptimizesTo( [ migrations.AlterField("Foo", "age", models.IntegerField()), migrations.RemoveField("Foo", "age"), ], [ migrations.RemoveField("Foo", "age"), ], ) def _test_create_alter_foo_field(self, alter): """ CreateModel, AlterFooTogether/AlterOrderWithRespectTo followed by an add/alter/rename field should optimize to CreateModel with options. 
""" option_value = getattr(alter, alter.option_name) options = {alter.option_name: option_value} # AddField self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.AddField("Foo", "c", models.IntegerField()), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.IntegerField()), ], options=options), ], ) # AlterField self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.AlterField("Foo", "b", models.CharField(max_length=255)), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.CharField(max_length=255)), ], options=options), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.IntegerField()), ]), alter, migrations.AlterField("Foo", "c", models.CharField(max_length=255)), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.CharField(max_length=255)), ], options=options), ], ) # RenameField if isinstance(option_value, str): renamed_options = {alter.option_name: 'c'} else: renamed_options = { alter.option_name: { tuple('c' if value == 'b' else value for value in item) for item in option_value } } self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.RenameField("Foo", "b", "c"), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("c", models.IntegerField()), ], options=renamed_options), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.RenameField("Foo", "b", "x"), migrations.RenameField("Foo", "x", "c"), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("c", models.IntegerField()), ], options=renamed_options), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.IntegerField()), ]), alter, migrations.RenameField("Foo", "c", "d"), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("d", models.IntegerField()), ], options=options), ], ) # RemoveField if isinstance(option_value, str): removed_options = None else: removed_options = { alter.option_name: { tuple(value for value in item if value != 'b') for item in option_value } } self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.RemoveField("Foo", "b"), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ], options=removed_options), ] ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.IntegerField()), ]), alter, migrations.RemoveField("Foo", "c"), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ], options=options), ], ) def test_create_alter_unique_field(self): self._test_create_alter_foo_field(migrations.AlterUniqueTogether("Foo", [["a", "b"]])) def test_create_alter_index_field(self): self._test_create_alter_foo_field(migrations.AlterIndexTogether("Foo", [["a", "b"]])) def test_create_alter_owrt_field(self): self._test_create_alter_foo_field(migrations.AlterOrderWithRespectTo("Foo", 
"b")) def test_optimize_through_fields(self): """ field-level through checking is working. This should manage to collapse model Foo to nonexistence, and model Bar to a single IntegerField called "width". """ self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())]), migrations.AddField("Foo", "age", models.IntegerField()), migrations.AddField("Bar", "width", models.IntegerField()), migrations.AlterField("Foo", "age", models.IntegerField()), migrations.RenameField("Bar", "size", "dimensions"), migrations.RemoveField("Foo", "age"), migrations.RenameModel("Foo", "Phou"), migrations.RemoveField("Bar", "dimensions"), migrations.RenameModel("Phou", "Fou"), migrations.DeleteModel("Fou"), ], [ migrations.CreateModel("Bar", [("width", models.IntegerField())]), ], ) def test_optimize_elidable_operation(self): elidable_operation = operations.base.Operation() elidable_operation.elidable = True self.assertOptimizesTo( [ elidable_operation, migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), elidable_operation, migrations.CreateModel("Bar", [("size", models.IntegerField())]), elidable_operation, migrations.RenameModel("Foo", "Phou"), migrations.DeleteModel("Bar"), elidable_operation, ], [ migrations.CreateModel("Phou", [("name", models.CharField(max_length=255))]), ], )
706d3d6e5a8c45cfb839d1666c241ea52b3ce5d422f95309aa52dd5beed6d30a
import datetime
from unittest import mock

from django.db.migrations.questioner import (
    InteractiveMigrationQuestioner, MigrationQuestioner,
)
from django.test import SimpleTestCase
from django.test.utils import captured_stdout, override_settings


class QuestionerTests(SimpleTestCase):
    @override_settings(
        INSTALLED_APPS=['migrations'],
        MIGRATION_MODULES={'migrations': None},
    )
    def test_ask_initial_with_disabled_migrations(self):
        questioner = MigrationQuestioner()
        self.assertIs(False, questioner.ask_initial('migrations'))

    @mock.patch('builtins.input', return_value='datetime.timedelta(days=1)')
    def test_timedelta_default(self, mocked_input):
        questioner = InteractiveMigrationQuestioner()
        with captured_stdout():
            value = questioner._ask_default()
        self.assertEqual(value, datetime.timedelta(days=1))
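
    # An editorial sketch (not part of Django's suite; the method name is
    # hypothetical): _ask_default() eval()s whatever the user types, so a
    # quoted string literal round-trips as a plain str, just like the
    # timedelta expression in the test above.
    @mock.patch('builtins.input', return_value="'it works'")
    def test_string_default_sketch(self, mocked_input):
        questioner = InteractiveMigrationQuestioner()
        with captured_stdout():
            value = questioner._ask_default()
        self.assertEqual(value, 'it works')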
6bb4144bb803fda655078c878ea87e35e6e7ddc1ef37d30146cec7beafffad01
from unittest import mock from django.apps.registry import apps as global_apps from django.db import connection from django.db.migrations.exceptions import InvalidMigrationPlan from django.db.migrations.executor import MigrationExecutor from django.db.migrations.graph import MigrationGraph from django.db.migrations.recorder import MigrationRecorder from django.db.utils import DatabaseError from django.test import ( SimpleTestCase, modify_settings, override_settings, skipUnlessDBFeature, ) from .test_base import MigrationTestBase @modify_settings(INSTALLED_APPS={'append': 'migrations2'}) class ExecutorTests(MigrationTestBase): """ Tests the migration executor (full end-to-end running). Bear in mind that if these are failing you should fix the other test failures first, as they may be propagating into here. """ available_apps = ["migrations", "migrations2", "django.contrib.auth", "django.contrib.contenttypes"] @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_run(self): """ Tests running a simple set of migrations. """ executor = MigrationExecutor(connection) # Let's look at the plan first and make sure it's up to scratch plan = executor.migration_plan([("migrations", "0002_second")]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), (executor.loader.graph.nodes["migrations", "0002_second"], False), ], ) # Were the tables there before? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") # Alright, let's try running it executor.migrate([("migrations", "0002_second")]) # Are the tables there now? self.assertTableExists("migrations_author") self.assertTableExists("migrations_book") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Alright, let's undo what we did plan = executor.migration_plan([("migrations", None)]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0002_second"], True), (executor.loader.graph.nodes["migrations", "0001_initial"], True), ], ) executor.migrate([("migrations", None)]) # Are the tables gone? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_run_with_squashed(self): """ Tests running a squashed migration from zero (should ignore what it replaces) """ executor = MigrationExecutor(connection) # Check our leaf node is the squashed one leaves = [key for key in executor.loader.graph.leaf_nodes() if key[0] == "migrations"] self.assertEqual(leaves, [("migrations", "0001_squashed_0002")]) # Check the plan plan = executor.migration_plan([("migrations", "0001_squashed_0002")]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_squashed_0002"], False), ], ) # Were the tables there before? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") # Alright, let's try running it executor.migrate([("migrations", "0001_squashed_0002")]) # Are the tables there now? self.assertTableExists("migrations_author") self.assertTableExists("migrations_book") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Alright, let's undo what we did. Should also just use squashed. plan = executor.migration_plan([("migrations", None)]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_squashed_0002"], True), ], ) executor.migrate([("migrations", None)]) # Are the tables gone? 
self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}) def test_non_atomic_migration(self): """ Applying a non-atomic migration works as expected. """ executor = MigrationExecutor(connection) with self.assertRaisesMessage(RuntimeError, "Abort migration"): executor.migrate([("migrations", "0001_initial")]) self.assertTableExists("migrations_publisher") migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps Publisher = migrations_apps.get_model("migrations", "Publisher") self.assertTrue(Publisher.objects.exists()) self.assertTableNotExists("migrations_book") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_atomic_operation"}) def test_atomic_operation_in_non_atomic_migration(self): """ An atomic operation is properly rolled back inside a non-atomic migration. """ executor = MigrationExecutor(connection) with self.assertRaisesMessage(RuntimeError, "Abort migration"): executor.migrate([("migrations", "0001_initial")]) migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps Editor = migrations_apps.get_model("migrations", "Editor") self.assertFalse(Editor.objects.exists()) # Record previous migration as successful. executor.migrate([("migrations", "0001_initial")], fake=True) # Rebuild the graph to reflect the new DB state. executor.loader.build_graph() # Migrating backwards is also atomic. with self.assertRaisesMessage(RuntimeError, "Abort migration"): executor.migrate([("migrations", None)]) self.assertFalse(Editor.objects.exists()) @override_settings(MIGRATION_MODULES={ "migrations": "migrations.test_migrations", "migrations2": "migrations2.test_migrations_2", }) def test_empty_plan(self): """ Re-planning a full migration of a fully-migrated set doesn't perform spurious unmigrations and remigrations. There was previously a bug where the executor just always performed the backwards plan for applied migrations - which even for the most recent migration in an app, might include other, dependent apps, and these were being unmigrated. """ # Make the initial plan, check it executor = MigrationExecutor(connection) plan = executor.migration_plan([ ("migrations", "0002_second"), ("migrations2", "0001_initial"), ]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), (executor.loader.graph.nodes["migrations", "0002_second"], False), (executor.loader.graph.nodes["migrations2", "0001_initial"], False), ], ) # Fake-apply all migrations executor.migrate([ ("migrations", "0002_second"), ("migrations2", "0001_initial") ], fake=True) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Now plan a second time and make sure it's empty plan = executor.migration_plan([ ("migrations", "0002_second"), ("migrations2", "0001_initial"), ]) self.assertEqual(plan, []) # The resulting state should include applied migrations. 
        state = executor.migrate([
            ("migrations", "0002_second"),
            ("migrations2", "0001_initial"),
        ])
        self.assertIn(('migrations', 'book'), state.models)
        self.assertIn(('migrations', 'author'), state.models)
        self.assertIn(('migrations2', 'otherauthor'), state.models)

        # Erase all the fake records
        executor.recorder.record_unapplied("migrations2", "0001_initial")
        executor.recorder.record_unapplied("migrations", "0002_second")
        executor.recorder.record_unapplied("migrations", "0001_initial")

    @override_settings(MIGRATION_MODULES={
        "migrations": "migrations.test_migrations",
        "migrations2": "migrations2.test_migrations_2_no_deps",
    })
    def test_mixed_plan_not_supported(self):
        """
        Although the MigrationExecutor interface allows for mixed migration
        plans (combined forwards and backwards migrations), this is not
        supported.
        """
        # Prepare for mixed plan
        executor = MigrationExecutor(connection)
        plan = executor.migration_plan([("migrations", "0002_second")])
        self.assertEqual(
            plan,
            [
                (executor.loader.graph.nodes["migrations", "0001_initial"], False),
                (executor.loader.graph.nodes["migrations", "0002_second"], False),
            ],
        )
        executor.migrate(None, plan)
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        self.assertIn(('migrations', '0001_initial'), executor.loader.applied_migrations)
        self.assertIn(('migrations', '0002_second'), executor.loader.applied_migrations)
        self.assertNotIn(('migrations2', '0001_initial'), executor.loader.applied_migrations)

        # Generate mixed plan
        plan = executor.migration_plan([
            ("migrations", None),
            ("migrations2", "0001_initial"),
        ])
        msg = (
            'Migration plans with both forwards and backwards migrations are '
            'not supported. Please split your migration process into separate '
            'plans of only forwards OR backwards migrations.'
        )
        with self.assertRaisesMessage(InvalidMigrationPlan, msg) as cm:
            executor.migrate(None, plan)
        self.assertEqual(
            cm.exception.args[1],
            [
                (executor.loader.graph.nodes["migrations", "0002_second"], True),
                (executor.loader.graph.nodes["migrations", "0001_initial"], True),
                (executor.loader.graph.nodes["migrations2", "0001_initial"], False),
            ],
        )
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        executor.migrate([
            ("migrations", None),
            ("migrations2", None),
        ])
        # Are the tables gone?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_book")
        self.assertTableNotExists("migrations2_otherauthor")

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_soft_apply(self):
        """
        Tests detection of initial migrations already having been applied.
        """
        state = {"faked": None}

        def fake_storer(phase, migration=None, fake=None):
            state["faked"] = fake
        executor = MigrationExecutor(connection, progress_callback=fake_storer)
        # Were the tables there before?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Run it normally
        self.assertEqual(
            executor.migration_plan([("migrations", "0001_initial")]),
            [
                (executor.loader.graph.nodes["migrations", "0001_initial"], False),
            ],
        )
        executor.migrate([("migrations", "0001_initial")])
        # Are the tables there now?
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble")
        # We shouldn't have faked that one
        self.assertIs(state["faked"], False)
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        # Fake-reverse that
        executor.migrate([("migrations", None)], fake=True)
        # Are the tables still there?
self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") # Make sure that was faked self.assertIs(state["faked"], True) # Finally, migrate forwards; this should fake-apply our initial migration executor.loader.build_graph() self.assertEqual( executor.migration_plan([("migrations", "0001_initial")]), [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), ], ) # Applying the migration should raise a database level error # because we haven't given the --fake-initial option with self.assertRaises(DatabaseError): executor.migrate([("migrations", "0001_initial")]) # Reset the faked state state = {"faked": None} # Allow faking of initial CreateModel operations executor.migrate([("migrations", "0001_initial")], fake_initial=True) self.assertIs(state["faked"], True) # And migrate back to clean up the database executor.loader.build_graph() executor.migrate([("migrations", None)]) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") @override_settings( MIGRATION_MODULES={ "migrations": "migrations.test_migrations_custom_user", "django.contrib.auth": "django.contrib.auth.migrations", }, AUTH_USER_MODEL="migrations.Author", ) def test_custom_user(self): """ Regression test for #22325 - references to a custom user model defined in the same app are not resolved correctly. """ executor = MigrationExecutor(connection) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") # Migrate forwards executor.migrate([("migrations", "0001_initial")]) self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") # Make sure the soft-application detection works (#23093) # Change table_names to not return auth_user during this as # it wouldn't be there in a normal run, and ensure migrations.Author # exists in the global app registry temporarily. old_table_names = connection.introspection.table_names connection.introspection.table_names = lambda c: [x for x in old_table_names(c) if x != "auth_user"] migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps global_apps.get_app_config("migrations").models["author"] = migrations_apps.get_model("migrations", "author") try: migration = executor.loader.get_migration("auth", "0001_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], True) finally: connection.introspection.table_names = old_table_names del global_apps.get_app_config("migrations").models["author"] # And migrate back to clean up the database executor.loader.build_graph() executor.migrate([("migrations", None)]) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") @override_settings( MIGRATION_MODULES={ "migrations": "migrations.test_add_many_to_many_field_initial", }, ) def test_detect_soft_applied_add_field_manytomanyfield(self): """ executor.detect_soft_applied() detects ManyToManyField tables from an AddField operation. This checks the case of AddField in a migration with other operations (0001) and the case of AddField in its own migration (0002). """ tables = [ # from 0001 "migrations_project", "migrations_task", "migrations_project_tasks", # from 0002 "migrations_task_projects", ] executor = MigrationExecutor(connection) # Create the tables for 0001 but make it look like the migration hasn't # been applied. 
executor.migrate([("migrations", "0001_initial")]) executor.migrate([("migrations", None)], fake=True) for table in tables[:3]: self.assertTableExists(table) # Table detection sees 0001 is applied but not 0002. migration = executor.loader.get_migration("migrations", "0001_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], True) migration = executor.loader.get_migration("migrations", "0002_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], False) # Create the tables for both migrations but make it look like neither # has been applied. executor.loader.build_graph() executor.migrate([("migrations", "0001_initial")], fake=True) executor.migrate([("migrations", "0002_initial")]) executor.loader.build_graph() executor.migrate([("migrations", None)], fake=True) # Table detection sees 0002 is applied. migration = executor.loader.get_migration("migrations", "0002_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], True) # Leave the tables for 0001 except the many-to-many table. That missing # table should cause detect_soft_applied() to return False. with connection.schema_editor() as editor: for table in tables[2:]: editor.execute(editor.sql_delete_table % {"table": table}) migration = executor.loader.get_migration("migrations", "0001_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], False) # Cleanup by removing the remaining tables. with connection.schema_editor() as editor: for table in tables[:2]: editor.execute(editor.sql_delete_table % {"table": table}) for table in tables: self.assertTableNotExists(table) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.lookuperror_a", "migrations.migrations_test_apps.lookuperror_b", "migrations.migrations_test_apps.lookuperror_c" ] ) def test_unrelated_model_lookups_forwards(self): """ #24123 - All models of apps already applied which are unrelated to the first app being applied are part of the initial model state. """ try: executor = MigrationExecutor(connection) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") executor.migrate([("lookuperror_b", "0003_b3")]) self.assertTableExists("lookuperror_b_b3") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Migrate forwards -- This led to a lookup LookupErrors because # lookuperror_b.B2 is already applied executor.migrate([ ("lookuperror_a", "0004_a4"), ("lookuperror_c", "0003_c3"), ]) self.assertTableExists("lookuperror_a_a4") self.assertTableExists("lookuperror_c_c3") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() finally: # Cleanup executor.migrate([ ("lookuperror_a", None), ("lookuperror_b", None), ("lookuperror_c", None), ]) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.lookuperror_a", "migrations.migrations_test_apps.lookuperror_b", "migrations.migrations_test_apps.lookuperror_c" ] ) def test_unrelated_model_lookups_backwards(self): """ #24123 - All models of apps being unapplied which are unrelated to the first app being unapplied are part of the initial model state. 
""" try: executor = MigrationExecutor(connection) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") executor.migrate([ ("lookuperror_a", "0004_a4"), ("lookuperror_b", "0003_b3"), ("lookuperror_c", "0003_c3"), ]) self.assertTableExists("lookuperror_b_b3") self.assertTableExists("lookuperror_a_a4") self.assertTableExists("lookuperror_c_c3") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Migrate backwards -- This led to a lookup LookupErrors because # lookuperror_b.B2 is not in the initial state (unrelated to app c) executor.migrate([("lookuperror_a", None)]) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() finally: # Cleanup executor.migrate([ ("lookuperror_b", None), ("lookuperror_c", None) ]) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") @override_settings( INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_a', 'migrations.migrations_test_apps.mutate_state_b', ] ) def test_unrelated_applied_migrations_mutate_state(self): """ #26647 - Unrelated applied migrations should be part of the final state in both directions. """ executor = MigrationExecutor(connection) executor.migrate([ ('mutate_state_b', '0002_add_field'), ]) # Migrate forward. executor.loader.build_graph() state = executor.migrate([ ('mutate_state_a', '0001_initial'), ]) self.assertIn('added', dict(state.models['mutate_state_b', 'b'].fields)) executor.loader.build_graph() # Migrate backward. state = executor.migrate([ ('mutate_state_a', None), ]) self.assertIn('added', dict(state.models['mutate_state_b', 'b'].fields)) executor.migrate([ ('mutate_state_b', None), ]) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_process_callback(self): """ #24129 - Tests callback process """ call_args_list = [] def callback(*args): call_args_list.append(args) executor = MigrationExecutor(connection, progress_callback=callback) # Were the tables there before? 
self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") executor.migrate([ ("migrations", "0001_initial"), ("migrations", "0002_second"), ]) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() executor.migrate([ ("migrations", None), ("migrations", None), ]) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") migrations = executor.loader.graph.nodes expected = [ ("render_start",), ("render_success",), ("apply_start", migrations['migrations', '0001_initial'], False), ("apply_success", migrations['migrations', '0001_initial'], False), ("apply_start", migrations['migrations', '0002_second'], False), ("apply_success", migrations['migrations', '0002_second'], False), ("render_start",), ("render_success",), ("unapply_start", migrations['migrations', '0002_second'], False), ("unapply_success", migrations['migrations', '0002_second'], False), ("unapply_start", migrations['migrations', '0001_initial'], False), ("unapply_success", migrations['migrations', '0001_initial'], False), ] self.assertEqual(call_args_list, expected) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.alter_fk.author_app", "migrations.migrations_test_apps.alter_fk.book_app", ] ) def test_alter_id_type_with_fk(self): try: executor = MigrationExecutor(connection) self.assertTableNotExists("author_app_author") self.assertTableNotExists("book_app_book") # Apply initial migrations executor.migrate([ ("author_app", "0001_initial"), ("book_app", "0001_initial"), ]) self.assertTableExists("author_app_author") self.assertTableExists("book_app_book") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Apply PK type alteration executor.migrate([("author_app", "0002_alter_id")]) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() finally: # We can't simply unapply the migrations here because there is no # implicit cast from VARCHAR to INT on the database level. with connection.schema_editor() as editor: editor.execute(editor.sql_delete_table % {"table": "book_app_book"}) editor.execute(editor.sql_delete_table % {"table": "author_app_author"}) self.assertTableNotExists("author_app_author") self.assertTableNotExists("book_app_book") executor.migrate([("author_app", None)], fake=True) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_apply_all_replaced_marks_replacement_as_applied(self): """ Applying all replaced migrations marks replacement as applied (#24628). """ recorder = MigrationRecorder(connection) # Place the database in a state where the replaced migrations are # partially applied: 0001 is applied, 0002 is not. recorder.record_applied("migrations", "0001_initial") executor = MigrationExecutor(connection) # Use fake because we don't actually have the first migration # applied, so the second will fail. And there's no need to actually # create/modify tables here, we're just testing the # MigrationRecord, which works the same with or without fake. executor.migrate([("migrations", "0002_second")], fake=True) # Because we've now applied 0001 and 0002 both, their squashed # replacement should be marked as applied. 
        self.assertIn(
            ("migrations", "0001_squashed_0002"),
            recorder.applied_migrations(),
        )

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
    def test_migrate_marks_replacement_applied_even_if_it_did_nothing(self):
        """
        A new squash migration will be marked as applied even if all its
        replaced migrations were previously already applied (#24628).
        """
        recorder = MigrationRecorder(connection)
        # Record all replaced migrations as applied
        recorder.record_applied("migrations", "0001_initial")
        recorder.record_applied("migrations", "0002_second")
        executor = MigrationExecutor(connection)
        executor.migrate([("migrations", "0001_squashed_0002")])

        # Because 0001 and 0002 are both applied, even though this migrate run
        # didn't apply anything new, their squashed replacement should be
        # marked as applied.
        self.assertIn(
            ("migrations", "0001_squashed_0002"),
            recorder.applied_migrations(),
        )

    # When the feature is False, the operation and the record won't be
    # performed in a transaction and the test will always pass.
    @skipUnlessDBFeature('can_rollback_ddl')
    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
    def test_migrations_applied_and_recorded_atomically(self):
        """Migrations are applied and recorded atomically."""
        executor = MigrationExecutor(connection)
        with mock.patch('django.db.migrations.executor.MigrationExecutor.record_migration') as record_migration:
            record_migration.side_effect = RuntimeError('Recording migration failed.')
            with self.assertRaisesMessage(RuntimeError, 'Recording migration failed.'):
                executor.migrate([('migrations', '0001_initial')])
        # The migration isn't recorded as applied since it failed.
        migration_recorder = MigrationRecorder(connection)
        self.assertFalse(migration_recorder.migration_qs.filter(app='migrations', name='0001_initial').exists())
        self.assertTableNotExists('migrations_author')


class FakeLoader:
    def __init__(self, graph, applied):
        self.graph = graph
        self.applied_migrations = applied


class FakeMigration:
    """Really all we need is any object with a debug-useful repr."""
    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return 'M<%s>' % self.name


class ExecutorUnitTests(SimpleTestCase):
    """(More) isolated unit tests for executor methods."""
    def test_minimize_rollbacks(self):
        """
        Minimize unnecessary rollbacks in connected apps.

        When you say "./manage.py migrate appA 0001", rather than migrating to
        just after appA-0001 in the linearized migration plan (which could
        roll back migrations in other apps that depend on appA 0001, but don't
        need to be rolled back since we're not rolling back appA 0001), we
        migrate to just before appA-0002.
        """
        a1_impl = FakeMigration('a1')
        a1 = ('a', '1')
        a2_impl = FakeMigration('a2')
        a2 = ('a', '2')
        b1_impl = FakeMigration('b1')
        b1 = ('b', '1')
        graph = MigrationGraph()
        graph.add_node(a1, a1_impl)
        graph.add_node(a2, a2_impl)
        graph.add_node(b1, b1_impl)
        graph.add_dependency(None, b1, a1)
        graph.add_dependency(None, a2, a1)

        executor = MigrationExecutor(None)
        executor.loader = FakeLoader(graph, {
            a1: a1_impl,
            b1: b1_impl,
            a2: a2_impl,
        })

        plan = executor.migration_plan({a1})

        self.assertEqual(plan, [(a2_impl, True)])

    def test_minimize_rollbacks_branchy(self):
        r"""
        Minimize rollbacks when target has multiple in-app children.
a: 1 <---- 3 <--\ \ \- 2 <--- 4 \ \ b: \- 1 <--- 2 """ a1_impl = FakeMigration('a1') a1 = ('a', '1') a2_impl = FakeMigration('a2') a2 = ('a', '2') a3_impl = FakeMigration('a3') a3 = ('a', '3') a4_impl = FakeMigration('a4') a4 = ('a', '4') b1_impl = FakeMigration('b1') b1 = ('b', '1') b2_impl = FakeMigration('b2') b2 = ('b', '2') graph = MigrationGraph() graph.add_node(a1, a1_impl) graph.add_node(a2, a2_impl) graph.add_node(a3, a3_impl) graph.add_node(a4, a4_impl) graph.add_node(b1, b1_impl) graph.add_node(b2, b2_impl) graph.add_dependency(None, a2, a1) graph.add_dependency(None, a3, a1) graph.add_dependency(None, a4, a2) graph.add_dependency(None, a4, a3) graph.add_dependency(None, b2, b1) graph.add_dependency(None, b1, a1) graph.add_dependency(None, b2, a2) executor = MigrationExecutor(None) executor.loader = FakeLoader(graph, { a1: a1_impl, b1: b1_impl, a2: a2_impl, b2: b2_impl, a3: a3_impl, a4: a4_impl, }) plan = executor.migration_plan({a1}) should_be_rolled_back = [b2_impl, a4_impl, a2_impl, a3_impl] exp = [(m, True) for m in should_be_rolled_back] self.assertEqual(plan, exp) def test_backwards_nothing_to_do(self): r""" If the current state satisfies the given target, do nothing. a: 1 <--- 2 b: \- 1 c: \- 1 If a1 is applied already and a2 is not, and we're asked to migrate to a1, don't apply or unapply b1 or c1, regardless of their current state. """ a1_impl = FakeMigration('a1') a1 = ('a', '1') a2_impl = FakeMigration('a2') a2 = ('a', '2') b1_impl = FakeMigration('b1') b1 = ('b', '1') c1_impl = FakeMigration('c1') c1 = ('c', '1') graph = MigrationGraph() graph.add_node(a1, a1_impl) graph.add_node(a2, a2_impl) graph.add_node(b1, b1_impl) graph.add_node(c1, c1_impl) graph.add_dependency(None, a2, a1) graph.add_dependency(None, b1, a1) graph.add_dependency(None, c1, a1) executor = MigrationExecutor(None) executor.loader = FakeLoader(graph, { a1: a1_impl, b1: b1_impl, }) plan = executor.migration_plan({a1}) self.assertEqual(plan, [])
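
# An editorial sketch (not part of Django's suite) of the raw graph traversal
# that migration_plan() builds on: MigrationGraph.forwards_plan() lists a
# target's dependencies first, while backwards_plan() lists its dependents
# first. The helper name is hypothetical; FakeMigration is reused from above.
def _graph_plan_sketch():
    a1, a2, b1 = ('a', '1'), ('a', '2'), ('b', '1')
    graph = MigrationGraph()
    for key in (a1, a2, b1):
        graph.add_node(key, FakeMigration('%s%s' % key))
    graph.add_dependency(None, a2, a1)  # a2 depends on a1
    graph.add_dependency(None, b1, a1)  # b1 depends on a1
    # Applying a2 pulls in a1 first; rolling back a1 rolls back everything
    # that depends on it.
    assert graph.forwards_plan(a2) == [a1, a2]
    assert set(graph.backwards_plan(a1)) == {a1, a2, b1}
    return graph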
217dd4da8d05756e50cc21ce1727d5a4a8a61ad31473738e20e996c27b98cfc8
import datetime
import importlib
import io
import os
import sys
from unittest import mock

from django.apps import apps
from django.core.management import CommandError, call_command
from django.db import (
    ConnectionHandler, DatabaseError, connection, connections, models,
)
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.utils import truncate_name
from django.db.migrations.exceptions import InconsistentMigrationHistory
from django.db.migrations.recorder import MigrationRecorder
from django.test import TestCase, override_settings

from .models import UnicodeModel, UnserializableModel
from .routers import TestRouter
from .test_base import MigrationTestBase


class MigrateTests(MigrationTestBase):
    """
    Tests running the migrate command.
    """
    databases = {'default', 'other'}

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_migrate(self):
        """
        Tests basic usage of the migrate command.
        """
        # No tables are created
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")
        # Run the migrations to 0001 only
        stdout = io.StringIO()
        call_command('migrate', 'migrations', '0001', verbosity=1, stdout=stdout, no_color=True)
        stdout = stdout.getvalue()
        self.assertIn('Target specific migration: 0001_initial, from migrations', stdout)
        self.assertIn('Applying migrations.0001_initial... OK', stdout)
        # The correct tables exist
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")
        # Run migrations all the way
        call_command("migrate", verbosity=0)
        # The correct tables exist
        self.assertTableExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableExists("migrations_book")
        # Unmigrate everything
        stdout = io.StringIO()
        call_command('migrate', 'migrations', 'zero', verbosity=1, stdout=stdout, no_color=True)
        stdout = stdout.getvalue()
        self.assertIn('Unapply all migrations: migrations', stdout)
        self.assertIn('Unapplying migrations.0002_second... OK', stdout)
        # Tables are gone
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")

    @override_settings(INSTALLED_APPS=[
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'migrations.migrations_test_apps.migrated_app',
    ])
    def test_migrate_with_system_checks(self):
        out = io.StringIO()
        call_command('migrate', skip_checks=False, no_color=True, stdout=out)
        self.assertIn('Apply all migrations: migrated_app', out.getvalue())

    @override_settings(INSTALLED_APPS=['migrations', 'migrations.migrations_test_apps.unmigrated_app_syncdb'])
    def test_app_without_migrations(self):
        msg = "App 'unmigrated_app_syncdb' does not have migrations."
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='unmigrated_app_syncdb')

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_clashing_prefix'})
    def test_ambiguous_prefix(self):
        msg = (
            "More than one migration matches 'a' in app 'migrations'. Please "
            "be more specific."
        )
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='migrations', migration_name='a')

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
    def test_unknown_prefix(self):
        msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='migrations', migration_name='nonexistent')

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"})
    def test_migrate_initial_false(self):
        """
        `Migration.initial = False` skips fake-initial detection.
        """
        # Make sure no tables are created
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Run the migrations to 0001 only
        call_command("migrate", "migrations", "0001", verbosity=0)
        # Fake rollback
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        # Make sure fake-initial detection does not run
        with self.assertRaises(DatabaseError):
            call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0)
        call_command("migrate", "migrations", "0001", fake=True, verbosity=0)
        # Real rollback
        call_command("migrate", "migrations", "zero", verbosity=0)
        # Make sure it's all gone
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations"},
        DATABASE_ROUTERS=['migrations.routers.TestRouter'],
    )
    def test_migrate_fake_initial(self):
        """
        --fake-initial only works if all tables created in the initial
        migration of an app exist. Database routers must be obeyed when doing
        that check.
        """
        # Make sure no tables are created
        for db in connections:
            self.assertTableNotExists("migrations_author", using=db)
            self.assertTableNotExists("migrations_tribble", using=db)
        # Run the migrations to 0001 only
        call_command("migrate", "migrations", "0001", verbosity=0)
        call_command("migrate", "migrations", "0001", verbosity=0, database="other")
        # Make sure the right tables exist
        self.assertTableExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Also check the "other" database
        self.assertTableNotExists("migrations_author", using="other")
        self.assertTableExists("migrations_tribble", using="other")

        # Fake a roll-back
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0, database="other")
        # Make sure the tables still exist
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble", using="other")
        # Try to run initial migration
        with self.assertRaises(DatabaseError):
            call_command("migrate", "migrations", "0001", verbosity=0)
        # Run initial migration with an explicit --fake-initial
        out = io.StringIO()
        with mock.patch('django.core.management.color.supports_color', lambda *args: False):
            call_command("migrate", "migrations", "0001", fake_initial=True, stdout=out, verbosity=1)
            call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0, database="other")
        self.assertIn(
            "migrations.0001_initial...
faked", out.getvalue().lower() ) # Run migrations all the way call_command("migrate", verbosity=0) call_command("migrate", verbosity=0, database="other") # Make sure the right tables exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableExists("migrations_book") self.assertTableNotExists("migrations_author", using="other") self.assertTableNotExists("migrations_tribble", using="other") self.assertTableNotExists("migrations_book", using="other") # Fake a roll-back call_command("migrate", "migrations", "zero", fake=True, verbosity=0) call_command("migrate", "migrations", "zero", fake=True, verbosity=0, database="other") # Make sure the tables still exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableExists("migrations_book") # Try to run initial migration with self.assertRaises(DatabaseError): call_command("migrate", "migrations", verbosity=0) # Run initial migration with an explicit --fake-initial with self.assertRaises(DatabaseError): # Fails because "migrations_tribble" does not exist but needs to in # order to make --fake-initial work. call_command("migrate", "migrations", fake_initial=True, verbosity=0) # Fake an apply call_command("migrate", "migrations", fake=True, verbosity=0) call_command("migrate", "migrations", fake=True, verbosity=0, database="other") # Unmigrate everything call_command("migrate", "migrations", "zero", verbosity=0) call_command("migrate", "migrations", "zero", verbosity=0, database="other") # Make sure it's all gone for db in connections: self.assertTableNotExists("migrations_author", using=db) self.assertTableNotExists("migrations_tribble", using=db) self.assertTableNotExists("migrations_book", using=db) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_fake_split_initial"}) def test_migrate_fake_split_initial(self): """ Split initial migrations can be faked with --fake-initial. """ call_command("migrate", "migrations", "0002", verbosity=0) call_command("migrate", "migrations", "zero", fake=True, verbosity=0) out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command("migrate", "migrations", "0002", fake_initial=True, stdout=out, verbosity=1) value = out.getvalue().lower() self.assertIn("migrations.0001_initial... faked", value) self.assertIn("migrations.0002_second... faked", value) # Fake an apply call_command("migrate", "migrations", fake=True, verbosity=0) # Unmigrate everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"}) def test_migrate_conflict_exit(self): """ migrate exits if it detects a conflict. """ with self.assertRaisesMessage(CommandError, "Conflicting migrations detected"): call_command("migrate", "migrations") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_showmigrations_list(self): """ showmigrations --list displays migrations and whether or not they're applied. 
""" out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: True): call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False) self.assertEqual( '\x1b[1mmigrations\n\x1b[0m' ' [ ] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) call_command("migrate", "migrations", "0001", verbosity=0) out = io.StringIO() # Giving the explicit app_label tests for selective `show_list` in the command call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) out = io.StringIO() # Applied datetimes are displayed at verbosity 2+. call_command('showmigrations', 'migrations', stdout=out, verbosity=2, no_color=True) migration1 = MigrationRecorder(connection).migration_qs.get(app='migrations', name='0001_initial') self.assertEqual( 'migrations\n' ' [x] 0001_initial (applied at %s)\n' ' [ ] 0002_second\n' % migration1.applied.strftime('%Y-%m-%d %H:%M:%S'), out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}) def test_showmigrations_plan(self): """ Tests --plan output of showmigrations command """ out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) call_command("migrate", "migrations", "0003", verbosity=0) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_plan'}) def test_migrate_plan(self): """Tests migrate --plan output.""" out = io.StringIO() # Show the plan up to the third migration. call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0001_initial\n' ' Create model Salamander\n' ' Raw Python operation -> Grow salamander tail.\n' 'migrations.0002_second\n' ' Create model Book\n' " Raw SQL operation -> ['SELECT * FROM migrations_book']\n" 'migrations.0003_third\n' ' Create model Author\n' " Raw SQL operation -> ['SELECT * FROM migrations_author']\n", out.getvalue() ) # Migrate to the third migration. call_command('migrate', 'migrations', '0003', verbosity=0) out = io.StringIO() # Show the plan for when there is nothing to apply. 
call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' ' No planned migration operations.\n', out.getvalue() ) out = io.StringIO() # Show the plan for reverse migration back to 0001. call_command('migrate', 'migrations', '0001', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0003_third\n' ' Undo Create model Author\n' " Raw SQL operation -> ['SELECT * FROM migrations_book']\n" 'migrations.0002_second\n' ' Undo Create model Book\n' " Raw SQL operation -> ['SELECT * FROM migrations_salamand…\n", out.getvalue() ) out = io.StringIO() # Show the migration plan to fourth, with truncated details. call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0004_fourth\n' ' Raw SQL operation -> SELECT * FROM migrations_author WHE…\n', out.getvalue() ) # Show the plan when an operation is irreversible. # Migrate to the fourth migration. call_command('migrate', 'migrations', '0004', verbosity=0) out = io.StringIO() call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0004_fourth\n' ' Raw SQL operation -> IRREVERSIBLE\n', out.getvalue() ) # Cleanup by unmigrating everything: fake the irreversible, then # migrate all to zero. call_command('migrate', 'migrations', '0003', fake=True, verbosity=0) call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_empty'}) def test_showmigrations_no_migrations(self): out = io.StringIO() call_command('showmigrations', stdout=out, no_color=True) self.assertEqual('migrations\n (no migrations)\n', out.getvalue().lower()) @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app']) def test_showmigrations_unmigrated_app(self): out = io.StringIO() call_command('showmigrations', 'unmigrated_app', stdout=out, no_color=True) self.assertEqual('unmigrated_app\n (no migrations)\n', out.getvalue().lower()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}) def test_showmigrations_plan_no_migrations(self): """ Tests --plan output of showmigrations command without migrations """ out = io.StringIO() call_command('showmigrations', format='plan', stdout=out, no_color=True) self.assertEqual('(no migrations)\n', out.getvalue().lower()) out = io.StringIO() call_command('showmigrations', format='plan', stdout=out, verbosity=2, no_color=True) self.assertEqual('(no migrations)\n', out.getvalue().lower()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}) def test_showmigrations_plan_squashed(self): """ Tests --plan output of showmigrations command with squashed migrations. """ out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[ ] migrations.1_auto\n" "[ ] migrations.2_auto\n" "[ ] migrations.3_squashed_5\n" "[ ] migrations.6_auto\n" "[ ] migrations.7_auto\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[ ] migrations.1_auto\n" "[ ] migrations.2_auto ... (migrations.1_auto)\n" "[ ] migrations.3_squashed_5 ... (migrations.2_auto)\n" "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n" "[ ] migrations.7_auto ... 
(migrations.6_auto)\n", out.getvalue().lower() ) call_command("migrate", "migrations", "3_squashed_5", verbosity=0) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[x] migrations.1_auto\n" "[x] migrations.2_auto\n" "[x] migrations.3_squashed_5\n" "[ ] migrations.6_auto\n" "[ ] migrations.7_auto\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[x] migrations.1_auto\n" "[x] migrations.2_auto ... (migrations.1_auto)\n" "[x] migrations.3_squashed_5 ... (migrations.2_auto)\n" "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n" "[ ] migrations.7_auto ... (migrations.6_auto)\n", out.getvalue().lower() ) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_b', 'migrations.migrations_test_apps.alter_fk.author_app', 'migrations.migrations_test_apps.alter_fk.book_app', ]) def test_showmigrations_plan_single_app_label(self): """ `showmigrations --plan app_label` output with a single app_label. """ # Single app with no dependencies on other apps. out = io.StringIO() call_command('showmigrations', 'mutate_state_b', format='plan', stdout=out) self.assertEqual( '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) # Single app with dependencies. out = io.StringIO() call_command('showmigrations', 'author_app', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n', out.getvalue() ) # Some migrations already applied. call_command('migrate', 'author_app', '0001', verbosity=0) out = io.StringIO() call_command('showmigrations', 'author_app', format='plan', stdout=out) self.assertEqual( '[X] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n', out.getvalue() ) # Cleanup by unmigrating author_app. call_command('migrate', 'author_app', 'zero', verbosity=0) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_b', 'migrations.migrations_test_apps.alter_fk.author_app', 'migrations.migrations_test_apps.alter_fk.book_app', ]) def test_showmigrations_plan_multiple_app_labels(self): """ `showmigrations --plan app_label` output with multiple app_labels. """ # Multiple apps: author_app depends on book_app; mutate_state_b doesn't # depend on other apps. out = io.StringIO() call_command('showmigrations', 'mutate_state_b', 'author_app', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n' '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) # Multiple apps: args order shouldn't matter (the same result is # expected as above). 
out = io.StringIO() call_command('showmigrations', 'author_app', 'mutate_state_b', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n' '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app']) def test_showmigrations_plan_app_label_no_migrations(self): out = io.StringIO() call_command('showmigrations', 'unmigrated_app', format='plan', stdout=out, no_color=True) self.assertEqual('(no migrations)\n', out.getvalue()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_sqlmigrate_forwards(self): """ sqlmigrate outputs forward looking SQL. """ out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_author = output.find('-- create model author') index_create_table = output.find('create table') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_unique_together = output.find('-- alter unique_together') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) if connection.features.can_rollback_ddl: self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before operation description (unique_together)" ) self.assertGreater( index_op_desc_author, index_tx_start, "Operation description (author) not found or found before transaction start" ) self.assertGreater( index_create_table, index_op_desc_author, "CREATE TABLE not found or found before operation description (author)" ) self.assertGreater( index_op_desc_tribble, index_create_table, "Operation description (tribble) not found or found before CREATE TABLE (author)" ) self.assertGreater( index_op_desc_unique_together, index_op_desc_tribble, "Operation description (unique_together) not found or found before operation description (tribble)" ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_sqlmigrate_backwards(self): """ sqlmigrate outputs reverse looking SQL. """ # Cannot generate the reverse SQL unless we've applied the migration. 
call_command("migrate", "migrations", verbosity=0) out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_unique_together = output.find('-- alter unique_together') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_author = output.find('-- create model author') index_drop_table = output.rfind('drop table') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) if connection.features.can_rollback_ddl: self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before DROP TABLE" ) self.assertGreater( index_op_desc_unique_together, index_tx_start, "Operation description (unique_together) not found or found before transaction start" ) self.assertGreater( index_op_desc_tribble, index_op_desc_unique_together, "Operation description (tribble) not found or found before operation description (unique_together)" ) self.assertGreater( index_op_desc_author, index_op_desc_tribble, "Operation description (author) not found or found before operation description (tribble)" ) self.assertGreater( index_drop_table, index_op_desc_author, "DROP TABLE not found or found before operation description (author)" ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}) def test_sqlmigrate_for_non_atomic_migration(self): """ Transaction wrappers aren't shown for non-atomic migrations. """ out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() queries = [q.strip() for q in output.splitlines()] if connection.ops.start_transaction_sql(): self.assertNotIn(connection.ops.start_transaction_sql().lower(), queries) self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_sqlmigrate_for_non_transactional_databases(self): """ Transaction wrappers aren't shown for databases that don't support transactional DDL. """ out = io.StringIO() with mock.patch.object(connection.features, 'can_rollback_ddl', False): call_command('sqlmigrate', 'migrations', '0001', stdout=out) output = out.getvalue().lower() queries = [q.strip() for q in output.splitlines()] start_transaction_sql = connection.ops.start_transaction_sql() if start_transaction_sql: self.assertNotIn(start_transaction_sql.lower(), queries) self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.migrated_unapplied_app", "migrations.migrations_test_apps.unmigrated_app", ], ) def test_regression_22823_unmigrated_fk_to_migrated_model(self): """ Assuming you have 3 apps, `A`, `B`, and `C`, such that: * `A` has migrations * `B` has a migration we want to apply * `C` has no migrations, but has an FK to `A` When we try to migrate "B", an exception occurs because the "B" was not included in the ProjectState that is used to detect soft-applied migrations (#22823). 
""" call_command("migrate", "migrated_unapplied_app", stdout=io.StringIO()) # unmigrated_app.SillyModel has a foreign key to 'migrations.Tribble', # but that model is only defined in a migration, so the global app # registry never sees it and the reference is left dangling. Remove it # to avoid problems in subsequent tests. del apps._pending_operations[('migrations', 'tribble')] @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app_syncdb']) def test_migrate_syncdb_deferred_sql_executed_with_schemaeditor(self): """ For an app without migrations, editor.execute() is used for executing the syncdb deferred SQL. """ stdout = io.StringIO() with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute: call_command('migrate', run_syncdb=True, verbosity=1, stdout=stdout, no_color=True) create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)]) self.assertEqual(create_table_count, 2) # There's at least one deferred SQL for creating the foreign key # index. self.assertGreater(len(execute.mock_calls), 2) stdout = stdout.getvalue() self.assertIn('Synchronize unmigrated apps: unmigrated_app_syncdb', stdout) self.assertIn('Creating tables...', stdout) table_name = truncate_name('unmigrated_app_syncdb_classroom', connection.ops.max_name_length()) self.assertIn('Creating table %s' % table_name, stdout) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_migrate_syncdb_app_with_migrations(self): msg = "Can't use run_syncdb with app 'migrations' as it has migrations." with self.assertRaisesMessage(CommandError, msg): call_command('migrate', 'migrations', run_syncdb=True, verbosity=0) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.unmigrated_app_syncdb', 'migrations.migrations_test_apps.unmigrated_app_simple', ]) def test_migrate_syncdb_app_label(self): """ Running migrate --run-syncdb with an app_label only creates tables for the specified app. """ stdout = io.StringIO() with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute: call_command('migrate', 'unmigrated_app_syncdb', run_syncdb=True, stdout=stdout) create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)]) self.assertEqual(create_table_count, 2) self.assertGreater(len(execute.mock_calls), 2) self.assertIn('Synchronize unmigrated app: unmigrated_app_syncdb', stdout.getvalue()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_record_replaced(self): """ Running a single squashed migration should record all of the original replaced migrations as run. 
""" recorder = MigrationRecorder(connection) out = io.StringIO() call_command("migrate", "migrations", verbosity=0) call_command("showmigrations", "migrations", stdout=out, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower() ) applied_migrations = recorder.applied_migrations() self.assertIn(("migrations", "0001_initial"), applied_migrations) self.assertIn(("migrations", "0002_second"), applied_migrations) self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations) # Rollback changes call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_record_squashed(self): """ Running migrate for a squashed migration should record as run if all of the replaced migrations have been run (#25231). """ recorder = MigrationRecorder(connection) recorder.record_applied("migrations", "0001_initial") recorder.record_applied("migrations", "0002_second") out = io.StringIO() call_command("migrate", "migrations", verbosity=0) call_command("showmigrations", "migrations", stdout=out, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower() ) self.assertIn( ("migrations", "0001_squashed_0002"), recorder.applied_migrations() ) # No changes were actually applied so there is nothing to rollback @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_migrate_inconsistent_history(self): """ Running migrate with some migrations applied before their dependencies should not be allowed. """ recorder = MigrationRecorder(connection) recorder.record_applied("migrations", "0002_second") msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("migrate") applied_migrations = recorder.applied_migrations() self.assertNotIn(("migrations", "0001_initial"), applied_migrations) class MakeMigrationsTests(MigrationTestBase): """ Tests running the makemigrations command. 
""" def setUp(self): super().setUp() self._old_models = apps.app_configs['migrations'].models.copy() def tearDown(self): apps.app_configs['migrations'].models = self._old_models apps.all_models['migrations'] = self._old_models apps.clear_cache() super().tearDown() def test_files_content(self): self.assertTableNotExists("migrations_unicodemodel") apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) # Check for empty __init__.py file in migrations folder init_file = os.path.join(migration_dir, "__init__.py") self.assertTrue(os.path.exists(init_file)) with open(init_file) as fp: content = fp.read() self.assertEqual(content, '') # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with open(initial_file, encoding='utf-8') as fp: content = fp.read() self.assertIn('migrations.CreateModel', content) self.assertIn('initial = True', content) self.assertIn('úñí©óðé µóðéø', content) # Meta.verbose_name self.assertIn('úñí©óðé µóðéøß', content) # Meta.verbose_name_plural self.assertIn('ÚÑÍ¢ÓÐÉ', content) # title.verbose_name self.assertIn('“Ðjáñgó”', content) # title.default def test_makemigrations_order(self): """ makemigrations should recognize number-only migrations (0001.py). """ module = 'migrations.test_migrations_order' with self.temporary_migration_module(module=module) as migration_dir: if hasattr(importlib, 'invalidate_caches'): # importlib caches os.listdir() on some platforms like macOS # (#23850). importlib.invalidate_caches() call_command('makemigrations', 'migrations', '--empty', '-n', 'a', '-v', '0') self.assertTrue(os.path.exists(os.path.join(migration_dir, '0002_a.py'))) def test_makemigrations_empty_connections(self): empty_connections = ConnectionHandler({'default': {}}) with mock.patch('django.core.management.commands.makemigrations.connections', new=empty_connections): # with no apps out = io.StringIO() call_command('makemigrations', stdout=out) self.assertIn('No changes detected', out.getvalue()) # with an app with self.temporary_migration_module() as migration_dir: call_command('makemigrations', 'migrations', verbosity=0) init_file = os.path.join(migration_dir, '__init__.py') self.assertTrue(os.path.exists(init_file)) @override_settings(INSTALLED_APPS=['migrations', 'migrations2']) def test_makemigrations_consistency_checks_respect_routers(self): """ The history consistency checks in makemigrations respect settings.DATABASE_ROUTERS. """ def patched_has_table(migration_recorder): if migration_recorder.connection is connections['other']: raise Exception('Other connection') else: return mock.DEFAULT self.assertTableNotExists('migrations_unicodemodel') apps.register_model('migrations', UnicodeModel) with mock.patch.object( MigrationRecorder, 'has_table', autospec=True, side_effect=patched_has_table) as has_table: with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) self.assertEqual(has_table.call_count, 1) # 'default' is checked # Router says not to migrate 'other' so consistency shouldn't # be checked. 
with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']): call_command('makemigrations', 'migrations', verbosity=0) self.assertEqual(has_table.call_count, 2) # 'default' again # With a router that doesn't prohibit migrating 'other', # consistency is checked. with self.settings(DATABASE_ROUTERS=['migrations.routers.EmptyRouter']): with self.assertRaisesMessage(Exception, 'Other connection'): call_command('makemigrations', 'migrations', verbosity=0) self.assertEqual(has_table.call_count, 4) # 'default' and 'other' # With a router that doesn't allow migrating on any database, # no consistency checks are made. with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']): with mock.patch.object(TestRouter, 'allow_migrate', return_value=False) as allow_migrate: call_command('makemigrations', 'migrations', verbosity=0) allow_migrate.assert_any_call('other', 'migrations', model_name='UnicodeModel') # allow_migrate() is called with the correct arguments. self.assertGreater(len(allow_migrate.mock_calls), 0) for mock_call in allow_migrate.mock_calls: _, call_args, call_kwargs = mock_call connection_alias, app_name = call_args self.assertIn(connection_alias, ['default', 'other']) # Raises an error if invalid app_name/model_name occurs. apps.get_app_config(app_name).get_model(call_kwargs['model_name']) self.assertEqual(has_table.call_count, 4) def test_failing_migration(self): # If a migration fails to serialize, it shouldn't generate an empty file. #21280 apps.register_model('migrations', UnserializableModel) with self.temporary_migration_module() as migration_dir: with self.assertRaisesMessage(ValueError, 'Cannot serialize'): call_command("makemigrations", "migrations", verbosity=0) initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertFalse(os.path.exists(initial_file)) def test_makemigrations_conflict_exit(self): """ makemigrations exits if it detects a conflict. """ with self.temporary_migration_module(module="migrations.test_migrations_conflict"): with self.assertRaises(CommandError) as context: call_command("makemigrations") exception_message = str(context.exception) self.assertIn( 'Conflicting migrations detected; multiple leaf nodes ' 'in the migration graph:', exception_message ) self.assertIn('0002_second', exception_message) self.assertIn('0002_conflicting_second', exception_message) self.assertIn('in migrations', exception_message) self.assertIn("To fix them run 'python manage.py makemigrations --merge'", exception_message) def test_makemigrations_merge_no_conflict(self): """ makemigrations exits if in merge mode with no conflicts. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("makemigrations", merge=True, stdout=out) self.assertIn("No conflicts detected to merge.", out.getvalue()) def test_makemigrations_empty_no_app_specified(self): """ makemigrations exits if no app is specified with 'empty' mode. """ msg = 'You must supply at least one app label when using --empty.' with self.assertRaisesMessage(CommandError, msg): call_command("makemigrations", empty=True) def test_makemigrations_empty_migration(self): """ makemigrations properly constructs an empty migration. 
""" with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", empty=True, verbosity=0) # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with open(initial_file, encoding='utf-8') as fp: content = fp.read() # Remove all whitespace to check for empty dependencies and operations content = content.replace(' ', '') self.assertIn('dependencies=[\n]', content) self.assertIn('operations=[\n]', content) @override_settings(MIGRATION_MODULES={"migrations": None}) def test_makemigrations_disabled_migrations_for_app(self): """ makemigrations raises a nice error when migrations are disabled for an app. """ msg = ( "Django can't create migrations for app 'migrations' because migrations " "have been disabled via the MIGRATION_MODULES setting." ) with self.assertRaisesMessage(ValueError, msg): call_command("makemigrations", "migrations", empty=True, verbosity=0) def test_makemigrations_no_changes_no_apps(self): """ makemigrations exits when there are no changes and no apps are specified. """ out = io.StringIO() call_command("makemigrations", stdout=out) self.assertIn("No changes detected", out.getvalue()) def test_makemigrations_no_changes(self): """ makemigrations exits when there are no changes to an app. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", stdout=out) self.assertIn("No changes detected in app 'migrations'", out.getvalue()) def test_makemigrations_no_apps_initial(self): """ makemigrations should detect initial is needed on empty migration modules if no app provided. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_empty"): call_command("makemigrations", stdout=out) self.assertIn("0001_initial.py", out.getvalue()) def test_makemigrations_no_init(self): """Migration directories without an __init__.py file are allowed.""" out = io.StringIO() with self.temporary_migration_module(module='migrations.test_migrations_no_init'): call_command('makemigrations', stdout=out) self.assertIn('0001_initial.py', out.getvalue()) def test_makemigrations_migrations_announce(self): """ makemigrations announces the migration at the default verbosity level. """ out = io.StringIO() with self.temporary_migration_module(): call_command("makemigrations", "migrations", stdout=out) self.assertIn("Migrations for 'migrations'", out.getvalue()) def test_makemigrations_no_common_ancestor(self): """ makemigrations fails to merge migrations with no common ancestor. """ with self.assertRaises(ValueError) as context: with self.temporary_migration_module(module="migrations.test_migrations_no_ancestor"): call_command("makemigrations", "migrations", merge=True) exception_message = str(context.exception) self.assertIn("Could not find common ancestor of", exception_message) self.assertIn("0002_second", exception_message) self.assertIn("0002_conflicting_second", exception_message) def test_makemigrations_interactive_reject(self): """ makemigrations enters and exits interactive mode properly. 
""" # Monkeypatch interactive questioner to auto reject with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, verbosity=0) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) def test_makemigrations_interactive_accept(self): """ makemigrations enters interactive mode and merges properly. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) @mock.patch('django.db.migrations.utils.datetime') def test_makemigrations_default_merge_name(self, mock_datetime): mock_datetime.datetime.now.return_value = datetime.datetime(2016, 1, 2, 3, 4) with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge_20160102_0304.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) def test_makemigrations_non_interactive_not_null_addition(self): """ Non-interactive makemigrations fails when a default is missing on a new not-null field. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_int = models.IntegerField() class Meta: app_label = "migrations" out = io.StringIO() with self.assertRaises(SystemExit): with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) def test_makemigrations_non_interactive_not_null_alteration(self): """ Non-interactive makemigrations fails when a default is missing on a field changed to not-null. """ class Author(models.Model): name = models.CharField(max_length=255) slug = models.SlugField() age = models.IntegerField(default=0) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Alter field slug on author", out.getvalue()) def test_makemigrations_non_interactive_no_model_rename(self): """ makemigrations adds and removes a possible model rename in non-interactive mode. """ class RenamedModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Delete model SillyModel", out.getvalue()) self.assertIn("Create model RenamedModel", out.getvalue()) def test_makemigrations_non_interactive_no_field_rename(self): """ makemigrations adds and removes a possible field rename in non-interactive mode. 
""" class SillyModel(models.Model): silly_rename = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Remove field silly_field from sillymodel", out.getvalue()) self.assertIn("Add field silly_rename to sillymodel", out.getvalue()) def test_makemigrations_handle_merge(self): """ makemigrations properly merges the conflicting migrations with --noinput. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=False, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertIn("Created new merge migration", output) def test_makemigration_merge_dry_run(self): """ makemigrations respects --dry-run option when fixing migration conflicts (#24427). """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) def test_makemigration_merge_dry_run_verbosity_3(self): """ `makemigrations --merge --dry-run` writes the merge migration file to stdout with `verbosity == 3` (#24427). """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, verbosity=3, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) # Additional output caused by verbosity 3 # The complete merge migration file that would be written self.assertIn("class Migration(migrations.Migration):", output) self.assertIn("dependencies = [", output) self.assertIn("('migrations', '0002_second')", output) self.assertIn("('migrations', '0002_conflicting_second')", output) self.assertIn("operations = [", output) self.assertIn("]", output) def test_makemigrations_dry_run(self): """ `makemigrations --dry-run` should not ask for defaults. 
""" class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_date = models.DateField() # Added field without a default class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out) # Output the expected changes directly, without asking for defaults self.assertIn("Add field silly_date to sillymodel", out.getvalue()) def test_makemigrations_dry_run_verbosity_3(self): """ Allow `makemigrations --dry-run` to output the migrations file to stdout (with verbosity == 3). """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_char = models.CharField(default="") class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3) # Normal --dry-run output self.assertIn("- Add field silly_char to sillymodel", out.getvalue()) # Additional output caused by verbosity 3 # The complete migrations file that would be written self.assertIn("class Migration(migrations.Migration):", out.getvalue()) self.assertIn("dependencies = [", out.getvalue()) self.assertIn("('migrations', '0001_initial'),", out.getvalue()) self.assertIn("migrations.AddField(", out.getvalue()) self.assertIn("model_name='sillymodel',", out.getvalue()) self.assertIn("name='silly_char',", out.getvalue()) def test_makemigrations_migrations_modules_path_not_exist(self): """ makemigrations creates migrations when specifying a custom location for migration files using MIGRATION_MODULES if the custom path doesn't already exist. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar" with self.temporary_migration_module(module=migration_module) as migration_dir: call_command("makemigrations", "migrations", stdout=out) # Migrations file is actually created in the expected path. initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) # Command output indicates the migration is created. self.assertIn(" - Create model SillyModel", out.getvalue()) @override_settings(MIGRATION_MODULES={'migrations': 'some.nonexistent.path'}) def test_makemigrations_migrations_modules_nonexistent_toplevel_package(self): msg = ( 'Could not locate an appropriate location to create migrations ' 'package some.nonexistent.path. Make sure the toplevel package ' 'exists and can be imported.' ) with self.assertRaisesMessage(ValueError, msg): call_command('makemigrations', 'migrations', empty=True, verbosity=0) def test_makemigrations_interactive_by_default(self): """ The user is prompted to merge by default if there are conflicts and merge is True. Answer negative to differentiate it from behavior when --noinput is specified. 
""" # Monkeypatch interactive questioner to auto reject out = io.StringIO() with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') # This will fail if interactive is False by default self.assertFalse(os.path.exists(merge_file)) self.assertNotIn("Created new merge migration", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_no_merge(self): """ makemigrations does not raise a CommandError when an unspecified app has conflicting migrations. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", merge=False, verbosity=0) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_merge(self): """ makemigrations does not create a merge for an unspecified app even if it has conflicting migrations. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(app_label="migrated_app") as migration_dir: call_command("makemigrations", "migrated_app", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) self.assertIn("No conflicts detected to merge.", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.conflicting_app_with_dependencies"]) def test_makemigrations_merge_dont_output_dependency_operations(self): """ makemigrations --merge does not output any operations from apps that don't belong to a given app. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='N')): out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command( "makemigrations", "conflicting_app_with_dependencies", merge=True, interactive=True, stdout=out ) val = out.getvalue().lower() self.assertIn('merging conflicting_app_with_dependencies\n', val) self.assertIn( ' branch 0002_conflicting_second\n' ' - create model something\n', val ) self.assertIn( ' branch 0002_second\n' ' - delete model tribble\n' ' - remove field silly_field from author\n' ' - add field rating to author\n' ' - create model book\n', val ) def test_makemigrations_with_custom_name(self): """ makemigrations --name generate a custom migration name. 
""" with self.temporary_migration_module() as migration_dir: def cmd(migration_count, migration_name, *args): call_command("makemigrations", "migrations", "--verbosity", "0", "--name", migration_name, *args) migration_file = os.path.join(migration_dir, "%s_%s.py" % (migration_count, migration_name)) # Check for existing migration file in migration folder self.assertTrue(os.path.exists(migration_file)) with open(migration_file, encoding='utf-8') as fp: content = fp.read() content = content.replace(" ", "") return content # generate an initial migration migration_name_0001 = "my_initial_migration" content = cmd("0001", migration_name_0001) self.assertIn("dependencies=[\n]", content) # importlib caches os.listdir() on some platforms like macOS # (#23850). if hasattr(importlib, 'invalidate_caches'): importlib.invalidate_caches() # generate an empty migration migration_name_0002 = "my_custom_migration" content = cmd("0002", migration_name_0002, "--empty") self.assertIn("dependencies=[\n('migrations','0001_%s'),\n]" % migration_name_0001, content) self.assertIn("operations=[\n]", content) def test_makemigrations_with_invalid_custom_name(self): msg = 'The migration name must be a valid Python identifier.' with self.assertRaisesMessage(CommandError, msg): call_command('makemigrations', 'migrations', '--name', 'invalid name', '--empty') def test_makemigrations_check(self): """ makemigrations --check should exit with a non-zero status when there are changes to an app requiring migrations. """ with self.temporary_migration_module(): with self.assertRaises(SystemExit): call_command("makemigrations", "--check", "migrations", verbosity=0) with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "--check", "migrations", verbosity=0) def test_makemigrations_migration_path_output(self): """ makemigrations should print the relative paths to the migrations unless they are outside of the current tree, in which case the absolute path should be shown. """ out = io.StringIO() apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_migration_path_output_valueerror(self): """ makemigrations prints the absolute path if os.path.relpath() raises a ValueError when it's impossible to obtain a relative path, e.g. on Windows if Django is installed on a different drive than where the migration files are created. """ out = io.StringIO() with self.temporary_migration_module() as migration_dir: with mock.patch('os.path.relpath', side_effect=ValueError): call_command('makemigrations', 'migrations', stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_inconsistent_history(self): """ makemigrations should raise InconsistentMigrationHistory exception if there are some migrations applied before their dependencies. 
""" recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0002_second') msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.temporary_migration_module(module="migrations.test_migrations"): with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("makemigrations") @mock.patch('builtins.input', return_value='1') @mock.patch('django.db.migrations.questioner.sys.stdin', mock.MagicMock(encoding=sys.getdefaultencoding())) def test_makemigrations_auto_now_add_interactive(self, *args): """ makemigrations prompts the user when adding auto_now_add to an existing model. """ class Entry(models.Model): title = models.CharField(max_length=255) creation_date = models.DateTimeField(auto_now_add=True) class Meta: app_label = 'migrations' # Monkeypatch interactive questioner to auto accept with mock.patch('django.db.migrations.questioner.sys.stdout', new_callable=io.StringIO) as prompt_stdout: out = io.StringIO() with self.temporary_migration_module(module='migrations.test_auto_now_add'): call_command('makemigrations', 'migrations', interactive=True, stdout=out) output = out.getvalue() prompt_output = prompt_stdout.getvalue() self.assertIn("You can accept the default 'timezone.now' by pressing 'Enter'", prompt_output) self.assertIn("Add field creation_date to entry", output) class SquashMigrationsTests(MigrationTestBase): """ Tests running the squashmigrations command. """ def test_squashmigrations_squashes(self): """ squashmigrations squashes migrations. """ with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") self.assertTrue(os.path.exists(squashed_migration_file)) def test_squashmigrations_initial_attribute(self): with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn("initial = True", content) def test_squashmigrations_optimizes(self): """ squashmigrations optimizes operations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, stdout=out) self.assertIn("Optimized from 8 operations to 2 operations.", out.getvalue()) def test_ticket_23799_squashmigrations_no_optimize(self): """ squashmigrations --no-optimize doesn't optimize operations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, no_optimize=True, stdout=out) self.assertIn("Skipping optimization", out.getvalue()) def test_squashmigrations_valid_start(self): """ squashmigrations accepts a starting migration. 
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes") as migration_dir: call_command("squashmigrations", "migrations", "0002", "0003", interactive=False, verbosity=1, stdout=out) squashed_migration_file = os.path.join(migration_dir, "0002_second_squashed_0003_third.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn(" ('migrations', '0001_initial')", content) self.assertNotIn("initial = True", content) out = out.getvalue() self.assertNotIn(" - 0001_initial", out) self.assertIn(" - 0002_second", out) self.assertIn(" - 0003_third", out) def test_squashmigrations_invalid_start(self): """ squashmigrations doesn't accept a starting migration after the ending migration. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): msg = ( "The migration 'migrations.0003_third' cannot be found. Maybe " "it comes after the migration 'migrations.0002_second'" ) with self.assertRaisesMessage(CommandError, msg): call_command("squashmigrations", "migrations", "0003", "0002", interactive=False, verbosity=0) def test_squashed_name_with_start_migration_name(self): """--squashed-name specifies the new migration's name.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', '0002', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) def test_squashed_name_without_start_migration_name(self): """--squashed-name also works if a start migration is omitted.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) class AppLabelErrorTests(TestCase): """ This class inherits TestCase because MigrationTestBase uses `available_apps = ['migrations']` which means that it's the only installed app. 'django.contrib.auth' must be in INSTALLED_APPS for some of these tests. """ nonexistent_app_error = "No installed app with label 'nonexistent_app'." did_you_mean_auth_error = ( "No installed app with label 'django.contrib.auth'. Did you mean " "'auth'?" 
) def test_makemigrations_nonexistent_app_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('makemigrations', 'nonexistent_app', stderr=err) self.assertIn(self.nonexistent_app_error, err.getvalue()) def test_makemigrations_app_name_specified_as_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('makemigrations', 'django.contrib.auth', stderr=err) self.assertIn(self.did_you_mean_auth_error, err.getvalue()) def test_migrate_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('migrate', 'nonexistent_app') def test_migrate_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('migrate', 'django.contrib.auth') def test_showmigrations_nonexistent_app_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('showmigrations', 'nonexistent_app', stderr=err) self.assertIn(self.nonexistent_app_error, err.getvalue()) def test_showmigrations_app_name_specified_as_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('showmigrations', 'django.contrib.auth', stderr=err) self.assertIn(self.did_you_mean_auth_error, err.getvalue()) def test_sqlmigrate_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('sqlmigrate', 'nonexistent_app', '0002') def test_sqlmigrate_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('sqlmigrate', 'django.contrib.auth', '0002') def test_squashmigrations_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('squashmigrations', 'nonexistent_app', '0002') def test_squashmigrations_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('squashmigrations', 'django.contrib.auth', '0002')
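
# Editor's note: a minimal illustrative sketch (not part of the upstream
# suite) of the output-capture pattern the tests above use everywhere.
# call_command() accepts stdout/stderr keyword arguments, so an io.StringIO()
# buffer can stand in for the console and be inspected afterwards. The helper
# name is hypothetical; it assumes the io and call_command imports at the top
# of this module.
def capture_command_output(command, *args, **kwargs):
    """Run a management command and return everything it wrote to stdout."""
    out = io.StringIO()
    call_command(command, *args, stdout=out, **kwargs)
    return out.getvalue()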
5991b927e8f9f3c0d663770d504673369d2cda017e9ae383674ffdbacb442571
import datetime import decimal import enum import functools import math import os import re import uuid from unittest import mock import custom_migration_operations.more_operations import custom_migration_operations.operations from django import get_version from django.conf import SettingsReference, settings from django.core.validators import EmailValidator, RegexValidator from django.db import migrations, models from django.db.migrations.serializer import BaseSerializer from django.db.migrations.writer import MigrationWriter, OperationWriter from django.test import SimpleTestCase from django.utils.deconstruct import deconstructible from django.utils.functional import SimpleLazyObject from django.utils.timezone import get_default_timezone, get_fixed_timezone, utc from django.utils.translation import gettext_lazy as _ from .models import FoodManager, FoodQuerySet class Money(decimal.Decimal): def deconstruct(self): return ( '%s.%s' % (self.__class__.__module__, self.__class__.__name__), [str(self)], {} ) class TestModel1: def upload_to(self): return '/somewhere/dynamic/' thing = models.FileField(upload_to=upload_to) class OperationWriterTests(SimpleTestCase): def test_empty_signature(self): operation = custom_migration_operations.operations.TestOperation() buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.TestOperation(\n' '),' ) def test_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation(1, 2) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' '),' ) def test_kwargs_signature(self): operation = custom_migration_operations.operations.KwargsOperation(kwarg1=1) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' '),' ) def test_args_kwargs_signature(self): operation = custom_migration_operations.operations.ArgsKwargsOperation(1, 2, kwarg2=4) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsKwargsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' ' kwarg2=4,\n' '),' ) def test_nested_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation( custom_migration_operations.operations.ArgsOperation(1, 2), custom_migration_operations.operations.KwargsOperation(kwarg1=3, kwarg2=4) ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' ' ),\n' ' arg2=custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=3,\n' ' kwarg2=4,\n' ' ),\n' '),' ) def test_multiline_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation("test\n arg1", "test\narg2") buff, imports = OperationWriter(operation, indentation=0).serialize() 
self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, "custom_migration_operations.operations.ArgsOperation(\n" " arg1='test\\n arg1',\n" " arg2='test\\narg2',\n" ")," ) def test_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation([1, 2]) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' 1,\n' ' 2,\n' ' ],\n' '),' ) def test_nested_operation_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation( arg=[ custom_migration_operations.operations.KwargsOperation( kwarg1=1, kwarg2=2, ), ] ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' ' kwarg2=2,\n' ' ),\n' ' ],\n' '),' ) class WriterTests(SimpleTestCase): """ Tests the migration writer (makes migration files from Migration instances) """ def safe_exec(self, string, value=None): d = {} try: exec(string, globals(), d) except Exception as e: if value: self.fail("Could not exec %r (from value %r): %s" % (string.strip(), value, e)) else: self.fail("Could not exec %r: %s" % (string.strip(), e)) return d def serialize_round_trip(self, value): string, imports = MigrationWriter.serialize(value) return self.safe_exec("%s\ntest_value_result = %s" % ("\n".join(imports), string), value)['test_value_result'] def assertSerializedEqual(self, value): self.assertEqual(self.serialize_round_trip(value), value) def assertSerializedResultEqual(self, value, target): self.assertEqual(MigrationWriter.serialize(value), target) def assertSerializedFieldEqual(self, value): new_value = self.serialize_round_trip(value) self.assertEqual(value.__class__, new_value.__class__) self.assertEqual(value.max_length, new_value.max_length) self.assertEqual(value.null, new_value.null) self.assertEqual(value.unique, new_value.unique) def test_serialize_numbers(self): self.assertSerializedEqual(1) self.assertSerializedEqual(1.2) self.assertTrue(math.isinf(self.serialize_round_trip(float("inf")))) self.assertTrue(math.isinf(self.serialize_round_trip(float("-inf")))) self.assertTrue(math.isnan(self.serialize_round_trip(float("nan")))) self.assertSerializedEqual(decimal.Decimal('1.3')) self.assertSerializedResultEqual( decimal.Decimal('1.3'), ("Decimal('1.3')", {'from decimal import Decimal'}) ) self.assertSerializedEqual(Money('1.3')) self.assertSerializedResultEqual( Money('1.3'), ("migrations.test_writer.Money('1.3')", {'import migrations.test_writer'}) ) def test_serialize_constants(self): self.assertSerializedEqual(None) self.assertSerializedEqual(True) self.assertSerializedEqual(False) def test_serialize_strings(self): self.assertSerializedEqual(b"foobar") string, imports = MigrationWriter.serialize(b"foobar") self.assertEqual(string, "b'foobar'") self.assertSerializedEqual("föobár") string, imports = MigrationWriter.serialize("foobar") self.assertEqual(string, "'foobar'") def test_serialize_multiline_strings(self): self.assertSerializedEqual(b"foo\nbar") string, imports = MigrationWriter.serialize(b"foo\nbar") self.assertEqual(string, "b'foo\\nbar'") 
self.assertSerializedEqual("föo\nbár") string, imports = MigrationWriter.serialize("foo\nbar") self.assertEqual(string, "'foo\\nbar'") def test_serialize_collections(self): self.assertSerializedEqual({1: 2}) self.assertSerializedEqual(["a", 2, True, None]) self.assertSerializedEqual({2, 3, "eighty"}) self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]}) self.assertSerializedEqual(_('Hello')) def test_serialize_builtin_types(self): self.assertSerializedEqual([list, tuple, dict, set, frozenset]) self.assertSerializedResultEqual( [list, tuple, dict, set, frozenset], ("[list, tuple, dict, set, frozenset]", set()) ) def test_serialize_lazy_objects(self): pattern = re.compile(r'^foo$') lazy_pattern = SimpleLazyObject(lambda: pattern) self.assertEqual(self.serialize_round_trip(lazy_pattern), pattern) def test_serialize_enums(self): class TextEnum(enum.Enum): A = 'a-value' B = 'value-b' class BinaryEnum(enum.Enum): A = b'a-value' B = b'value-b' class IntEnum(enum.IntEnum): A = 1 B = 2 self.assertSerializedResultEqual( TextEnum.A, ("migrations.test_writer.TextEnum('a-value')", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( BinaryEnum.A, ("migrations.test_writer.BinaryEnum(b'a-value')", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( IntEnum.B, ("migrations.test_writer.IntEnum(2)", {'import migrations.test_writer'}) ) field = models.CharField(default=TextEnum.B, choices=[(m.value, m) for m in TextEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[" "('a-value', migrations.test_writer.TextEnum('a-value')), " "('value-b', migrations.test_writer.TextEnum('value-b'))], " "default=migrations.test_writer.TextEnum('value-b'))" ) field = models.CharField(default=BinaryEnum.B, choices=[(m.value, m) for m in BinaryEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[" "(b'a-value', migrations.test_writer.BinaryEnum(b'a-value')), " "(b'value-b', migrations.test_writer.BinaryEnum(b'value-b'))], " "default=migrations.test_writer.BinaryEnum(b'value-b'))" ) field = models.IntegerField(default=IntEnum.A, choices=[(m.value, m) for m in IntEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.IntegerField(choices=[" "(1, migrations.test_writer.IntEnum(1)), " "(2, migrations.test_writer.IntEnum(2))], " "default=migrations.test_writer.IntEnum(1))" ) def test_serialize_uuid(self): self.assertSerializedEqual(uuid.uuid1()) self.assertSerializedEqual(uuid.uuid4()) uuid_a = uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8') uuid_b = uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2') self.assertSerializedResultEqual( uuid_a, ("uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8')", {'import uuid'}) ) self.assertSerializedResultEqual( uuid_b, ("uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2')", {'import uuid'}) ) field = models.UUIDField(choices=((uuid_a, 'UUID A'), (uuid_b, 'UUID B')), default=uuid_a) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.UUIDField(choices=[" "(uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'), 'UUID A'), " "(uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2'), 'UUID B')], " "default=uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'))" ) def test_serialize_functions(self): with self.assertRaisesMessage(ValueError, 'Cannot serialize function: lambda'): self.assertSerializedEqual(lambda x: 42) self.assertSerializedEqual(models.SET_NULL) string, imports = 
MigrationWriter.serialize(models.SET(42)) self.assertEqual(string, 'models.SET(42)') self.serialize_round_trip(models.SET(42)) def test_serialize_datetime(self): self.assertSerializedEqual(datetime.datetime.utcnow()) self.assertSerializedEqual(datetime.datetime.utcnow) self.assertSerializedEqual(datetime.datetime.today()) self.assertSerializedEqual(datetime.datetime.today) self.assertSerializedEqual(datetime.date.today()) self.assertSerializedEqual(datetime.date.today) self.assertSerializedEqual(datetime.datetime.now().time()) self.assertSerializedEqual(datetime.datetime(2014, 1, 1, 1, 1, tzinfo=get_default_timezone())) self.assertSerializedEqual(datetime.datetime(2013, 12, 31, 22, 1, tzinfo=get_fixed_timezone(180))) self.assertSerializedResultEqual( datetime.datetime(2014, 1, 1, 1, 1), ("datetime.datetime(2014, 1, 1, 1, 1)", {'import datetime'}) ) self.assertSerializedResultEqual( datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc), ( "datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)", {'import datetime', 'from django.utils.timezone import utc'}, ) ) def test_serialize_fields(self): self.assertSerializedFieldEqual(models.CharField(max_length=255)) self.assertSerializedResultEqual( models.CharField(max_length=255), ("models.CharField(max_length=255)", {"from django.db import models"}) ) self.assertSerializedFieldEqual(models.TextField(null=True, blank=True)) self.assertSerializedResultEqual( models.TextField(null=True, blank=True), ("models.TextField(blank=True, null=True)", {'from django.db import models'}) ) def test_serialize_settings(self): self.assertSerializedEqual(SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL")) self.assertSerializedResultEqual( SettingsReference("someapp.model", "AUTH_USER_MODEL"), ("settings.AUTH_USER_MODEL", {"from django.conf import settings"}) ) def test_serialize_iterators(self): self.assertSerializedResultEqual( ((x, x * x) for x in range(3)), ("((0, 0), (1, 1), (2, 4))", set()) ) def test_serialize_compiled_regex(self): """ Make sure compiled regex can be serialized. """ regex = re.compile(r'^\w+$') self.assertSerializedEqual(regex) def test_serialize_class_based_validators(self): """ Ticket #22943: Test serialization of class-based validators, including compiled regexes. """ validator = RegexValidator(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(message='hello')") self.serialize_round_trip(validator) # Test with a compiled regex. validator = RegexValidator(regex=re.compile(r'^\w+$')) string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(regex=re.compile('^\\\\w+$'))") self.serialize_round_trip(validator) # Test a string regex with flag validator = RegexValidator(r'^[0-9]+$', flags=re.S) string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator('^[0-9]+$', flags=re.RegexFlag(16))") self.serialize_round_trip(validator) # Test message and code validator = RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid') string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid')") self.serialize_round_trip(validator) # Test with a subclass. 
        validator = EmailValidator(message="hello")
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(string, "django.core.validators.EmailValidator(message='hello')")
        self.serialize_round_trip(validator)

        validator = deconstructible(path="migrations.test_writer.EmailValidator")(EmailValidator)(message="hello")
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(string, "migrations.test_writer.EmailValidator(message='hello')")

        validator = deconstructible(path="custom.EmailValidator")(EmailValidator)(message="hello")
        with self.assertRaisesMessage(ImportError, "No module named 'custom'"):
            MigrationWriter.serialize(validator)

        validator = deconstructible(path="django.core.validators.EmailValidator2")(EmailValidator)(message="hello")
        with self.assertRaisesMessage(ValueError, "Could not find object EmailValidator2 in django.core.validators."):
            MigrationWriter.serialize(validator)

    def test_serialize_empty_nonempty_tuple(self):
        """
        Ticket #22679: makemigrations generates invalid code for (an empty
        tuple) default_permissions = ()
        """
        empty_tuple = ()
        one_item_tuple = ('a',)
        many_items_tuple = ('a', 'b', 'c')
        self.assertSerializedEqual(empty_tuple)
        self.assertSerializedEqual(one_item_tuple)
        self.assertSerializedEqual(many_items_tuple)

    def test_serialize_range(self):
        string, imports = MigrationWriter.serialize(range(1, 5))
        self.assertEqual(string, 'range(1, 5)')
        self.assertEqual(imports, set())

    def test_serialize_builtins(self):
        string, imports = MigrationWriter.serialize(range)
        self.assertEqual(string, 'range')
        self.assertEqual(imports, set())

    def test_serialize_unbound_method_reference(self):
        """An unbound method used within a class body can be serialized."""
        self.serialize_round_trip(TestModel1.thing)

    def test_serialize_local_function_reference(self):
        """A reference in a local scope can't be serialized."""
        class TestModel2:
            def upload_to(self):
                return "somewhere dynamic"
            thing = models.FileField(upload_to=upload_to)
        with self.assertRaisesMessage(ValueError, 'Could not find function upload_to in migrations.test_writer'):
            self.serialize_round_trip(TestModel2.thing)

    def test_serialize_managers(self):
        self.assertSerializedEqual(models.Manager())
        self.assertSerializedResultEqual(
            FoodQuerySet.as_manager(),
            ('migrations.models.FoodQuerySet.as_manager()', {'import migrations.models'})
        )
        self.assertSerializedEqual(FoodManager('a', 'b'))
        self.assertSerializedEqual(FoodManager('x', 'y', c=3, d=4))

    def test_serialize_frozensets(self):
        self.assertSerializedEqual(frozenset())
        self.assertSerializedEqual(frozenset("let it go"))

    def test_serialize_set(self):
        self.assertSerializedEqual(set())
        self.assertSerializedResultEqual(set(), ('set()', set()))
        self.assertSerializedEqual({'a'})
        self.assertSerializedResultEqual({'a'}, ("{'a'}", set()))

    def test_serialize_timedelta(self):
        self.assertSerializedEqual(datetime.timedelta())
        self.assertSerializedEqual(datetime.timedelta(minutes=42))

    def test_serialize_functools_partial(self):
        value = functools.partial(datetime.timedelta, 1, seconds=2)
        result = self.serialize_round_trip(value)
        self.assertEqual(result.func, value.func)
        self.assertEqual(result.args, value.args)
        self.assertEqual(result.keywords, value.keywords)

    def test_serialize_functools_partialmethod(self):
        value = functools.partialmethod(datetime.timedelta, 1, seconds=2)
        result = self.serialize_round_trip(value)
        self.assertIsInstance(result, functools.partialmethod)
        self.assertEqual(result.func, value.func)
        self.assertEqual(result.args, value.args)
        self.assertEqual(result.keywords, value.keywords)

    def test_serialize_type_none(self):
        self.assertSerializedEqual(type(None))

    def test_simple_migration(self):
        """
        Tests serializing a simple migration.
        """
        fields = {
            'charfield': models.DateTimeField(default=datetime.datetime.utcnow),
            'datetimefield': models.DateTimeField(default=datetime.datetime.utcnow),
        }

        options = {
            'verbose_name': 'My model',
            'verbose_name_plural': 'My models',
        }

        migration = type("Migration", (migrations.Migration,), {
            "operations": [
                migrations.CreateModel("MyModel", tuple(fields.items()), options, (models.Model,)),
                migrations.CreateModel("MyModel2", tuple(fields.items()), bases=(models.Model,)),
                migrations.CreateModel(
                    name="MyModel3", fields=tuple(fields.items()), options=options, bases=(models.Model,)
                ),
                migrations.DeleteModel("MyModel"),
                migrations.AddField("OtherModel", "datetimefield", fields["datetimefield"]),
            ],
            "dependencies": [("testapp", "some_other_one")],
        })
        writer = MigrationWriter(migration)
        output = writer.as_string()
        # We don't test the output formatting - that's too fragile.
        # Just make sure it runs for now, and that things look alright.
        result = self.safe_exec(output)
        self.assertIn("Migration", result)

    def test_migration_path(self):
        test_apps = [
            'migrations.migrations_test_apps.normal',
            'migrations.migrations_test_apps.with_package_model',
            'migrations.migrations_test_apps.without_init_file',
        ]

        base_dir = os.path.dirname(os.path.dirname(__file__))

        for app in test_apps:
            with self.modify_settings(INSTALLED_APPS={'append': app}):
                migration = migrations.Migration('0001_initial', app.split('.')[-1])
                expected_path = os.path.join(base_dir, *(app.split('.') + ['migrations', '0001_initial.py']))
                writer = MigrationWriter(migration)
                self.assertEqual(writer.path, expected_path)

    def test_custom_operation(self):
        migration = type("Migration", (migrations.Migration,), {
            "operations": [
                custom_migration_operations.operations.TestOperation(),
                custom_migration_operations.operations.CreateModel(),
                migrations.CreateModel("MyModel", (), {}, (models.Model,)),
                custom_migration_operations.more_operations.TestOperation()
            ],
            "dependencies": []
        })
        writer = MigrationWriter(migration)
        output = writer.as_string()
        result = self.safe_exec(output)
        self.assertIn("custom_migration_operations", result)
        self.assertNotEqual(
            result['custom_migration_operations'].operations.TestOperation,
            result['custom_migration_operations'].more_operations.TestOperation
        )

    def test_sorted_imports(self):
        """
        #24155 - Tests ordering of imports.
        """
        migration = type("Migration", (migrations.Migration,), {
            "operations": [
                migrations.AddField("mymodel", "myfield", models.DateTimeField(
                    default=datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc),
                )),
            ]
        })
        writer = MigrationWriter(migration)
        output = writer.as_string()
        self.assertIn(
            "import datetime\n"
            "from django.db import migrations, models\n"
            "from django.utils.timezone import utc\n",
            output
        )

    def test_migration_file_header_comments(self):
        """
        Test comments at top of file.
""" migration = type("Migration", (migrations.Migration,), { "operations": [] }) dt = datetime.datetime(2015, 7, 31, 4, 40, 0, 0, tzinfo=utc) with mock.patch('django.db.migrations.writer.now', lambda: dt): for include_header in (True, False): with self.subTest(include_header=include_header): writer = MigrationWriter(migration, include_header) output = writer.as_string() self.assertEqual( include_header, output.startswith( "# Generated by Django %s on 2015-07-31 04:40\n\n" % get_version() ) ) if not include_header: # Make sure the output starts with something that's not # a comment or indentation or blank line self.assertRegex(output.splitlines(keepends=True)[0], r"^[^#\s]+") def test_models_import_omitted(self): """ django.db.models shouldn't be imported if unused. """ migration = type("Migration", (migrations.Migration,), { "operations": [ migrations.AlterModelOptions( name='model', options={'verbose_name': 'model', 'verbose_name_plural': 'models'}, ), ] }) writer = MigrationWriter(migration) output = writer.as_string() self.assertIn("from django.db import migrations\n", output) def test_deconstruct_class_arguments(self): # Yes, it doesn't make sense to use a class as a default for a # CharField. It does make sense for custom fields though, for example # an enumfield that takes the enum class as an argument. class DeconstructibleInstances: def deconstruct(self): return ('DeconstructibleInstances', [], {}) string = MigrationWriter.serialize(models.CharField(default=DeconstructibleInstances))[0] self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructibleInstances)") def test_register_serializer(self): class ComplexSerializer(BaseSerializer): def serialize(self): return 'complex(%r)' % self.value, {} MigrationWriter.register_serializer(complex, ComplexSerializer) self.assertSerializedEqual(complex(1, 2)) MigrationWriter.unregister_serializer(complex) with self.assertRaisesMessage(ValueError, 'Cannot serialize: (1+2j)'): self.assertSerializedEqual(complex(1, 2)) def test_register_non_serializer(self): with self.assertRaisesMessage(ValueError, "'TestModel1' must inherit from 'BaseSerializer'."): MigrationWriter.register_serializer(complex, TestModel1)
e639bc05408086beec12efb7b11210659834d6f50a9933e9c8707357263cb77a
import compileall import os from django.db import connection, connections from django.db.migrations.exceptions import ( AmbiguityError, InconsistentMigrationHistory, NodeNotFoundError, ) from django.db.migrations.loader import MigrationLoader from django.db.migrations.recorder import MigrationRecorder from django.test import TestCase, modify_settings, override_settings from .test_base import MigrationTestBase class RecorderTests(TestCase): """ Tests recording migrations as applied or not. """ databases = {'default', 'other'} def test_apply(self): """ Tests marking migrations as applied/unapplied. """ recorder = MigrationRecorder(connection) self.assertEqual( {(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"}, set(), ) recorder.record_applied("myapp", "0432_ponies") self.assertEqual( {(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"}, {("myapp", "0432_ponies")}, ) # That should not affect records of another database recorder_other = MigrationRecorder(connections['other']) self.assertEqual( {(x, y) for (x, y) in recorder_other.applied_migrations() if x == "myapp"}, set(), ) recorder.record_unapplied("myapp", "0432_ponies") self.assertEqual( {(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"}, set(), ) class LoaderTests(TestCase): """ Tests the disk and database loader, and running through migrations in memory. """ @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) @modify_settings(INSTALLED_APPS={'append': 'basic'}) def test_load(self): """ Makes sure the loader can load the migrations for the test apps, and then render them out to a new Apps. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0002_second")), [ ("migrations", "0001_initial"), ("migrations", "0002_second"), ], ) # Now render it out! project_state = migration_loader.project_state(("migrations", "0002_second")) self.assertEqual(len(project_state.models), 2) author_state = project_state.models["migrations", "author"] self.assertEqual( [x for x, y in author_state.fields], ["id", "name", "slug", "age", "rating"] ) book_state = project_state.models["migrations", "book"] self.assertEqual( [x for x, y in book_state.fields], ["id", "author"] ) # Ensure we've included unmigrated apps in there too self.assertIn("basic", project_state.real_apps) @override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_migrations', 'migrations2': 'migrations2.test_migrations_2', }) @modify_settings(INSTALLED_APPS={'append': 'migrations2'}) def test_plan_handles_repeated_migrations(self): """ _generate_plan() doesn't re-add migrations already in the plan (#29180). """ migration_loader = MigrationLoader(connection) nodes = [('migrations', '0002_second'), ('migrations2', '0001_initial')] self.assertEqual( migration_loader.graph._generate_plan(nodes, at_end=True), [('migrations', '0001_initial'), ('migrations', '0002_second'), ('migrations2', '0001_initial')] ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_unmigdep"}) def test_load_unmigrated_dependency(self): """ Makes sure the loader can load migrations with a dependency on an unmigrated app. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0001_initial")), [ ('contenttypes', '0001_initial'), ('auth', '0001_initial'), ("migrations", "0001_initial"), ], ) # Now render it out! 
project_state = migration_loader.project_state(("migrations", "0001_initial")) self.assertEqual(len([m for a, m in project_state.models if a == "migrations"]), 1) book_state = project_state.models["migrations", "book"] self.assertEqual( [x for x, y in book_state.fields], ["id", "user"] ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}) def test_run_before(self): """ Makes sure the loader uses Migration.run_before. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0002_second")), [ ("migrations", "0001_initial"), ("migrations", "0003_third"), ("migrations", "0002_second"), ], ) @override_settings(MIGRATION_MODULES={ "migrations": "migrations.test_migrations_first", "migrations2": "migrations2.test_migrations_2_first", }) @modify_settings(INSTALLED_APPS={'append': 'migrations2'}) def test_first(self): """ Makes sure the '__first__' migrations build correctly. """ migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "second")), [ ("migrations", "thefirst"), ("migrations2", "0001_initial"), ("migrations2", "0002_second"), ("migrations", "second"), ], ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_name_match(self): "Tests prefix name matching" migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.get_migration_by_prefix("migrations", "0001").name, "0001_initial", ) with self.assertRaises(AmbiguityError): migration_loader.get_migration_by_prefix("migrations", "0") with self.assertRaises(KeyError): migration_loader.get_migration_by_prefix("migrations", "blarg") def test_load_import_error(self): with override_settings(MIGRATION_MODULES={"migrations": "import_error_package"}): with self.assertRaises(ImportError): MigrationLoader(connection) def test_load_module_file(self): with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.file"}): loader = MigrationLoader(connection) self.assertIn( "migrations", loader.unmigrated_apps, "App with migrations module file not in unmigrated apps." ) def test_load_empty_dir(self): with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.namespace"}): loader = MigrationLoader(connection) self.assertIn( "migrations", loader.unmigrated_apps, "App missing __init__.py in migrations module not in unmigrated apps." ) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], ) def test_marked_as_migrated(self): """ Undefined MIGRATION_MODULES implies default migration module. """ migration_loader = MigrationLoader(connection) self.assertEqual(migration_loader.migrated_apps, {'migrated_app'}) self.assertEqual(migration_loader.unmigrated_apps, set()) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], MIGRATION_MODULES={"migrated_app": None}, ) def test_marked_as_unmigrated(self): """ MIGRATION_MODULES allows disabling of migrations for a particular app. 
""" migration_loader = MigrationLoader(connection) self.assertEqual(migration_loader.migrated_apps, set()) self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'}) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], MIGRATION_MODULES={'migrated_app': 'missing-module'}, ) def test_explicit_missing_module(self): """ If a MIGRATION_MODULES override points to a missing module, the error raised during the importation attempt should be propagated unless `ignore_no_migrations=True`. """ with self.assertRaisesMessage(ImportError, 'missing-module'): migration_loader = MigrationLoader(connection) migration_loader = MigrationLoader(connection, ignore_no_migrations=True) self.assertEqual(migration_loader.migrated_apps, set()) self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'}) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_loading_squashed(self): "Tests loading a squashed migration" migration_loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) # Loading with nothing applied should just give us the one node self.assertEqual( len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]), 1, ) # However, fake-apply one migration and it should now use the old two recorder.record_applied("migrations", "0001_initial") migration_loader.build_graph() self.assertEqual( len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]), 2, ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}) def test_loading_squashed_complex(self): "Tests loading a complex set of squashed migrations" loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) def num_nodes(): plan = set(loader.graph.forwards_plan(('migrations', '7_auto'))) return len(plan - loader.applied_migrations.keys()) # Empty database: use squashed migration loader.build_graph() self.assertEqual(num_nodes(), 5) # Starting at 1 or 2 should use the squashed migration too recorder.record_applied("migrations", "1_auto") loader.build_graph() self.assertEqual(num_nodes(), 4) recorder.record_applied("migrations", "2_auto") loader.build_graph() self.assertEqual(num_nodes(), 3) # However, starting at 3 to 5 cannot use the squashed migration recorder.record_applied("migrations", "3_auto") loader.build_graph() self.assertEqual(num_nodes(), 4) recorder.record_applied("migrations", "4_auto") loader.build_graph() self.assertEqual(num_nodes(), 3) # Starting at 5 to 7 we are passed the squashed migrations recorder.record_applied("migrations", "5_auto") loader.build_graph() self.assertEqual(num_nodes(), 2) recorder.record_applied("migrations", "6_auto") loader.build_graph() self.assertEqual(num_nodes(), 1) recorder.record_applied("migrations", "7_auto") loader.build_graph() self.assertEqual(num_nodes(), 0) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_complex_multi_apps.app1", "app2": "migrations.test_migrations_squashed_complex_multi_apps.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_complex_multi_apps.app1", "migrations.test_migrations_squashed_complex_multi_apps.app2", ]}) def test_loading_squashed_complex_multi_apps(self): loader = MigrationLoader(connection) loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) expected_plan = { ('app1', '1_auto'), ('app2', '1_squashed_2'), 
('app1', '2_squashed_3'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_complex_multi_apps.app1", "app2": "migrations.test_migrations_squashed_complex_multi_apps.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_complex_multi_apps.app1", "migrations.test_migrations_squashed_complex_multi_apps.app2", ]}) def test_loading_squashed_complex_multi_apps_partially_applied(self): loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) recorder.record_applied('app1', '1_auto') recorder.record_applied('app1', '2_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app2', '1_squashed_2'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_erroneous"}) def test_loading_squashed_erroneous(self): "Tests loading a complex but erroneous set of squashed migrations" loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) def num_nodes(): plan = set(loader.graph.forwards_plan(('migrations', '7_auto'))) return len(plan - loader.applied_migrations.keys()) # Empty database: use squashed migration loader.build_graph() self.assertEqual(num_nodes(), 5) # Starting at 1 or 2 should use the squashed migration too recorder.record_applied("migrations", "1_auto") loader.build_graph() self.assertEqual(num_nodes(), 4) recorder.record_applied("migrations", "2_auto") loader.build_graph() self.assertEqual(num_nodes(), 3) # However, starting at 3 or 4, nonexistent migrations would be needed. msg = ("Migration migrations.6_auto depends on nonexistent node ('migrations', '5_auto'). " "Django tried to replace migration migrations.5_auto with any of " "[migrations.3_squashed_5] but wasn't able to because some of the replaced " "migrations are already applied.") recorder.record_applied("migrations", "3_auto") with self.assertRaisesMessage(NodeNotFoundError, msg): loader.build_graph() recorder.record_applied("migrations", "4_auto") with self.assertRaisesMessage(NodeNotFoundError, msg): loader.build_graph() # Starting at 5 to 7 we are past the squashed migrations recorder.record_applied("migrations", "5_auto") loader.build_graph() self.assertEqual(num_nodes(), 2) recorder.record_applied("migrations", "6_auto") loader.build_graph() self.assertEqual(num_nodes(), 1) recorder.record_applied("migrations", "7_auto") loader.build_graph() self.assertEqual(num_nodes(), 0) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}, INSTALLED_APPS=['migrations'], ) def test_check_consistent_history(self): loader = MigrationLoader(connection=None) loader.check_consistent_history(connection) recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0002_second') msg = ( "Migration migrations.0002_second is applied before its dependency " "migrations.0001_initial on database 'default'." 
) with self.assertRaisesMessage(InconsistentMigrationHistory, msg): loader.check_consistent_history(connection) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed_extra'}, INSTALLED_APPS=['migrations'], ) def test_check_consistent_history_squashed(self): """ MigrationLoader.check_consistent_history() should ignore unapplied squashed migrations that have all of their `replaces` applied. """ loader = MigrationLoader(connection=None) recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0001_initial') recorder.record_applied('migrations', '0002_second') loader.check_consistent_history(connection) recorder.record_applied('migrations', '0003_third') loader.check_consistent_history(connection) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_ref_squashed.app1", "app2": "migrations.test_migrations_squashed_ref_squashed.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_ref_squashed.app1", "migrations.test_migrations_squashed_ref_squashed.app2", ]}) def test_loading_squashed_ref_squashed(self): "Tests loading a squashed migration with a new migration referencing it" r""" The sample migrations are structured like this: app_1 1 --> 2 ---------------------*--> 3 *--> 4 \ / / *-------------------*----/--> 2_sq_3 --* \ / / =============== \ ============= / == / ====================== app_2 *--> 1_sq_2 --* / \ / *--> 1 --> 2 --* Where 2_sq_3 is a replacing migration for 2 and 3 in app_1, as 1_sq_2 is a replacing migration for 1 and 2 in app_2. """ loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) # Load with nothing applied: both migrations squashed. loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app1', '1_auto'), ('app2', '1_squashed_2'), ('app1', '2_squashed_3'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) # Fake-apply a few from app1: unsquashes migration in app1. recorder.record_applied('app1', '1_auto') recorder.record_applied('app1', '2_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app2', '1_squashed_2'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) # Fake-apply one from app2: unsquashes migration in app2 too. recorder.record_applied('app2', '1_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app2', '2_auto'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_private'}) def test_ignore_files(self): """Files prefixed with underscore, tilde, or dot aren't loaded.""" loader = MigrationLoader(connection) loader.load_disk() migrations = [name for app, name in loader.disk_migrations if app == 'migrations'] self.assertEqual(migrations, ['0001_initial']) class PycLoaderTests(MigrationTestBase): def test_valid(self): """ To support frozen environments, MigrationLoader loads .pyc migrations. """ with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir: # Compile .py files to .pyc files and delete .py files. 
compileall.compile_dir(migration_dir, force=True, quiet=1, legacy=True) for name in os.listdir(migration_dir): if name.endswith('.py'): os.remove(os.path.join(migration_dir, name)) loader = MigrationLoader(connection) self.assertIn(('migrations', '0001_initial'), loader.disk_migrations) def test_invalid(self): """ MigrationLoader reraises ImportErrors caused by "bad magic number" pyc files with a more helpful message. """ with self.temporary_migration_module(module='migrations.test_migrations_bad_pyc') as migration_dir: # The -tpl suffix is to avoid the pyc exclusion in MANIFEST.in. os.rename( os.path.join(migration_dir, '0001_initial.pyc-tpl'), os.path.join(migration_dir, '0001_initial.pyc'), ) msg = ( r"Couldn't import '\w+.migrations.0001_initial' as it appears " "to be a stale .pyc file." ) with self.assertRaisesRegex(ImportError, msg): MigrationLoader(connection)
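# Illustrative sketch, not part of the original test module: combining the
# loader APIs exercised above (graph.leaf_nodes(), graph.forwards_plan(), and
# applied_migrations) to list unapplied migrations for a database, roughly the
# information the showmigrations command derives for display.
def _example_unapplied_migrations(conn):
    loader = MigrationLoader(conn)
    unapplied = []
    for target in loader.graph.leaf_nodes():
        for node in loader.graph.forwards_plan(target):
            # applied_migrations is keyed by (app_label, migration_name).
            if node not in loader.applied_migrations and node not in unapplied:
                unapplied.append(node)
    return unapplied  # e.g. [('myapp', '0002_second'), ...]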
7f4922672cd7e29115b8819db7ce5c7c805585456fe2dc0edec6c8cc35588b18
from django.db.migrations.exceptions import ( CircularDependencyError, NodeNotFoundError, ) from django.db.migrations.graph import DummyNode, MigrationGraph, Node from django.test import SimpleTestCase class GraphTests(SimpleTestCase): """ Tests the digraph structure. """ def test_simple_graph(self): """ Tests a basic dependency graph: app_a: 0001 <-- 0002 <--- 0003 <-- 0004 / app_b: 0001 <-- 0002 <-/ """ # Build graph graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_node(("app_a", "0002"), None) graph.add_node(("app_a", "0003"), None) graph.add_node(("app_a", "0004"), None) graph.add_node(("app_b", "0001"), None) graph.add_node(("app_b", "0002"), None) graph.add_dependency("app_a.0004", ("app_a", "0004"), ("app_a", "0003")) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002")) graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_b", "0002")) graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001")) # Test root migration case self.assertEqual( graph.forwards_plan(("app_a", "0001")), [('app_a', '0001')], ) # Test branch B only self.assertEqual( graph.forwards_plan(("app_b", "0002")), [("app_b", "0001"), ("app_b", "0002")], ) # Test whole graph self.assertEqual( graph.forwards_plan(("app_a", "0004")), [ ('app_b', '0001'), ('app_b', '0002'), ('app_a', '0001'), ('app_a', '0002'), ('app_a', '0003'), ('app_a', '0004'), ], ) # Test reverse to b:0002 self.assertEqual( graph.backwards_plan(("app_b", "0002")), [('app_a', '0004'), ('app_a', '0003'), ('app_b', '0002')], ) # Test roots and leaves self.assertEqual( graph.root_nodes(), [('app_a', '0001'), ('app_b', '0001')], ) self.assertEqual( graph.leaf_nodes(), [('app_a', '0004'), ('app_b', '0002')], ) def test_complex_graph(self): r""" Tests a complex dependency graph: app_a: 0001 <-- 0002 <--- 0003 <-- 0004 \ \ / / app_b: 0001 <-\ 0002 <-X / \ \ / app_c: \ 0001 <-- 0002 <- """ # Build graph graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_node(("app_a", "0002"), None) graph.add_node(("app_a", "0003"), None) graph.add_node(("app_a", "0004"), None) graph.add_node(("app_b", "0001"), None) graph.add_node(("app_b", "0002"), None) graph.add_node(("app_c", "0001"), None) graph.add_node(("app_c", "0002"), None) graph.add_dependency("app_a.0004", ("app_a", "0004"), ("app_a", "0003")) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002")) graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_b", "0002")) graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001")) graph.add_dependency("app_a.0004", ("app_a", "0004"), ("app_c", "0002")) graph.add_dependency("app_c.0002", ("app_c", "0002"), ("app_c", "0001")) graph.add_dependency("app_c.0001", ("app_c", "0001"), ("app_b", "0001")) graph.add_dependency("app_c.0002", ("app_c", "0002"), ("app_a", "0002")) # Test branch C only self.assertEqual( graph.forwards_plan(("app_c", "0002")), [('app_b', '0001'), ('app_c', '0001'), ('app_a', '0001'), ('app_a', '0002'), ('app_c', '0002')], ) # Test whole graph self.assertEqual( graph.forwards_plan(("app_a", "0004")), [ ('app_b', '0001'), ('app_c', '0001'), ('app_a', '0001'), ('app_a', '0002'), ('app_c', '0002'), ('app_b', '0002'), ('app_a', '0003'), ('app_a', '0004'), ], ) # Test reverse to b:0001 self.assertEqual( graph.backwards_plan(("app_b", "0001")), [ ('app_a', '0004'), ('app_c', '0002'), ('app_c', 
'0001'), ('app_a', '0003'), ('app_b', '0002'), ('app_b', '0001'), ], ) # Test roots and leaves self.assertEqual( graph.root_nodes(), [('app_a', '0001'), ('app_b', '0001'), ('app_c', '0001')], ) self.assertEqual( graph.leaf_nodes(), [('app_a', '0004'), ('app_b', '0002'), ('app_c', '0002')], ) def test_circular_graph(self): """ Tests a circular dependency graph. """ # Build graph graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_node(("app_a", "0002"), None) graph.add_node(("app_a", "0003"), None) graph.add_node(("app_b", "0001"), None) graph.add_node(("app_b", "0002"), None) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002")) graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) graph.add_dependency("app_a.0001", ("app_a", "0001"), ("app_b", "0002")) graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001")) graph.add_dependency("app_b.0001", ("app_b", "0001"), ("app_a", "0003")) # Test whole graph with self.assertRaises(CircularDependencyError): graph.ensure_not_cyclic() def test_circular_graph_2(self): graph = MigrationGraph() graph.add_node(('A', '0001'), None) graph.add_node(('C', '0001'), None) graph.add_node(('B', '0001'), None) graph.add_dependency('A.0001', ('A', '0001'), ('B', '0001')) graph.add_dependency('B.0001', ('B', '0001'), ('A', '0001')) graph.add_dependency('C.0001', ('C', '0001'), ('B', '0001')) with self.assertRaises(CircularDependencyError): graph.ensure_not_cyclic() def test_iterative_dfs(self): graph = MigrationGraph() root = ("app_a", "1") graph.add_node(root, None) expected = [root] for i in range(2, 750): parent = ("app_a", str(i - 1)) child = ("app_a", str(i)) graph.add_node(child, None) graph.add_dependency(str(i), child, parent) expected.append(child) leaf = expected[-1] forwards_plan = graph.forwards_plan(leaf) self.assertEqual(expected, forwards_plan) backwards_plan = graph.backwards_plan(root) self.assertEqual(expected[::-1], backwards_plan) def test_iterative_dfs_complexity(self): """ In a graph with merge migrations, iterative_dfs() traverses each node only once even if there are multiple paths leading to it. """ n = 50 graph = MigrationGraph() for i in range(1, n + 1): graph.add_node(('app_a', str(i)), None) graph.add_node(('app_b', str(i)), None) graph.add_node(('app_c', str(i)), None) for i in range(1, n): graph.add_dependency(None, ('app_b', str(i)), ('app_a', str(i))) graph.add_dependency(None, ('app_c', str(i)), ('app_a', str(i))) graph.add_dependency(None, ('app_a', str(i + 1)), ('app_b', str(i))) graph.add_dependency(None, ('app_a', str(i + 1)), ('app_c', str(i))) plan = graph.forwards_plan(('app_a', str(n))) expected = [ (app, str(i)) for i in range(1, n) for app in ['app_a', 'app_c', 'app_b'] ] + [('app_a', str(n))] self.assertEqual(plan, expected) def test_plan_invalid_node(self): """ Tests for forwards/backwards_plan of nonexistent node. """ graph = MigrationGraph() message = "Node ('app_b', '0001') not a valid node" with self.assertRaisesMessage(NodeNotFoundError, message): graph.forwards_plan(("app_b", "0001")) with self.assertRaisesMessage(NodeNotFoundError, message): graph.backwards_plan(("app_b", "0001")) def test_missing_parent_nodes(self): """ Tests for missing parent nodes. 
""" # Build graph graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_node(("app_a", "0002"), None) graph.add_node(("app_a", "0003"), None) graph.add_node(("app_b", "0001"), None) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002")) graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) msg = "Migration app_a.0001 dependencies reference nonexistent parent node ('app_b', '0002')" with self.assertRaisesMessage(NodeNotFoundError, msg): graph.add_dependency("app_a.0001", ("app_a", "0001"), ("app_b", "0002")) def test_missing_child_nodes(self): """ Tests for missing child nodes. """ # Build graph graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) msg = "Migration app_a.0002 dependencies reference nonexistent child node ('app_a', '0002')" with self.assertRaisesMessage(NodeNotFoundError, msg): graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) def test_validate_consistency_missing_parent(self): graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_dependency("app_a.0001", ("app_a", "0001"), ("app_b", "0002"), skip_validation=True) msg = "Migration app_a.0001 dependencies reference nonexistent parent node ('app_b', '0002')" with self.assertRaisesMessage(NodeNotFoundError, msg): graph.validate_consistency() def test_validate_consistency_missing_child(self): graph = MigrationGraph() graph.add_node(("app_b", "0002"), None) graph.add_dependency("app_b.0002", ("app_a", "0001"), ("app_b", "0002"), skip_validation=True) msg = "Migration app_b.0002 dependencies reference nonexistent child node ('app_a', '0001')" with self.assertRaisesMessage(NodeNotFoundError, msg): graph.validate_consistency() def test_validate_consistency_no_error(self): graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_node(("app_b", "0002"), None) graph.add_dependency("app_a.0001", ("app_a", "0001"), ("app_b", "0002"), skip_validation=True) graph.validate_consistency() def test_validate_consistency_dummy(self): """ validate_consistency() raises an error if there's an isolated dummy node. """ msg = "app_a.0001 (req'd by app_b.0002) is missing!" graph = MigrationGraph() graph.add_dummy_node( key=("app_a", "0001"), origin="app_b.0002", error_message=msg ) with self.assertRaisesMessage(NodeNotFoundError, msg): graph.validate_consistency() def test_remove_replaced_nodes(self): """ Replaced nodes are properly removed and dependencies remapped. """ # Add some dummy nodes to be replaced. graph = MigrationGraph() graph.add_dummy_node(key=("app_a", "0001"), origin="app_a.0002", error_message="BAD!") graph.add_dummy_node(key=("app_a", "0002"), origin="app_b.0001", error_message="BAD!") graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"), skip_validation=True) # Add some normal parent and child nodes to test dependency remapping. graph.add_node(("app_c", "0001"), None) graph.add_node(("app_b", "0001"), None) graph.add_dependency("app_a.0001", ("app_a", "0001"), ("app_c", "0001"), skip_validation=True) graph.add_dependency("app_b.0001", ("app_b", "0001"), ("app_a", "0002"), skip_validation=True) # Try replacing before replacement node exists. msg = ( "Unable to find replacement node ('app_a', '0001_squashed_0002'). It was either" " never added to the migration graph, or has been removed." 
) with self.assertRaisesMessage(NodeNotFoundError, msg): graph.remove_replaced_nodes( replacement=("app_a", "0001_squashed_0002"), replaced=[("app_a", "0001"), ("app_a", "0002")] ) graph.add_node(("app_a", "0001_squashed_0002"), None) # Ensure `validate_consistency()` still raises an error at this stage. with self.assertRaisesMessage(NodeNotFoundError, "BAD!"): graph.validate_consistency() # Remove the dummy nodes. graph.remove_replaced_nodes( replacement=("app_a", "0001_squashed_0002"), replaced=[("app_a", "0001"), ("app_a", "0002")] ) # Ensure graph is now consistent and dependencies have been remapped graph.validate_consistency() parent_node = graph.node_map[("app_c", "0001")] replacement_node = graph.node_map[("app_a", "0001_squashed_0002")] child_node = graph.node_map[("app_b", "0001")] self.assertIn(parent_node, replacement_node.parents) self.assertIn(replacement_node, parent_node.children) self.assertIn(child_node, replacement_node.children) self.assertIn(replacement_node, child_node.parents) def test_remove_replacement_node(self): """ A replacement node is properly removed and child dependencies remapped. We assume parent dependencies are already correct. """ # Add some dummy nodes to be replaced. graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_node(("app_a", "0002"), None) graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) # Try removing replacement node before replacement node exists. msg = ( "Unable to remove replacement node ('app_a', '0001_squashed_0002'). It was" " either never added to the migration graph, or has been removed already." ) with self.assertRaisesMessage(NodeNotFoundError, msg): graph.remove_replacement_node( replacement=("app_a", "0001_squashed_0002"), replaced=[("app_a", "0001"), ("app_a", "0002")] ) graph.add_node(("app_a", "0001_squashed_0002"), None) # Add a child node to test dependency remapping. graph.add_node(("app_b", "0001"), None) graph.add_dependency("app_b.0001", ("app_b", "0001"), ("app_a", "0001_squashed_0002")) # Remove the replacement node. graph.remove_replacement_node( replacement=("app_a", "0001_squashed_0002"), replaced=[("app_a", "0001"), ("app_a", "0002")] ) # Ensure graph is consistent and child dependency has been remapped graph.validate_consistency() replaced_node = graph.node_map[("app_a", "0002")] child_node = graph.node_map[("app_b", "0001")] self.assertIn(child_node, replaced_node.children) self.assertIn(replaced_node, child_node.parents) # Ensure child dependency hasn't also gotten remapped to the other replaced node. other_replaced_node = graph.node_map[("app_a", "0001")] self.assertNotIn(child_node, other_replaced_node.children) self.assertNotIn(other_replaced_node, child_node.parents) def test_infinite_loop(self): """ Tests a complex dependency graph: app_a: 0001 <- \ app_b: 0001 <- x 0002 <- / \ app_c: 0001<- <------------- x 0002 And apply squashing on app_c. 
""" graph = MigrationGraph() graph.add_node(("app_a", "0001"), None) graph.add_node(("app_b", "0001"), None) graph.add_node(("app_b", "0002"), None) graph.add_node(("app_c", "0001_squashed_0002"), None) graph.add_dependency("app_b.0001", ("app_b", "0001"), ("app_c", "0001_squashed_0002")) graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_a", "0001")) graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001")) graph.add_dependency("app_c.0001_squashed_0002", ("app_c", "0001_squashed_0002"), ("app_b", "0002")) with self.assertRaises(CircularDependencyError): graph.ensure_not_cyclic() def test_stringify(self): graph = MigrationGraph() self.assertEqual(str(graph), "Graph: 0 nodes, 0 edges") graph.add_node(("app_a", "0001"), None) graph.add_node(("app_a", "0002"), None) graph.add_node(("app_a", "0003"), None) graph.add_node(("app_b", "0001"), None) graph.add_node(("app_b", "0002"), None) graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001")) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002")) graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_b", "0002")) self.assertEqual(str(graph), "Graph: 5 nodes, 3 edges") self.assertEqual(repr(graph), "<MigrationGraph: nodes=5, edges=3>") class NodeTests(SimpleTestCase): def test_node_repr(self): node = Node(('app_a', '0001')) self.assertEqual(repr(node), "<Node: ('app_a', '0001')>") def test_dummynode_repr(self): node = DummyNode( key=('app_a', '0001'), origin='app_a.0001', error_message='x is missing', ) self.assertEqual(repr(node), "<DummyNode: ('app_a', '0001')>")
f3dc4c9c745695698d97798ae78a04c04ffa33a9f1c1f07c51ece0099b3e2b3e
from django.core.exceptions import FieldDoesNotExist from django.db import connection, migrations, models, transaction from django.db.migrations.migration import Migration from django.db.migrations.operations import CreateModel from django.db.migrations.operations.fields import FieldOperation from django.db.migrations.state import ModelState, ProjectState from django.db.models.fields import NOT_PROVIDED from django.db.transaction import atomic from django.db.utils import IntegrityError from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature from .models import FoodManager, FoodQuerySet, UnicodeModel from .test_base import MigrationTestBase class Mixin: pass class OperationTestBase(MigrationTestBase): """ Common functions to help test operations. """ @classmethod def setUpClass(cls): super().setUpClass() cls._initial_table_names = frozenset(connection.introspection.table_names()) def tearDown(self): self.cleanup_test_tables() super().tearDown() def cleanup_test_tables(self): table_names = frozenset(connection.introspection.table_names()) - self._initial_table_names with connection.schema_editor() as editor: with connection.constraint_checks_disabled(): for table_name in table_names: editor.execute(editor.sql_delete_table % { 'table': editor.quote_name(table_name), }) def apply_operations(self, app_label, project_state, operations, atomic=True): migration = Migration('name', app_label) migration.operations = operations with connection.schema_editor(atomic=atomic) as editor: return migration.apply(project_state, editor) def unapply_operations(self, app_label, project_state, operations, atomic=True): migration = Migration('name', app_label) migration.operations = operations with connection.schema_editor(atomic=atomic) as editor: return migration.unapply(project_state, editor) def make_test_state(self, app_label, operation, **kwargs): """ Makes a test state using set_up_test_model and returns the original state and the state after the migration is applied. """ project_state = self.set_up_test_model(app_label, **kwargs) new_state = project_state.clone() operation.state_forwards(app_label, new_state) return project_state, new_state def set_up_test_model( self, app_label, second_model=False, third_model=False, index=False, multicol_index=False, related_model=False, mti_model=False, proxy_model=False, manager_model=False, unique_together=False, options=False, db_table=None, index_together=False, constraints=None): """ Creates a test model state and database table. 
""" # Make the "current" state model_options = { "swappable": "TEST_SWAP_MODEL", "index_together": [["weight", "pink"]] if index_together else [], "unique_together": [["pink", "weight"]] if unique_together else [], } if options: model_options["permissions"] = [("can_groom", "Can groom")] if db_table: model_options["db_table"] = db_table operations = [migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=3)), ("weight", models.FloatField()), ], options=model_options, )] if index: operations.append(migrations.AddIndex( "Pony", models.Index(fields=["pink"], name="pony_pink_idx") )) if multicol_index: operations.append(migrations.AddIndex( "Pony", models.Index(fields=["pink", "weight"], name="pony_test_idx") )) if constraints: for constraint in constraints: operations.append(migrations.AddConstraint( "Pony", constraint, )) if second_model: operations.append(migrations.CreateModel( "Stable", [ ("id", models.AutoField(primary_key=True)), ] )) if third_model: operations.append(migrations.CreateModel( "Van", [ ("id", models.AutoField(primary_key=True)), ] )) if related_model: operations.append(migrations.CreateModel( "Rider", [ ("id", models.AutoField(primary_key=True)), ("pony", models.ForeignKey("Pony", models.CASCADE)), ("friend", models.ForeignKey("self", models.CASCADE)) ], )) if mti_model: operations.append(migrations.CreateModel( "ShetlandPony", fields=[ ('pony_ptr', models.OneToOneField( 'Pony', models.CASCADE, auto_created=True, parent_link=True, primary_key=True, to_field='id', serialize=False, )), ("cuteness", models.IntegerField(default=1)), ], bases=['%s.Pony' % app_label], )) if proxy_model: operations.append(migrations.CreateModel( "ProxyPony", fields=[], options={"proxy": True}, bases=['%s.Pony' % app_label], )) if manager_model: operations.append(migrations.CreateModel( "Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] )) return self.apply_operations(app_label, ProjectState(), operations) class OperationTests(OperationTestBase): """ Tests running the operations and making sure they do what they say they do. Each test looks at their state changing, and then their database operation - both forwards and backwards. """ def test_create_model(self): """ Tests the CreateModel operation. Most other tests use this operation as part of setup, so check failures here first. 
""" operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) self.assertEqual(operation.describe(), "Create model Pony") # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["fields", "name"]) # And default manager not in set operation = migrations.CreateModel("Foo", fields=[], managers=[("objects", models.Manager())]) definition = operation.deconstruct() self.assertNotIn('managers', definition[2]) def test_create_model_with_duplicate_field_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value pink in CreateModel fields argument.'): migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.TextField()), ("pink", models.IntegerField(default=1)), ], ) def test_create_model_with_duplicate_base(self): message = 'Found duplicate value test_crmo.pony in CreateModel bases argument.' with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.Pony",), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.pony",), ) message = 'Found duplicate value migrations.unicodemodel in CreateModel bases argument.' with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, UnicodeModel,), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.unicodemodel',), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.UnicodeModel',), ) message = "Found duplicate value <class 'django.db.models.base.Model'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(models.Model, models.Model,), ) message = "Found duplicate value <class 'migrations.test_operations.Mixin'> in CreateModel bases argument." 
with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(Mixin, Mixin,), ) def test_create_model_with_duplicate_manager_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value objects in CreateModel managers argument.'): migrations.CreateModel( "Pony", fields=[], managers=[ ("objects", models.Manager()), ("objects", models.Manager()), ], ) def test_create_model_with_unique_after(self): """ Tests the CreateModel operation directly followed by an AlterUniqueTogether (bug #22844 - sqlite remake issues) """ operation1 = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) operation2 = migrations.CreateModel( "Rider", [ ("id", models.AutoField(primary_key=True)), ("number", models.IntegerField(default=1)), ("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)), ], ) operation3 = migrations.AlterUniqueTogether( "Rider", [ ("number", "pony"), ], ) # Test the database alteration project_state = ProjectState() self.assertTableNotExists("test_crmoua_pony") self.assertTableNotExists("test_crmoua_rider") with connection.schema_editor() as editor: new_state = project_state.clone() operation1.state_forwards("test_crmoua", new_state) operation1.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation2.state_forwards("test_crmoua", new_state) operation2.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation3.state_forwards("test_crmoua", new_state) operation3.database_forwards("test_crmoua", editor, project_state, new_state) self.assertTableExists("test_crmoua_pony") self.assertTableExists("test_crmoua_rider") def test_create_model_m2m(self): """ Test the creation of a model with a ManyToMany field and the auto-created "through" model. """ project_state = self.set_up_test_model("test_crmomm") operation = migrations.CreateModel( "Stable", [ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("Pony", related_name="stables")) ] ) # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_crmomm", new_state) # Test the database alteration self.assertTableNotExists("test_crmomm_stable_ponies") with connection.schema_editor() as editor: operation.database_forwards("test_crmomm", editor, project_state, new_state) self.assertTableExists("test_crmomm_stable") self.assertTableExists("test_crmomm_stable_ponies") self.assertColumnNotExists("test_crmomm_stable", "ponies") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_crmomm", "Pony") Stable = new_state.apps.get_model("test_crmomm", "Stable") stable = Stable.objects.create() p1 = Pony.objects.create(pink=False, weight=4.55) p2 = Pony.objects.create(pink=True, weight=5.43) stable.ponies.add(p1, p2) self.assertEqual(stable.ponies.count(), 2) stable.ponies.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmomm", editor, new_state, project_state) self.assertTableNotExists("test_crmomm_stable") self.assertTableNotExists("test_crmomm_stable_ponies") def test_create_model_inheritance(self): """ Tests the CreateModel operation on a multi-table inheritance setup. 
""" project_state = self.set_up_test_model("test_crmoih") # Test the state alteration operation = migrations.CreateModel( "ShetlandPony", [ ('pony_ptr', models.OneToOneField( 'test_crmoih.Pony', models.CASCADE, auto_created=True, primary_key=True, to_field='id', serialize=False, )), ("cuteness", models.IntegerField(default=1)), ], ) new_state = project_state.clone() operation.state_forwards("test_crmoih", new_state) self.assertIn(("test_crmoih", "shetlandpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crmoih_shetlandpony") with connection.schema_editor() as editor: operation.database_forwards("test_crmoih", editor, project_state, new_state) self.assertTableExists("test_crmoih_shetlandpony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmoih", editor, new_state, project_state) self.assertTableNotExists("test_crmoih_shetlandpony") def test_create_proxy_model(self): """ CreateModel ignores proxy models. """ project_state = self.set_up_test_model("test_crprmo") # Test the state alteration operation = migrations.CreateModel( "ProxyPony", [], options={"proxy": True}, bases=("test_crprmo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model ProxyPony") new_state = project_state.clone() operation.state_forwards("test_crprmo", new_state) self.assertIn(("test_crprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crprmo", editor, project_state, new_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crprmo", editor, new_state, project_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["bases", "fields", "name", "options"]) def test_create_unmanaged_model(self): """ CreateModel ignores unmanaged models. 
""" project_state = self.set_up_test_model("test_crummo") # Test the state alteration operation = migrations.CreateModel( "UnmanagedPony", [], options={"proxy": True}, bases=("test_crummo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony") new_state = project_state.clone() operation.state_forwards("test_crummo", new_state) self.assertIn(("test_crummo", "unmanagedpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crummo", editor, project_state, new_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crummo", editor, new_state, project_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") @skipUnlessDBFeature('supports_table_check_constraints') def test_create_model_with_constraint(self): where = models.Q(pink__gt=2) check_constraint = models.CheckConstraint(check=where, name='test_constraint_pony_pink_gt_2') operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=3)), ], options={'constraints': [check_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") with connection.cursor() as cursor: with self.assertRaises(IntegrityError): cursor.execute("INSERT INTO test_crmo_pony (id, pink) VALUES (1, 1)") # Test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # Test deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [check_constraint]) def test_create_model_with_partial_unique_constraint(self): partial_unique_constraint = models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ('weight', models.FloatField()), ], options={'constraints': [partial_unique_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists('test_crmo_pony') with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') # Test constraint works Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) if 
connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test reversal with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Test deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [partial_unique_constraint]) def test_create_model_managers(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_cmoma") # Test the state alteration operation = migrations.CreateModel( "Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Create model Food") new_state = project_state.clone() operation.state_forwards("test_cmoma", new_state) self.assertIn(("test_cmoma", "food"), new_state.models) managers = new_state.models["test_cmoma", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) def test_delete_model(self): """ Tests the DeleteModel operation. """ project_state = self.set_up_test_model("test_dlmo") # Test the state alteration operation = migrations.DeleteModel("Pony") self.assertEqual(operation.describe(), "Delete model Pony") new_state = project_state.clone() operation.state_forwards("test_dlmo", new_state) self.assertNotIn(("test_dlmo", "pony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dlmo", editor, project_state, new_state) self.assertTableNotExists("test_dlmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlmo", editor, new_state, project_state) self.assertTableExists("test_dlmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "DeleteModel") self.assertEqual(definition[1], []) self.assertEqual(list(definition[2]), ["name"]) def test_delete_proxy_model(self): """ Tests the DeleteModel operation ignores proxy models. 
""" project_state = self.set_up_test_model("test_dlprmo", proxy_model=True) # Test the state alteration operation = migrations.DeleteModel("ProxyPony") new_state = project_state.clone() operation.state_forwards("test_dlprmo", new_state) self.assertIn(("test_dlprmo", "proxypony"), project_state.models) self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") with connection.schema_editor() as editor: operation.database_forwards("test_dlprmo", editor, project_state, new_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlprmo", editor, new_state, project_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") def test_delete_mti_model(self): project_state = self.set_up_test_model('test_dlmtimo', mti_model=True) # Test the state alteration operation = migrations.DeleteModel('ShetlandPony') new_state = project_state.clone() operation.state_forwards('test_dlmtimo', new_state) self.assertIn(('test_dlmtimo', 'shetlandpony'), project_state.models) self.assertNotIn(('test_dlmtimo', 'shetlandpony'), new_state.models) # Test the database alteration self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') with connection.schema_editor() as editor: operation.database_forwards('test_dlmtimo', editor, project_state, new_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableNotExists('test_dlmtimo_shetlandpony') # And test reversal with connection.schema_editor() as editor: operation.database_backwards('test_dlmtimo', editor, new_state, project_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') def test_rename_model(self): """ Tests the RenameModel operation. 
""" project_state = self.set_up_test_model("test_rnmo", related_model=True) # Test the state alteration operation = migrations.RenameModel("Pony", "Horse") self.assertEqual(operation.describe(), "Rename model Pony to Horse") # Test initial state and database self.assertIn(("test_rnmo", "pony"), project_state.models) self.assertNotIn(("test_rnmo", "horse"), project_state.models) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate forwards new_state = project_state.clone() atomic_rename = connection.features.supports_atomic_references_rename new_state = self.apply_operations("test_rnmo", new_state, [operation], atomic=atomic_rename) # Test new state and database self.assertNotIn(("test_rnmo", "pony"), new_state.models) self.assertIn(("test_rnmo", "horse"), new_state.models) # RenameModel also repoints all incoming FKs and M2Ms self.assertEqual("test_rnmo.Horse", new_state.models["test_rnmo", "rider"].fields[1][1].remote_field.model) self.assertTableNotExists("test_rnmo_pony") self.assertTableExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate backwards original_state = self.unapply_operations("test_rnmo", project_state, [operation], atomic=atomic_rename) # Test original state and database self.assertIn(("test_rnmo", "pony"), original_state.models) self.assertNotIn(("test_rnmo", "horse"), original_state.models) self.assertEqual("Pony", original_state.models["test_rnmo", "rider"].fields[1][1].remote_field.model) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'old_name': "Pony", 'new_name': "Horse"}) def test_rename_model_state_forwards(self): """ RenameModel operations shouldn't trigger the caching of rendered apps on state without prior apps. """ state = ProjectState() state.add_model(ModelState('migrations', 'Foo', [])) operation = migrations.RenameModel('Foo', 'Bar') operation.state_forwards('migrations', state) self.assertNotIn('apps', state.__dict__) self.assertNotIn(('migrations', 'foo'), state.models) self.assertIn(('migrations', 'bar'), state.models) # Now with apps cached. apps = state.apps operation = migrations.RenameModel('Bar', 'Foo') operation.state_forwards('migrations', state) self.assertIs(state.apps, apps) self.assertNotIn(('migrations', 'bar'), state.models) self.assertIn(('migrations', 'foo'), state.models) def test_rename_model_with_self_referential_fk(self): """ Tests the RenameModel operation on model with self referential FK. 
""" project_state = self.set_up_test_model("test_rmwsrf", related_model=True) # Test the state alteration operation = migrations.RenameModel("Rider", "HorseRider") self.assertEqual(operation.describe(), "Rename model Rider to HorseRider") new_state = project_state.clone() operation.state_forwards("test_rmwsrf", new_state) self.assertNotIn(("test_rmwsrf", "rider"), new_state.models) self.assertIn(("test_rmwsrf", "horserider"), new_state.models) # Remember, RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( 'self', new_state.models["test_rmwsrf", "horserider"].fields[2][1].remote_field.model ) HorseRider = new_state.apps.get_model('test_rmwsrf', 'horserider') self.assertIs(HorseRider._meta.get_field('horserider').remote_field.model, HorseRider) # Test the database alteration self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_forwards("test_rmwsrf", editor, project_state, new_state) self.assertTableNotExists("test_rmwsrf_rider") self.assertTableExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_horserider", "id")) # And test reversal with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_backwards("test_rmwsrf", editor, new_state, project_state) self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) def test_rename_model_with_superclass_fk(self): """ Tests the RenameModel operation on a model which has a superclass that has a foreign key. 
""" project_state = self.set_up_test_model("test_rmwsc", related_model=True, mti_model=True) # Test the state alteration operation = migrations.RenameModel("ShetlandPony", "LittleHorse") self.assertEqual(operation.describe(), "Rename model ShetlandPony to LittleHorse") new_state = project_state.clone() operation.state_forwards("test_rmwsc", new_state) self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models) self.assertIn(("test_rmwsc", "littlehorse"), new_state.models) # RenameModel shouldn't repoint the superclass's relations, only local ones self.assertEqual( project_state.models["test_rmwsc", "rider"].fields[1][1].remote_field.model, new_state.models["test_rmwsc", "rider"].fields[1][1].remote_field.model ) # Before running the migration we have a table for Shetland Pony, not Little Horse self.assertTableExists("test_rmwsc_shetlandpony") self.assertTableNotExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # and the foreign key on rider points to pony, not shetland pony self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id")) with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: operation.database_forwards("test_rmwsc", editor, project_state, new_state) # Now we have a little horse table, not shetland pony self.assertTableNotExists("test_rmwsc_shetlandpony") self.assertTableExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # but the Foreign keys still point at pony, not little horse self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id")) def test_rename_model_with_self_referential_m2m(self): app_label = "test_rename_model_with_self_referential_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("ReflexivePony", fields=[ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("self")), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("ReflexivePony", "ReflexivePony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "ReflexivePony2") pony = Pony.objects.create() pony.ponies.add(pony) def test_rename_model_with_m2m(self): app_label = "test_rename_model_with_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Pony", "Pony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "Pony2") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) 
        self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2)

    def test_rename_m2m_target_model(self):
        app_label = "test_rename_m2m_target_model"
        project_state = self.apply_operations(app_label, ProjectState(), operations=[
            migrations.CreateModel("Rider", fields=[
                ("id", models.AutoField(primary_key=True)),
            ]),
            migrations.CreateModel("Pony", fields=[
                ("id", models.AutoField(primary_key=True)),
                ("riders", models.ManyToManyField("Rider")),
            ]),
        ])
        Pony = project_state.apps.get_model(app_label, "Pony")
        Rider = project_state.apps.get_model(app_label, "Rider")
        pony = Pony.objects.create()
        rider = Rider.objects.create()
        pony.riders.add(rider)
        project_state = self.apply_operations(app_label, project_state, operations=[
            migrations.RenameModel("Rider", "Rider2"),
        ], atomic=connection.features.supports_atomic_references_rename)
        Pony = project_state.apps.get_model(app_label, "Pony")
        Rider = project_state.apps.get_model(app_label, "Rider2")
        pony = Pony.objects.create()
        rider = Rider.objects.create()
        pony.riders.add(rider)
        self.assertEqual(Pony.objects.count(), 2)
        self.assertEqual(Rider.objects.count(), 2)
        self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2)

    def test_rename_m2m_through_model(self):
        app_label = "test_rename_through"
        project_state = self.apply_operations(app_label, ProjectState(), operations=[
            migrations.CreateModel("Rider", fields=[
                ("id", models.AutoField(primary_key=True)),
            ]),
            migrations.CreateModel("Pony", fields=[
                ("id", models.AutoField(primary_key=True)),
            ]),
            migrations.CreateModel("PonyRider", fields=[
                ("id", models.AutoField(primary_key=True)),
                ("rider", models.ForeignKey("test_rename_through.Rider", models.CASCADE)),
                ("pony", models.ForeignKey("test_rename_through.Pony", models.CASCADE)),
            ]),
            migrations.AddField(
                "Pony",
                "riders",
                models.ManyToManyField("test_rename_through.Rider", through="test_rename_through.PonyRider"),
            ),
        ])
        Pony = project_state.apps.get_model(app_label, "Pony")
        Rider = project_state.apps.get_model(app_label, "Rider")
        PonyRider = project_state.apps.get_model(app_label, "PonyRider")
        pony = Pony.objects.create()
        rider = Rider.objects.create()
        PonyRider.objects.create(pony=pony, rider=rider)
        project_state = self.apply_operations(app_label, project_state, operations=[
            migrations.RenameModel("PonyRider", "PonyRider2"),
        ])
        Pony = project_state.apps.get_model(app_label, "Pony")
        Rider = project_state.apps.get_model(app_label, "Rider")
        PonyRider = project_state.apps.get_model(app_label, "PonyRider2")
        pony = Pony.objects.first()
        rider = Rider.objects.create()
        PonyRider.objects.create(pony=pony, rider=rider)
        self.assertEqual(Pony.objects.count(), 1)
        self.assertEqual(Rider.objects.count(), 2)
        self.assertEqual(PonyRider.objects.count(), 2)
        self.assertEqual(pony.riders.count(), 2)

    def test_rename_m2m_model_after_rename_field(self):
        """RenameModel renames a many-to-many column after a RenameField."""
        app_label = 'test_rename_multiple'
        project_state = self.apply_operations(app_label, ProjectState(), operations=[
            migrations.CreateModel('Pony', fields=[
                ('id', models.AutoField(primary_key=True)),
                ('name', models.CharField(max_length=20)),
            ]),
            migrations.CreateModel('Rider', fields=[
                ('id', models.AutoField(primary_key=True)),
                ('pony', models.ForeignKey('test_rename_multiple.Pony', models.CASCADE)),
            ]),
            migrations.CreateModel('PonyRider', fields=[
                ('id', models.AutoField(primary_key=True)),
                ('riders', models.ManyToManyField('Rider')),
            ]),
            migrations.RenameField(model_name='pony', old_name='name', new_name='fancy_name'),
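            # The RenameModel below runs after the field rename in the same
            # batch, which is the regression this test guards against.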
            migrations.RenameModel(old_name='Rider', new_name='Jockey'),
        ], atomic=connection.features.supports_atomic_references_rename)
        Pony = project_state.apps.get_model(app_label, 'Pony')
        Jockey = project_state.apps.get_model(app_label, 'Jockey')
        PonyRider = project_state.apps.get_model(app_label, 'PonyRider')
        # No "no such column" error means the column was renamed correctly.
        pony = Pony.objects.create(fancy_name='a good name')
        jockey = Jockey.objects.create(pony=pony)
        ponyrider = PonyRider.objects.create()
        ponyrider.riders.add(jockey)

    def test_add_field(self):
        """
        Tests the AddField operation.
        """
        # Test the state alteration
        operation = migrations.AddField(
            "Pony",
            "height",
            models.FloatField(null=True, default=5),
        )
        self.assertEqual(operation.describe(), "Add field height to Pony")
        project_state, new_state = self.make_test_state("test_adfl", operation)
        self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4)
        field = [
            f for n, f in new_state.models["test_adfl", "pony"].fields
            if n == "height"
        ][0]
        self.assertEqual(field.default, 5)
        # Test the database alteration
        self.assertColumnNotExists("test_adfl_pony", "height")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adfl", editor, project_state, new_state)
        self.assertColumnExists("test_adfl_pony", "height")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_adfl", editor, new_state, project_state)
        self.assertColumnNotExists("test_adfl_pony", "height")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddField")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])

    def test_add_charfield(self):
        """
        Tests the AddField operation on CharField.
        """
        project_state = self.set_up_test_model("test_adchfl")

        Pony = project_state.apps.get_model("test_adchfl", "Pony")
        pony = Pony.objects.create(weight=42)

        new_state = self.apply_operations("test_adchfl", project_state, [
            migrations.AddField(
                "Pony",
                "text",
                models.CharField(max_length=10, default="some text"),
            ),
            migrations.AddField(
                "Pony",
                "empty",
                models.CharField(max_length=10, default=""),
            ),
            # If not properly quoted digits would be interpreted as an int.
            migrations.AddField(
                "Pony",
                "digits",
                models.CharField(max_length=10, default="42"),
            ),
            # Manual quoting is fragile and could trip on quotes. Refs #xyz.
            migrations.AddField(
                "Pony",
                "quotes",
                models.CharField(max_length=10, default='"\'"'),
            ),
        ])

        Pony = new_state.apps.get_model("test_adchfl", "Pony")
        pony = Pony.objects.get(pk=pony.pk)
        self.assertEqual(pony.text, "some text")
        self.assertEqual(pony.empty, "")
        self.assertEqual(pony.digits, "42")
        self.assertEqual(pony.quotes, '"\'"')

    def test_add_textfield(self):
        """
        Tests the AddField operation on TextField.
        """
        project_state = self.set_up_test_model("test_adtxtfl")

        Pony = project_state.apps.get_model("test_adtxtfl", "Pony")
        pony = Pony.objects.create(weight=42)

        new_state = self.apply_operations("test_adtxtfl", project_state, [
            migrations.AddField(
                "Pony",
                "text",
                models.TextField(default="some text"),
            ),
            migrations.AddField(
                "Pony",
                "empty",
                models.TextField(default=""),
            ),
            # If not properly quoted digits would be interpreted as an int.
            migrations.AddField(
                "Pony",
                "digits",
                models.TextField(default="42"),
            ),
            # Manual quoting is fragile and could trip on quotes. Refs #xyz.
            migrations.AddField(
                "Pony",
                "quotes",
                models.TextField(default='"\'"'),
            ),
        ])

        Pony = new_state.apps.get_model("test_adtxtfl", "Pony")
        pony = Pony.objects.get(pk=pony.pk)
        self.assertEqual(pony.text, "some text")
        self.assertEqual(pony.empty, "")
        self.assertEqual(pony.digits, "42")
        self.assertEqual(pony.quotes, '"\'"')

    def test_add_binaryfield(self):
        """
        Tests the AddField operation on BinaryField.
        """
        project_state = self.set_up_test_model("test_adbinfl")

        Pony = project_state.apps.get_model("test_adbinfl", "Pony")
        pony = Pony.objects.create(weight=42)

        new_state = self.apply_operations("test_adbinfl", project_state, [
            migrations.AddField(
                "Pony",
                "blob",
                models.BinaryField(default=b"some text"),
            ),
            migrations.AddField(
                "Pony",
                "empty",
                models.BinaryField(default=b""),
            ),
            # If not properly quoted digits would be interpreted as an int.
            migrations.AddField(
                "Pony",
                "digits",
                models.BinaryField(default=b"42"),
            ),
            # Manual quoting is fragile and could trip on quotes. Refs #xyz.
            migrations.AddField(
                "Pony",
                "quotes",
                models.BinaryField(default=b'"\'"'),
            ),
        ])

        Pony = new_state.apps.get_model("test_adbinfl", "Pony")
        pony = Pony.objects.get(pk=pony.pk)
        # SQLite returns buffer/memoryview, cast to bytes for checking.
        self.assertEqual(bytes(pony.blob), b"some text")
        self.assertEqual(bytes(pony.empty), b"")
        self.assertEqual(bytes(pony.digits), b"42")
        self.assertEqual(bytes(pony.quotes), b'"\'"')

    def test_column_name_quoting(self):
        """
        Column names that are SQL keywords shouldn't cause problems when used
        in migrations (#22168).
        """
        project_state = self.set_up_test_model("test_regr22168")
        operation = migrations.AddField(
            "Pony",
            "order",
            models.IntegerField(default=0),
        )
        new_state = project_state.clone()
        operation.state_forwards("test_regr22168", new_state)
        with connection.schema_editor() as editor:
            operation.database_forwards("test_regr22168", editor, project_state, new_state)
        self.assertColumnExists("test_regr22168_pony", "order")

    def test_add_field_preserve_default(self):
        """
        Tests the AddField operation's state alteration
        when preserve_default = False.
        """
        project_state = self.set_up_test_model("test_adflpd")
        # Test the state alteration
        operation = migrations.AddField(
            "Pony",
            "height",
            models.FloatField(null=True, default=4),
            preserve_default=False,
        )
        new_state = project_state.clone()
        operation.state_forwards("test_adflpd", new_state)
        self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4)
        field = [
            f for n, f in new_state.models["test_adflpd", "pony"].fields
            if n == "height"
        ][0]
        self.assertEqual(field.default, NOT_PROVIDED)
        # Test the database alteration
        project_state.apps.get_model("test_adflpd", "pony").objects.create(
            weight=4,
        )
        self.assertColumnNotExists("test_adflpd_pony", "height")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adflpd", editor, project_state, new_state)
        self.assertColumnExists("test_adflpd_pony", "height")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddField")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["field", "model_name", "name", "preserve_default"])

    def test_add_field_m2m(self):
        """
        Tests the AddField operation with a ManyToManyField.
        """
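        # Adding an M2M creates a new join table rather than a column on the
        # model's own table, which is what the assertions below verify.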
""" project_state = self.set_up_test_model("test_adflmm", second_model=True) # Test the state alteration operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) new_state = project_state.clone() operation.state_forwards("test_adflmm", new_state) self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4) # Test the database alteration self.assertTableNotExists("test_adflmm_pony_stables") with connection.schema_editor() as editor: operation.database_forwards("test_adflmm", editor, project_state, new_state) self.assertTableExists("test_adflmm_pony_stables") self.assertColumnNotExists("test_adflmm_pony", "stables") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_adflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.stables.create() self.assertEqual(p.stables.count(), 1) p.stables.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adflmm", editor, new_state, project_state) self.assertTableNotExists("test_adflmm_pony_stables") def test_alter_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertFalse(Pony._meta.get_field('stables').blank) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField( "Pony", "stables", models.ManyToManyField(to="Stable", related_name="ponies", blank=True) ) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertTrue(Pony._meta.get_field('stables').blank) def test_repoint_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True, third_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "places", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField("Pony", "places", models.ManyToManyField(to="Van", related_name="ponies")) ]) # Ensure the new field actually works Pony = project_state.apps.get_model("test_alflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.places.create() self.assertEqual(p.places.count(), 1) p.places.all().delete() def test_remove_field_m2m(self): project_state = self.set_up_test_model("test_rmflmm", second_model=True) project_state = self.apply_operations("test_rmflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) self.assertTableExists("test_rmflmm_pony_stables") with_field_state = project_state.clone() operations = [migrations.RemoveField("Pony", "stables")] project_state = self.apply_operations("test_rmflmm", project_state, operations=operations) self.assertTableNotExists("test_rmflmm_pony_stables") # And test reversal self.unapply_operations("test_rmflmm", with_field_state, operations=operations) self.assertTableExists("test_rmflmm_pony_stables") def test_remove_field_m2m_with_through(self): project_state = self.set_up_test_model("test_rmflmmwt", second_model=True) self.assertTableNotExists("test_rmflmmwt_ponystables") project_state = 
            migrations.CreateModel("PonyStables", fields=[
                ("pony", models.ForeignKey('test_rmflmmwt.Pony', models.CASCADE)),
                ("stable", models.ForeignKey('test_rmflmmwt.Stable', models.CASCADE)),
            ]),
            migrations.AddField(
                "Pony", "stables",
                models.ManyToManyField("Stable", related_name="ponies", through='test_rmflmmwt.PonyStables')
            )
        ])
        self.assertTableExists("test_rmflmmwt_ponystables")

        operations = [migrations.RemoveField("Pony", "stables"), migrations.DeleteModel("PonyStables")]
        self.apply_operations("test_rmflmmwt", project_state, operations=operations)

    def test_remove_field(self):
        """
        Tests the RemoveField operation.
        """
        project_state = self.set_up_test_model("test_rmfl")
        # Test the state alteration
        operation = migrations.RemoveField("Pony", "pink")
        self.assertEqual(operation.describe(), "Remove field pink from Pony")
        new_state = project_state.clone()
        operation.state_forwards("test_rmfl", new_state)
        self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2)
        # Test the database alteration
        self.assertColumnExists("test_rmfl_pony", "pink")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_rmfl", editor, project_state, new_state)
        self.assertColumnNotExists("test_rmfl_pony", "pink")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_rmfl", editor, new_state, project_state)
        self.assertColumnExists("test_rmfl_pony", "pink")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RemoveField")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'model_name': "Pony", 'name': 'pink'})

    def test_remove_fk(self):
        """
        Tests the RemoveField operation on a foreign key.
        """
        project_state = self.set_up_test_model("test_rfk", related_model=True)
        self.assertColumnExists("test_rfk_rider", "pony_id")
        operation = migrations.RemoveField("Rider", "pony")

        new_state = project_state.clone()
        operation.state_forwards("test_rfk", new_state)
        with connection.schema_editor() as editor:
            operation.database_forwards("test_rfk", editor, project_state, new_state)
        self.assertColumnNotExists("test_rfk_rider", "pony_id")
        with connection.schema_editor() as editor:
            operation.database_backwards("test_rfk", editor, new_state, project_state)
        self.assertColumnExists("test_rfk_rider", "pony_id")

    def test_alter_model_table(self):
        """
        Tests the AlterModelTable operation.
        """
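        # AlterModelTable renames the underlying table; in state, only the
        # model's Meta.db_table option changes.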
""" project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony_2") self.assertEqual(operation.describe(), "Rename table for Pony to test_almota_pony_2") new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2") # Test the database alteration self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableNotExists("test_almota_pony") self.assertTableExists("test_almota_pony_2") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelTable") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'table': "test_almota_pony_2"}) def test_alter_model_table_none(self): """ Tests the AlterModelTable operation if the table name is set to None. """ operation = migrations.AlterModelTable("Pony", None) self.assertEqual(operation.describe(), "Rename table for Pony to (default)") def test_alter_model_table_noop(self): """ Tests the AlterModelTable operation if the table name is not changed. """ project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony") new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony") # Test the database alteration self.assertTableExists("test_almota_pony") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableExists("test_almota_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") def test_alter_model_table_m2m(self): """ AlterModelTable should rename auto-generated M2M tables. """ app_label = "test_talflmltlm2m" pony_db_table = 'pony_foo' project_state = self.set_up_test_model(app_label, second_model=True, db_table=pony_db_table) # Add the M2M field first_state = project_state.clone() operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable")) operation.state_forwards(app_label, first_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, first_state) original_m2m_table = "%s_%s" % (pony_db_table, "stables") new_m2m_table = "%s_%s" % (app_label, "pony_stables") self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) # Rename the Pony db_table which should also rename the m2m table. 
        second_state = first_state.clone()
        operation = migrations.AlterModelTable(name='pony', table=None)
        operation.state_forwards(app_label, second_state)
        atomic_rename = connection.features.supports_atomic_references_rename
        with connection.schema_editor(atomic=atomic_rename) as editor:
            operation.database_forwards(app_label, editor, first_state, second_state)
        self.assertTableExists(new_m2m_table)
        self.assertTableNotExists(original_m2m_table)
        # And test reversal
        with connection.schema_editor(atomic=atomic_rename) as editor:
            operation.database_backwards(app_label, editor, second_state, first_state)
        self.assertTableExists(original_m2m_table)
        self.assertTableNotExists(new_m2m_table)

    def test_alter_field(self):
        """
        Tests the AlterField operation.
        """
        project_state = self.set_up_test_model("test_alfl")
        # Test the state alteration
        operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True))
        self.assertEqual(operation.describe(), "Alter field pink on Pony")
        new_state = project_state.clone()
        operation.state_forwards("test_alfl", new_state)
        self.assertIs(project_state.models["test_alfl", "pony"].get_field_by_name("pink").null, False)
        self.assertIs(new_state.models["test_alfl", "pony"].get_field_by_name("pink").null, True)
        # Test the database alteration
        self.assertColumnNotNull("test_alfl_pony", "pink")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_alfl", editor, project_state, new_state)
        self.assertColumnNull("test_alfl_pony", "pink")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_alfl", editor, new_state, project_state)
        self.assertColumnNotNull("test_alfl_pony", "pink")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AlterField")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])

    def test_alter_field_pk(self):
        """
        Tests the AlterField operation on primary keys (for things like PostgreSQL's SERIAL weirdness)
        """
        project_state = self.set_up_test_model("test_alflpk")
        # Test the state alteration
        operation = migrations.AlterField("Pony", "id", models.IntegerField(primary_key=True))
        new_state = project_state.clone()
        operation.state_forwards("test_alflpk", new_state)
        self.assertIsInstance(project_state.models["test_alflpk", "pony"].get_field_by_name("id"), models.AutoField)
        self.assertIsInstance(new_state.models["test_alflpk", "pony"].get_field_by_name("id"), models.IntegerField)
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards("test_alflpk", editor, project_state, new_state)
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_alflpk", editor, new_state, project_state)

    @skipUnlessDBFeature('supports_foreign_keys')
    def test_alter_field_pk_fk(self):
        """
        Tests the AlterField operation on primary keys changes any FKs pointing to it.
        """
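        # Changing the PK's type must cascade to the column type of every FK
        # pointing at it; assertIdTypeEqualsFkType() below verifies this via
        # introspection before and after each schema change.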
""" project_state = self.set_up_test_model("test_alflpkfk", related_model=True) # Test the state alteration operation = migrations.AlterField("Pony", "id", models.FloatField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpkfk", new_state) self.assertIsInstance(project_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.AutoField) self.assertIsInstance(new_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.FloatField) def assertIdTypeEqualsFkType(): with connection.cursor() as cursor: id_type, id_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_pony") if c.name == "id" ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_rider") if c.name == "pony_id" ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_null, fk_null) assertIdTypeEqualsFkType() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpkfk", editor, project_state, new_state) assertIdTypeEqualsFkType() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpkfk", editor, new_state, project_state) assertIdTypeEqualsFkType() def test_alter_field_reloads_state_on_fk_target_changes(self): """ If AlterField doesn't reload state appropriately, the second AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. """ app_label = 'alter_alter_field_reloads_state_on_fk_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.AlterField('Rider', 'id', models.CharField(primary_key=True, max_length=99)), migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)), ]) def test_alter_field_reloads_state_on_fk_with_to_field_target_changes(self): """ If AlterField doesn't reload state appropriately, the second AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. 
""" app_label = 'alter_alter_field_reloads_state_on_fk_with_to_field_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('slug', models.CharField(unique=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='slug')), ('slug', models.CharField(unique=True, max_length=100)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE, to_field='slug')), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.AlterField('Rider', 'slug', models.CharField(unique=True, max_length=99)), migrations.AlterField('Pony', 'slug', models.CharField(unique=True, max_length=99)), ]) def test_rename_field_reloads_state_on_fk_target_changes(self): """ If RenameField doesn't reload state appropriately, the AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. """ app_label = 'alter_rename_field_reloads_state_on_fk_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameField('Rider', 'id', 'id2'), migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)), ], atomic=connection.features.supports_atomic_references_rename) def test_rename_field(self): """ Tests the RenameField operation. 
""" project_state = self.set_up_test_model("test_rnfl", unique_together=True, index_together=True) # Test the state alteration operation = migrations.RenameField("Pony", "pink", "blue") self.assertEqual(operation.describe(), "Rename field pink on Pony to blue") new_state = project_state.clone() operation.state_forwards("test_rnfl", new_state) self.assertIn("blue", [n for n, f in new_state.models["test_rnfl", "pony"].fields]) self.assertNotIn("pink", [n for n, f in new_state.models["test_rnfl", "pony"].fields]) # Make sure the unique_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) # Make sure the index_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['index_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['index_together'][0]) # Test the database alteration self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") with connection.schema_editor() as editor: operation.database_forwards("test_rnfl", editor, project_state, new_state) self.assertColumnExists("test_rnfl_pony", "blue") self.assertColumnNotExists("test_rnfl_pony", "pink") # Ensure the unique constraint has been ported over with connection.cursor() as cursor: cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_rnfl_pony") # Ensure the index constraint has been ported over self.assertIndexExists("test_rnfl_pony", ["weight", "blue"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rnfl", editor, new_state, project_state) self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") # Ensure the index constraint has been reset self.assertIndexExists("test_rnfl_pony", ["weight", "pink"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'old_name': "pink", 'new_name': "blue"}) def test_rename_missing_field(self): state = ProjectState() state.add_model(ModelState('app', 'model', [])) with self.assertRaisesMessage(FieldDoesNotExist, "app.model has no field named 'field'"): migrations.RenameField('model', 'field', 'new_field').state_forwards('app', state) def test_rename_referenced_field_state_forward(self): state = ProjectState() state.add_model(ModelState('app', 'Model', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField(unique=True)), ])) state.add_model(ModelState('app', 'OtherModel', [ ('id', models.AutoField(primary_key=True)), ('fk', models.ForeignKey('Model', models.CASCADE, to_field='field')), ('fo', models.ForeignObject('Model', models.CASCADE, from_fields=('fk',), to_fields=('field',))), ])) operation = migrations.RenameField('Model', 'field', 'renamed') new_state = state.clone() operation.state_forwards('app', new_state) self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].remote_field.field_name, 'renamed') self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].from_fields, ['self']) self.assertEqual(new_state.models['app', 
        self.assertEqual(new_state.models['app', 'othermodel'].fields[2][1].from_fields, ('fk',))
        self.assertEqual(new_state.models['app', 'othermodel'].fields[2][1].to_fields, ('renamed',))
        operation = migrations.RenameField('OtherModel', 'fk', 'renamed_fk')
        new_state = state.clone()
        operation.state_forwards('app', new_state)
        self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].remote_field.field_name, 'renamed')
        self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].from_fields, ('self',))
        self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].to_fields, ('renamed',))
        self.assertEqual(new_state.models['app', 'othermodel'].fields[2][1].from_fields, ('renamed_fk',))
        self.assertEqual(new_state.models['app', 'othermodel'].fields[2][1].to_fields, ('renamed',))

    def test_alter_unique_together(self):
        """
        Tests the AlterUniqueTogether operation.
        """
        project_state = self.set_up_test_model("test_alunto")
        # Test the state alteration
        operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")])
        self.assertEqual(operation.describe(), "Alter unique_together for Pony (1 constraint(s))")
        new_state = project_state.clone()
        operation.state_forwards("test_alunto", new_state)
        self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0)
        self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1)
        # Make sure we can insert duplicate rows
        with connection.cursor() as cursor:
            cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
            cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
            cursor.execute("DELETE FROM test_alunto_pony")
            # Test the database alteration
            with connection.schema_editor() as editor:
                operation.database_forwards("test_alunto", editor, project_state, new_state)
            cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
            with self.assertRaises(IntegrityError):
                with atomic():
                    cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
            cursor.execute("DELETE FROM test_alunto_pony")
            # And test reversal
            with connection.schema_editor() as editor:
                operation.database_backwards("test_alunto", editor, new_state, project_state)
            cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
            cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
            cursor.execute("DELETE FROM test_alunto_pony")
        # Test flat unique_together
        operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight"))
        operation.state_forwards("test_alunto", new_state)
        self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1)
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AlterUniqueTogether")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'name': "Pony", 'unique_together': {("pink", "weight")}})

    def test_alter_unique_together_remove(self):
        operation = migrations.AlterUniqueTogether("Pony", None)
        self.assertEqual(operation.describe(), "Alter unique_together for Pony (0 constraint(s))")

    def test_add_index(self):
        """
        Test the AddIndex operation.
        """
        project_state = self.set_up_test_model("test_adin")
        msg = (
            "Indexes passed to AddIndex operations require a name argument. "
            "<Index: fields='pink'> doesn't have one."
        )
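        # AddIndex refuses unnamed indexes: the operation needs a stable name
        # so the index can be referenced later, e.g. when reversing.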
        with self.assertRaisesMessage(ValueError, msg):
            migrations.AddIndex("Pony", models.Index(fields=["pink"]))
        index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx")
        operation = migrations.AddIndex("Pony", index)
        self.assertEqual(operation.describe(), "Create index test_adin_pony_pink_idx on field(s) pink of model Pony")
        new_state = project_state.clone()
        operation.state_forwards("test_adin", new_state)
        # Test the database alteration
        self.assertEqual(len(new_state.models["test_adin", "pony"].options['indexes']), 1)
        self.assertIndexNotExists("test_adin_pony", ["pink"])
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adin", editor, project_state, new_state)
        self.assertIndexExists("test_adin_pony", ["pink"])
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_adin", editor, new_state, project_state)
        self.assertIndexNotExists("test_adin_pony", ["pink"])
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddIndex")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'model_name': "Pony", 'index': index})

    def test_remove_index(self):
        """
        Test the RemoveIndex operation.
        """
        project_state = self.set_up_test_model("test_rmin", multicol_index=True)
        self.assertTableExists("test_rmin_pony")
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
        operation = migrations.RemoveIndex("Pony", "pony_test_idx")
        self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony")
        new_state = project_state.clone()
        operation.state_forwards("test_rmin", new_state)
        # Test the state alteration
        self.assertEqual(len(new_state.models["test_rmin", "pony"].options['indexes']), 0)
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards("test_rmin", editor, project_state, new_state)
        self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_rmin", editor, new_state, project_state)
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RemoveIndex")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'model_name': "Pony", 'name': "pony_test_idx"})

        # Also test a field dropped with index - sqlite remake issue
        operations = [
            migrations.RemoveIndex("Pony", "pony_test_idx"),
            migrations.RemoveField("Pony", "pink"),
        ]
        self.assertColumnExists("test_rmin_pony", "pink")
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
        # Test database alteration
        new_state = project_state.clone()
        self.apply_operations('test_rmin', new_state, operations=operations)
        self.assertColumnNotExists("test_rmin_pony", "pink")
        self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
        # And test reversal
        self.unapply_operations("test_rmin", project_state, operations=operations)
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])

    def test_add_index_state_forwards(self):
        project_state = self.set_up_test_model('test_adinsf')
        index = models.Index(fields=['pink'], name='test_adinsf_pony_pink_idx')
        old_model = project_state.apps.get_model('test_adinsf', 'Pony')
        new_state = project_state.clone()

        operation = migrations.AddIndex('Pony', index)
        operation.state_forwards('test_adinsf', new_state)
        new_model = new_state.apps.get_model('test_adinsf', 'Pony')
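        # state_forwards() must invalidate the cached rendered model so that
        # the new state's apps registry reflects the added index.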
        self.assertIsNot(old_model, new_model)

    def test_remove_index_state_forwards(self):
        project_state = self.set_up_test_model('test_rminsf')
        index = models.Index(fields=['pink'], name='test_rminsf_pony_pink_idx')
        migrations.AddIndex('Pony', index).state_forwards('test_rminsf', project_state)
        old_model = project_state.apps.get_model('test_rminsf', 'Pony')
        new_state = project_state.clone()

        operation = migrations.RemoveIndex('Pony', 'test_rminsf_pony_pink_idx')
        operation.state_forwards('test_rminsf', new_state)
        new_model = new_state.apps.get_model('test_rminsf', 'Pony')
        self.assertIsNot(old_model, new_model)

    def test_alter_field_with_index(self):
        """
        Test AlterField operation with an index to ensure indexes created via
        Meta.indexes don't get dropped with sqlite3 remake.
        """
        project_state = self.set_up_test_model("test_alflin", index=True)
        operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True))
        new_state = project_state.clone()
        operation.state_forwards("test_alflin", new_state)
        # Test the database alteration
        self.assertColumnNotNull("test_alflin_pony", "pink")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_alflin", editor, project_state, new_state)
        # Index hasn't been dropped
        self.assertIndexExists("test_alflin_pony", ["pink"])
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_alflin", editor, new_state, project_state)
        # Ensure the index is still there
        self.assertIndexExists("test_alflin_pony", ["pink"])

    def test_alter_index_together(self):
        """
        Tests the AlterIndexTogether operation.
        """
        project_state = self.set_up_test_model("test_alinto")
        # Test the state alteration
        operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")])
        self.assertEqual(operation.describe(), "Alter index_together for Pony (1 constraint(s))")
        new_state = project_state.clone()
        operation.state_forwards("test_alinto", new_state)
        self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0)
        self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1)
        # Make sure there's no matching index
        self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards("test_alinto", editor, project_state, new_state)
        self.assertIndexExists("test_alinto_pony", ["pink", "weight"])
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_alinto", editor, new_state, project_state)
        self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AlterIndexTogether")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'name': "Pony", 'index_together': {("pink", "weight")}})

    def test_alter_index_together_remove(self):
        operation = migrations.AlterIndexTogether("Pony", None)
        self.assertEqual(operation.describe(), "Alter index_together for Pony (0 constraint(s))")

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_add_constraint(self):
        project_state = self.set_up_test_model("test_addconstraint")
        gt_check = models.Q(pink__gt=2)
        gt_constraint = models.CheckConstraint(check=gt_check, name="test_add_constraint_pony_pink_gt_2")
        gt_operation = migrations.AddConstraint("Pony", gt_constraint)
        self.assertEqual(
            gt_operation.describe(), "Create constraint test_add_constraint_pony_pink_gt_2 on model Pony"
        )
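        # Check constraints are enforced at the database level, so this test
        # is feature-gated via the decorator above for backends without them.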
        # Test the state alteration
        new_state = project_state.clone()
        gt_operation.state_forwards("test_addconstraint", new_state)
        self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 1)
        Pony = new_state.apps.get_model("test_addconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 1)
        # Test the database alteration
        with connection.schema_editor() as editor:
            gt_operation.database_forwards("test_addconstraint", editor, project_state, new_state)
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=1, weight=1.0)
        # Add another one.
        lt_check = models.Q(pink__lt=100)
        lt_constraint = models.CheckConstraint(check=lt_check, name="test_add_constraint_pony_pink_lt_100")
        lt_operation = migrations.AddConstraint("Pony", lt_constraint)
        lt_operation.state_forwards("test_addconstraint", new_state)
        self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 2)
        Pony = new_state.apps.get_model("test_addconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 2)
        with connection.schema_editor() as editor:
            lt_operation.database_forwards("test_addconstraint", editor, project_state, new_state)
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=100, weight=1.0)
        # Test reversal
        with connection.schema_editor() as editor:
            gt_operation.database_backwards("test_addconstraint", editor, new_state, project_state)
        Pony.objects.create(pink=1, weight=1.0)
        # Test deconstruction
        definition = gt_operation.deconstruct()
        self.assertEqual(definition[0], "AddConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'model_name': "Pony", 'constraint': gt_constraint})

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_add_constraint_percent_escaping(self):
        app_label = 'add_constraint_string_quoting'
        operations = [
            CreateModel(
                'Author',
                fields=[
                    ('id', models.AutoField(primary_key=True)),
                    ('name', models.CharField(max_length=100)),
                    ('rebate', models.CharField(max_length=100)),
                ],
            ),
        ]
        from_state = self.apply_operations(app_label, ProjectState(), operations)
        # The "%" generated by the startswith lookup must still act as a
        # wildcard rather than being escaped into a literal character.
        check = models.Q(name__startswith='Albert')
        constraint = models.CheckConstraint(check=check, name='name_constraint')
        operation = migrations.AddConstraint('Author', constraint)
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Author.objects.create(name='Artur')
        # A literal "%" must be escaped so that it is not treated as a
        # wildcard.
        check = models.Q(rebate__endswith='%')
        constraint = models.CheckConstraint(check=check, name='rebate_constraint')
        operation = migrations.AddConstraint('Author', constraint)
        from_state = to_state
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Author.objects.create(name='Albert', rebate='10$')
        author = Author.objects.create(name='Albert', rebate='10%')
        self.assertEqual(Author.objects.get(), author)

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_add_or_constraint(self):
        app_label = 'test_addorconstraint'
        constraint_name = 'add_constraint_or'
        from_state = self.set_up_test_model(app_label)
        check = models.Q(pink__gt=2, weight__gt=2) | models.Q(weight__lt=0)
        constraint = models.CheckConstraint(check=check, name=constraint_name)
        operation = migrations.AddConstraint('Pony', constraint)
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Pony = to_state.apps.get_model(app_label, 'Pony')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=2, weight=3.0)
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=3, weight=1.0)
        Pony.objects.bulk_create([
            Pony(pink=3, weight=-1.0),
            Pony(pink=1, weight=-1.0),
            Pony(pink=3, weight=3.0),
        ])

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_remove_constraint(self):
        project_state = self.set_up_test_model("test_removeconstraint", constraints=[
            models.CheckConstraint(check=models.Q(pink__gt=2), name="test_remove_constraint_pony_pink_gt_2"),
            models.CheckConstraint(check=models.Q(pink__lt=100), name="test_remove_constraint_pony_pink_lt_100"),
        ])
        gt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_gt_2")
        self.assertEqual(
            gt_operation.describe(), "Remove constraint test_remove_constraint_pony_pink_gt_2 from model Pony"
        )
        # Test state alteration
        new_state = project_state.clone()
        gt_operation.state_forwards("test_removeconstraint", new_state)
        self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 1)
        Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 1)
        # Test database alteration
        with connection.schema_editor() as editor:
            gt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state)
        Pony.objects.create(pink=1, weight=1.0).delete()
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=100, weight=1.0)
        # Remove the other one.
        lt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_lt_100")
        lt_operation.state_forwards("test_removeconstraint", new_state)
        self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 0)
        Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 0)
        with connection.schema_editor() as editor:
            lt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state)
        Pony.objects.create(pink=100, weight=1.0).delete()
        # Test reversal
        with connection.schema_editor() as editor:
            gt_operation.database_backwards("test_removeconstraint", editor, new_state, project_state)
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=1, weight=1.0)
        # Test deconstruction
        definition = gt_operation.deconstruct()
        self.assertEqual(definition[0], "RemoveConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'model_name': "Pony", 'name': "test_remove_constraint_pony_pink_gt_2"})

    def test_add_partial_unique_constraint(self):
        project_state = self.set_up_test_model('test_addpartialuniqueconstraint')
        partial_unique_constraint = models.UniqueConstraint(
            fields=['pink'],
            condition=models.Q(weight__gt=5),
            name='test_constraint_pony_pink_for_weight_gt_5_uniq',
        )
        operation = migrations.AddConstraint('Pony', partial_unique_constraint)
        self.assertEqual(
            operation.describe(),
            'Create constraint test_constraint_pony_pink_for_weight_gt_5_uniq '
            'on model Pony'
        )
        # Test the state alteration
        new_state = project_state.clone()
        operation.state_forwards('test_addpartialuniqueconstraint', new_state)
        self.assertEqual(len(new_state.models['test_addpartialuniqueconstraint', 'pony'].options['constraints']), 1)
        Pony = new_state.apps.get_model('test_addpartialuniqueconstraint', 'Pony')
        self.assertEqual(len(Pony._meta.constraints), 1)
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards('test_addpartialuniqueconstraint', editor, project_state, new_state)
        # Test constraint works
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=6.0)
        if connection.features.supports_partial_indexes:
            with self.assertRaises(IntegrityError), transaction.atomic():
                Pony.objects.create(pink=1, weight=7.0)
        else:
            Pony.objects.create(pink=1, weight=7.0)
        # Test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards('test_addpartialuniqueconstraint', editor, new_state, project_state)
        # Test constraint doesn't work
        Pony.objects.create(pink=1, weight=7.0)
        # Test deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], 'AddConstraint')
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'model_name': 'Pony', 'constraint': partial_unique_constraint})

    def test_remove_partial_unique_constraint(self):
        project_state = self.set_up_test_model('test_removepartialuniqueconstraint', constraints=[
            models.UniqueConstraint(
                fields=['pink'],
                condition=models.Q(weight__gt=5),
                name='test_constraint_pony_pink_for_weight_gt_5_uniq',
            ),
        ])
        gt_operation = migrations.RemoveConstraint('Pony', 'test_constraint_pony_pink_for_weight_gt_5_uniq')
        self.assertEqual(
            gt_operation.describe(),
            'Remove constraint test_constraint_pony_pink_for_weight_gt_5_uniq from model Pony'
        )
        # Test state alteration
        new_state = project_state.clone()
        gt_operation.state_forwards('test_removepartialuniqueconstraint', new_state)
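        # With the constraint removed from state, duplicate pink values with
        # weight > 5 must be accepted again once the database change applies.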
        self.assertEqual(len(new_state.models['test_removepartialuniqueconstraint', 'pony'].options['constraints']), 0)
        Pony = new_state.apps.get_model('test_removepartialuniqueconstraint', 'Pony')
        self.assertEqual(len(Pony._meta.constraints), 0)
        # Test database alteration
        with connection.schema_editor() as editor:
            gt_operation.database_forwards('test_removepartialuniqueconstraint', editor, project_state, new_state)
        # Test constraint doesn't work
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=6.0)
        Pony.objects.create(pink=1, weight=7.0).delete()
        # Test reversal
        with connection.schema_editor() as editor:
            gt_operation.database_backwards('test_removepartialuniqueconstraint', editor, new_state, project_state)
        # Test constraint works
        if connection.features.supports_partial_indexes:
            with self.assertRaises(IntegrityError), transaction.atomic():
                Pony.objects.create(pink=1, weight=7.0)
        else:
            Pony.objects.create(pink=1, weight=7.0)
        # Test deconstruction
        definition = gt_operation.deconstruct()
        self.assertEqual(definition[0], 'RemoveConstraint')
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {
            'model_name': 'Pony',
            'name': 'test_constraint_pony_pink_for_weight_gt_5_uniq',
        })

    def test_alter_model_options(self):
        """
        Tests the AlterModelOptions operation.
        """
        project_state = self.set_up_test_model("test_almoop")
        # Test the state alteration (no DB alteration to test)
        operation = migrations.AlterModelOptions("Pony", {"permissions": [("can_groom", "Can groom")]})
        self.assertEqual(operation.describe(), "Change Meta options on Pony")
        new_state = project_state.clone()
        operation.state_forwards("test_almoop", new_state)
        self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 0)
        self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 1)
        self.assertEqual(new_state.models["test_almoop", "pony"].options["permissions"][0][0], "can_groom")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AlterModelOptions")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'name': "Pony", 'options': {"permissions": [("can_groom", "Can groom")]}})

    def test_alter_model_options_emptying(self):
        """
        The AlterModelOptions operation removes keys from the dict (#23121)
        """
        project_state = self.set_up_test_model("test_almoop", options=True)
        # Test the state alteration (no DB alteration to test)
        operation = migrations.AlterModelOptions("Pony", {})
        self.assertEqual(operation.describe(), "Change Meta options on Pony")
        new_state = project_state.clone()
        operation.state_forwards("test_almoop", new_state)
        self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 1)
        self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 0)
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AlterModelOptions")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {'name': "Pony", 'options': {}})

    def test_alter_order_with_respect_to(self):
        """
        Tests the AlterOrderWithRespectTo operation.
        """
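        # order_with_respect_to adds a hidden integer "_order" column; the
        # test tracks that column's lifecycle across forwards and backwards
        # migrations.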
""" project_state = self.set_up_test_model("test_alorwrtto", related_model=True) # Test the state alteration operation = migrations.AlterOrderWithRespectTo("Rider", "pony") self.assertEqual(operation.describe(), "Set order_with_respect_to on Rider to pony") new_state = project_state.clone() operation.state_forwards("test_alorwrtto", new_state) self.assertIsNone( project_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None) ) self.assertEqual( new_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None), "pony" ) # Make sure there's no matching index self.assertColumnNotExists("test_alorwrtto_rider", "_order") # Create some rows before alteration rendered_state = project_state.apps pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(weight=50) rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=1) rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=2) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alorwrtto", editor, project_state, new_state) self.assertColumnExists("test_alorwrtto_rider", "_order") # Check for correct value in rows updated_riders = new_state.apps.get_model("test_alorwrtto", "Rider").objects.all() self.assertEqual(updated_riders[0]._order, 0) self.assertEqual(updated_riders[1]._order, 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alorwrtto", editor, new_state, project_state) self.assertColumnNotExists("test_alorwrtto_rider", "_order") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterOrderWithRespectTo") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Rider", 'order_with_respect_to': "pony"}) def test_alter_model_managers(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_almoma") # Test the state alteration operation = migrations.AlterModelManagers( "Pony", managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Change managers on Pony") managers = project_state.models["test_almoma", "pony"].managers self.assertEqual(managers, []) new_state = project_state.clone() operation.state_forwards("test_almoma", new_state) self.assertIn(("test_almoma", "pony"), new_state.models) managers = new_state.models["test_almoma", "pony"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) rendered_state = new_state.apps model = rendered_state.get_model('test_almoma', 'pony') self.assertIsInstance(model.food_qs, models.Manager) self.assertIsInstance(model.food_mgr, FoodManager) self.assertIsInstance(model.food_mgr_kwargs, FoodManager) def test_alter_model_managers_emptying(self): """ The managers on a model are set. 
""" project_state = self.set_up_test_model("test_almomae", manager_model=True) # Test the state alteration operation = migrations.AlterModelManagers("Food", managers=[]) self.assertEqual(operation.describe(), "Change managers on Food") self.assertIn(("test_almomae", "food"), project_state.models) managers = project_state.models["test_almomae", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) new_state = project_state.clone() operation.state_forwards("test_almomae", new_state) managers = new_state.models["test_almomae", "food"].managers self.assertEqual(managers, []) def test_alter_fk(self): """ Creating and then altering an FK works correctly and deals with the pending SQL (#23091) """ project_state = self.set_up_test_model("test_alfk") # Test adding and then altering the FK in one go create_operation = migrations.CreateModel( name="Rider", fields=[ ("id", models.AutoField(primary_key=True)), ("pony", models.ForeignKey("Pony", models.CASCADE)), ], ) create_state = project_state.clone() create_operation.state_forwards("test_alfk", create_state) alter_operation = migrations.AlterField( model_name='Rider', name='pony', field=models.ForeignKey("Pony", models.CASCADE, editable=False), ) alter_state = create_state.clone() alter_operation.state_forwards("test_alfk", alter_state) with connection.schema_editor() as editor: create_operation.database_forwards("test_alfk", editor, project_state, create_state) alter_operation.database_forwards("test_alfk", editor, create_state, alter_state) def test_alter_fk_non_fk(self): """ Altering an FK to a non-FK works (#23244) """ # Test the state alteration operation = migrations.AlterField( model_name="Rider", name="pony", field=models.FloatField(), ) project_state, new_state = self.make_test_state("test_afknfk", operation, related_model=True) # Test the database alteration self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") with connection.schema_editor() as editor: operation.database_forwards("test_afknfk", editor, project_state, new_state) self.assertColumnExists("test_afknfk_rider", "pony") self.assertColumnNotExists("test_afknfk_rider", "pony_id") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_afknfk", editor, new_state, project_state) self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") def test_run_sql(self): """ Tests the RunSQL operation. 
""" project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( # Use a multi-line string with a comment to test splitting on SQLite and MySQL respectively "CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'i love ponies'); -- this is magic!\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (2, 'i love django');\n" "UPDATE i_love_ponies SET special_thing = 'Ponies' WHERE special_thing LIKE '%%ponies';" "UPDATE i_love_ponies SET special_thing = 'Django' WHERE special_thing LIKE '%django';", # Run delete queries to test for parameter substitution failure # reported in #23426 "DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';" "DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';" "DROP TABLE i_love_ponies", state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])], ) self.assertEqual(operation.describe(), "Raw SQL operation") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_runsql", new_state) self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test SQL collection with connection.schema_editor(collect_sql=True) as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql)) operation.database_backwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql)) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 2) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'") self.assertEqual(cursor.fetchall()[0][0], 1) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'") self.assertEqual(cursor.fetchall()[0][0], 1) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunSQL") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["reverse_sql", "sql", "state_operations"]) # And elidable reduction self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunSQL('SELECT 1 FROM void;', elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_sql_params(self): """ #23426 - RunSQL should accept parameters. 
""" project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( ["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"], ["DROP TABLE i_love_ponies"], ) param_operation = migrations.RunSQL( # forwards ( "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');", ["INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);", ['Ponies']], ("INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);", (3, 'Python',)), ), # backwards [ "DELETE FROM i_love_ponies WHERE special_thing = 'Django';", ["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None], ("DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;", [3, 'Python']), ] ) # Make sure there's no table self.assertTableNotExists("i_love_ponies") new_state = project_state.clone() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) # Test parameter passing with connection.schema_editor() as editor: param_operation.database_forwards("test_runsql", editor, project_state, new_state) # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 3) with connection.schema_editor() as editor: param_operation.database_backwards("test_runsql", editor, new_state, project_state) with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") def test_run_sql_params_invalid(self): """ #23426 - RunSQL should fail when a list of statements with an incorrect number of tuples is given. """ project_state = self.set_up_test_model("test_runsql") new_state = project_state.clone() operation = migrations.RunSQL( # forwards [ ["INSERT INTO foo (bar) VALUES ('buz');"] ], # backwards ( ("DELETE FROM foo WHERE bar = 'buz';", 'invalid', 'parameter count'), ), ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"): operation.database_forwards("test_runsql", editor, project_state, new_state) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"): operation.database_backwards("test_runsql", editor, new_state, project_state) def test_run_sql_noop(self): """ #24098 - Tests no-op RunSQL operations. 
""" operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, None, None) operation.database_backwards("test_runsql", editor, None, None) def test_run_python(self): """ Tests the RunPython operation """ project_state = self.set_up_test_model("test_runpython", mti_model=True) # Create the operation def inner_method(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.create(pink=1, weight=3.55) Pony.objects.create(weight=5) def inner_method_reverse(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.filter(pink=1, weight=3.55).delete() Pony.objects.filter(weight=5).delete() operation = migrations.RunPython(inner_method, reverse_code=inner_method_reverse) self.assertEqual(operation.describe(), "Raw Python operation") # Test the state alteration does nothing new_state = project_state.clone() operation.state_forwards("test_runpython", new_state) self.assertEqual(new_state, project_state) # Test the database alteration self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) # Now test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) # Now test we can't use a string with self.assertRaisesMessage(ValueError, 'RunPython must be supplied with a callable'): migrations.RunPython("print 'ahahaha'") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code", "reverse_code"]) # Also test reversal fails, with an operation identical to above but without reverse_code set no_reverse_operation = migrations.RunPython(inner_method) self.assertFalse(no_reverse_operation.reversible) with connection.schema_editor() as editor: no_reverse_operation.database_forwards("test_runpython", editor, project_state, new_state) with self.assertRaises(NotImplementedError): no_reverse_operation.database_backwards("test_runpython", editor, new_state, project_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) def create_ponies(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") pony1 = Pony.objects.create(pink=1, weight=3.55) self.assertIsNot(pony1.pk, None) pony2 = Pony.objects.create(weight=5) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = migrations.RunPython(create_ponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code"]) def create_shetlandponies(models, schema_editor): ShetlandPony = models.get_model("test_runpython", "ShetlandPony") pony1 = ShetlandPony.objects.create(weight=4.0) self.assertIsNot(pony1.pk, 
None) pony2 = ShetlandPony.objects.create(weight=5.0) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = migrations.RunPython(create_shetlandponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6) self.assertEqual(project_state.apps.get_model("test_runpython", "ShetlandPony").objects.count(), 2) # And elidable reduction self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunPython(inner_method, elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_python_atomic(self): """ Tests the RunPython operation correctly handles the "atomic" keyword """ project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True) def inner_method(models, schema_editor): Pony = models.get_model("test_runpythonatomic", "Pony") Pony.objects.create(pink=1, weight=3.55) raise ValueError("Adrian hates ponies.") # Verify atomicity when applying. atomic_migration = Migration("test", "test_runpythonatomic") atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method)] non_atomic_migration = Migration("test", "test_runpythonatomic") non_atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method, atomic=False)] # If we're a fully-transactional database, both versions should rollback if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation should leave a row there else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Reset object count to zero and verify atomicity when unapplying. project_state.apps.get_model("test_runpythonatomic", "Pony").objects.all().delete() # On a fully-transactional database, both versions rollback. 
if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation leaves a row there. else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Verify deconstruction. definition = non_atomic_migration.operations[0].deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["atomic", "code", "reverse_code"]) def test_run_python_related_assignment(self): """ #24282 - Model changes to a FK reverse side update the model on the FK side as well. """ def inner_method(models, schema_editor): Author = models.get_model("test_authors", "Author") Book = models.get_model("test_books", "Book") author = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author) create_author = migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey("test_authors.Author", models.CASCADE)) ], options={}, ) add_hometown = migrations.AddField( "Author", "hometown", models.CharField(max_length=100), ) create_old_man = migrations.RunPython(inner_method, inner_method) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_authors", new_state) create_author.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_books", new_state) create_book.database_forwards("test_books", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: add_hometown.state_forwards("test_authors", new_state) add_hometown.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_old_man.state_forwards("test_books", new_state) create_old_man.database_forwards("test_books", editor, project_state, new_state) def test_model_with_bigautofield(self): """ A model with BigAutoField can be created. 
""" def create_data(models, schema_editor): Author = models.get_model("test_author", "Author") Book = models.get_model("test_book", "Book") author1 = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author1) Book.objects.create(id=2 ** 33, title="A farewell to arms", author=author1) author2 = Author.objects.create(id=2 ** 33, name="Remarque") Book.objects.create(title="All quiet on the western front", author=author2) Book.objects.create(title="Arc de Triomphe", author=author2) create_author = migrations.CreateModel( "Author", [ ("id", models.BigAutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.BigAutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey(to="test_author.Author", on_delete=models.CASCADE)) ], options={}, ) fill_data = migrations.RunPython(create_data) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_author", new_state) create_author.database_forwards("test_author", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_book", new_state) create_book.database_forwards("test_book", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_data.state_forwards("fill_data", new_state) fill_data.database_forwards("fill_data", editor, project_state, new_state) def test_autofield_foreignfield_growth(self): """ A field may be migrated from AutoField to BigAutoField. """ def create_initial_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog = Blog.objects.create(name="web development done right") Article.objects.create(name="Frameworks", blog=blog) Article.objects.create(name="Programming Languages", blog=blog) def create_big_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog2 = Blog.objects.create(name="Frameworks", id=2 ** 33) Article.objects.create(name="Django", blog=blog2) Article.objects.create(id=2 ** 33, name="Django2", blog=blog2) create_blog = migrations.CreateModel( "Blog", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_article = migrations.CreateModel( "Article", [ ("id", models.AutoField(primary_key=True)), ("blog", models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE)), ("name", models.CharField(max_length=100)), ("data", models.TextField(default="")), ], options={}, ) fill_initial_data = migrations.RunPython(create_initial_data, create_initial_data) fill_big_data = migrations.RunPython(create_big_data, create_big_data) grow_article_id = migrations.AlterField("Article", "id", models.BigAutoField(primary_key=True)) grow_blog_id = migrations.AlterField("Blog", "id", models.BigAutoField(primary_key=True)) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_blog.state_forwards("test_blog", new_state) create_blog.database_forwards("test_blog", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: 
create_article.state_forwards("test_article", new_state) create_article.database_forwards("test_article", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_initial_data.state_forwards("fill_initial_data", new_state) fill_initial_data.database_forwards("fill_initial_data", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_article_id.state_forwards("test_article", new_state) grow_article_id.database_forwards("test_article", editor, project_state, new_state) state = new_state.clone() article = state.apps.get_model("test_article.Article") self.assertIsInstance(article._meta.pk, models.BigAutoField) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_blog_id.state_forwards("test_blog", new_state) grow_blog_id.database_forwards("test_blog", editor, project_state, new_state) state = new_state.clone() blog = state.apps.get_model("test_blog.Blog") self.assertIsInstance(blog._meta.pk, models.BigAutoField) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_big_data.state_forwards("fill_big_data", new_state) fill_big_data.database_forwards("fill_big_data", editor, project_state, new_state) def test_run_python_noop(self): """ #24098 - Tests no-op RunPython operations. """ project_state = ProjectState() new_state = project_state.clone() operation = migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) operation.database_backwards("test_runpython", editor, new_state, project_state) def test_separate_database_and_state(self): """ Tests the SeparateDatabaseAndState operation. 
""" project_state = self.set_up_test_model("test_separatedatabaseandstate") # Create the operation database_operation = migrations.RunSQL( "CREATE TABLE i_love_ponies (id int, special_thing int);", "DROP TABLE i_love_ponies;" ) state_operation = migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))]) operation = migrations.SeparateDatabaseAndState( state_operations=[state_operation], database_operations=[database_operation] ) self.assertEqual(operation.describe(), "Custom state/database change combination") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_separatedatabaseandstate", new_state) self.assertEqual(len(new_state.models["test_separatedatabaseandstate", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_separatedatabaseandstate", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_separatedatabaseandstate", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "SeparateDatabaseAndState") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["database_operations", "state_operations"]) def test_separate_database_and_state2(self): """ A complex SeparateDatabaseAndState operation: Multiple operations both for state and database. Verify the state dependencies within each list and that state ops don't affect the database. """ app_label = "test_separatedatabaseandstate2" project_state = self.set_up_test_model(app_label) # Create the operation database_operations = [ migrations.CreateModel( "ILovePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveponies"}, ), migrations.CreateModel( "ILoveMorePonies", # We use IntegerField and not AutoField because # the model is going to be deleted immediately # and with an AutoField this fails on Oracle [("id", models.IntegerField(primary_key=True))], options={"db_table": "ilovemoreponies"}, ), migrations.DeleteModel("ILoveMorePonies"), migrations.CreateModel( "ILoveEvenMorePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveevenmoreponies"}, ), ] state_operations = [ migrations.CreateModel( "SomethingElse", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingelse"}, ), migrations.DeleteModel("SomethingElse"), migrations.CreateModel( "SomethingCompletelyDifferent", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingcompletelydifferent"}, ), ] operation = migrations.SeparateDatabaseAndState( state_operations=state_operations, database_operations=database_operations, ) # Test the state alteration new_state = project_state.clone() operation.state_forwards(app_label, new_state) def assertModelsAndTables(after_db): # Tables and models exist, or don't, as they should: self.assertNotIn((app_label, "somethingelse"), new_state.models) self.assertEqual(len(new_state.models[app_label, "somethingcompletelydifferent"].fields), 1) self.assertNotIn((app_label, "iloveponiesonies"), new_state.models) self.assertNotIn((app_label, "ilovemoreponies"), new_state.models) self.assertNotIn((app_label, "iloveevenmoreponies"), new_state.models) 
self.assertTableNotExists("somethingelse") self.assertTableNotExists("somethingcompletelydifferent") self.assertTableNotExists("ilovemoreponies") if after_db: self.assertTableExists("iloveponies") self.assertTableExists("iloveevenmoreponies") else: self.assertTableNotExists("iloveponies") self.assertTableNotExists("iloveevenmoreponies") assertModelsAndTables(after_db=False) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) assertModelsAndTables(after_db=True) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) assertModelsAndTables(after_db=False) class SwappableOperationTests(OperationTestBase): """ Key operations ignore swappable models (we don't want to replicate all of them here, as the functionality is in a common base class anyway) """ available_apps = ['migrations'] @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_create_ignore_swapped(self): """ The CreateTable operation ignores swapped models. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], options={ "swappable": "TEST_SWAP_MODEL", }, ) # Test the state alteration (it should still be there!) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crigsw", new_state) self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crigsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crigsw", editor, project_state, new_state) self.assertTableNotExists("test_crigsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crigsw", editor, new_state, project_state) self.assertTableNotExists("test_crigsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_delete_ignore_swapped(self): """ Tests the DeleteModel operation ignores swapped models. """ operation = migrations.DeleteModel("Pony") project_state, new_state = self.make_test_state("test_dligsw", operation) # Test the database alteration self.assertTableNotExists("test_dligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dligsw", editor, project_state, new_state) self.assertTableNotExists("test_dligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dligsw", editor, new_state, project_state) self.assertTableNotExists("test_dligsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_add_field_ignore_swapped(self): """ Tests the AddField operation. 
""" # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=5), ) project_state, new_state = self.make_test_state("test_adfligsw", operation) # Test the database alteration self.assertTableNotExists("test_adfligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_adfligsw", editor, project_state, new_state) self.assertTableNotExists("test_adfligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adfligsw", editor, new_state, project_state) self.assertTableNotExists("test_adfligsw_pony") @override_settings(TEST_SWAP_MODEL='migrations.SomeFakeModel') def test_indexes_ignore_swapped(self): """ Add/RemoveIndex operations ignore swapped models. """ operation = migrations.AddIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state('test_adinigsw', operation) with connection.schema_editor() as editor: # No database queries should be run for swapped models operation.database_forwards('test_adinigsw', editor, project_state, new_state) operation.database_backwards('test_adinigsw', editor, new_state, project_state) operation = migrations.RemoveIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state("test_rminigsw", operation) with connection.schema_editor() as editor: operation.database_forwards('test_rminigsw', editor, project_state, new_state) operation.database_backwards('test_rminigsw', editor, new_state, project_state) class TestCreateModel(SimpleTestCase): def test_references_model_mixin(self): CreateModel('name', [], bases=(Mixin, models.Model)).references_model('other_model') class FieldOperationTests(SimpleTestCase): def test_references_model(self): operation = FieldOperation('MoDel', 'field', models.ForeignKey('Other', models.CASCADE)) # Model name match. self.assertIs(operation.references_model('mOdEl'), True) # Referenced field. self.assertIs(operation.references_model('oTher'), True) # Doesn't reference. 
        self.assertIs(operation.references_model('Whatever'), False)

    def test_references_field_by_name(self):
        operation = FieldOperation('MoDel', 'field', models.BooleanField(default=False))
        self.assertIs(operation.references_field('model', 'field'), True)

    def test_references_field_by_remote_field_model(self):
        operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE))
        self.assertIs(operation.references_field('Other', 'whatever'), True)
        self.assertIs(operation.references_field('Missing', 'whatever'), False)

    def test_references_field_by_from_fields(self):
        operation = FieldOperation(
            'Model', 'field', models.fields.related.ForeignObject('Other', models.CASCADE, ['from'], ['to'])
        )
        self.assertIs(operation.references_field('Model', 'from'), True)
        self.assertIs(operation.references_field('Model', 'to'), False)
        self.assertIs(operation.references_field('Other', 'from'), False)
        self.assertIs(operation.references_field('Other', 'to'), True)

    def test_references_field_by_to_fields(self):
        operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE, to_field='field'))
        self.assertIs(operation.references_field('Other', 'field'), True)
        self.assertIs(operation.references_field('Other', 'whatever'), False)
        self.assertIs(operation.references_field('Missing', 'whatever'), False)

    def test_references_field_by_through(self):
        operation = FieldOperation('Model', 'field', models.ManyToManyField('Other', through='Through'))
        self.assertIs(operation.references_field('Other', 'whatever'), True)
        self.assertIs(operation.references_field('Through', 'whatever'), True)
        self.assertIs(operation.references_field('Missing', 'whatever'), False)

    def test_references_field_by_through_fields(self):
        operation = FieldOperation(
            'Model', 'field', models.ManyToManyField('Other', through='Through', through_fields=('first', 'second'))
        )
        self.assertIs(operation.references_field('Other', 'whatever'), True)
        self.assertIs(operation.references_field('Through', 'whatever'), False)
        self.assertIs(operation.references_field('Through', 'first'), True)
        self.assertIs(operation.references_field('Through', 'second'), True)
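
# An editorial sketch, not part of the original suite: the deconstruct()
# triples asserted throughout these tests form a round-trip contract, i.e.
# an operation can be rebuilt from its own deconstruction. The class and
# method names below are hypothetical.
class DeconstructRoundTripExample(SimpleTestCase):

    def test_add_field_round_trips(self):
        operation = migrations.AddField('Pony', 'height', models.FloatField(null=True))
        name, args, kwargs = operation.deconstruct()
        # `name` is the operation's class name, so getattr() on the
        # migrations module recovers the class itself.
        rebuilt = getattr(migrations, name)(*args, **kwargs)
        self.assertEqual(rebuilt.deconstruct(), operation.deconstruct())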
8268618e73efc8b89165dd23ff28c9f4d15ad8eec5b84fb97bdc82ff21ba0a5a
import os import shutil import tempfile from contextlib import contextmanager from importlib import import_module from django.apps import apps from django.db import connections from django.db.migrations.recorder import MigrationRecorder from django.test import TransactionTestCase from django.test.utils import extend_sys_path from django.utils.module_loading import module_dir class MigrationTestBase(TransactionTestCase): """ Contains an extended set of asserts for testing migrations and schema operations. """ available_apps = ["migrations"] databases = {'default', 'other'} def tearDown(self): # Reset applied-migrations state. for db in connections: recorder = MigrationRecorder(connections[db]) recorder.migration_qs.filter(app='migrations').delete() def get_table_description(self, table, using='default'): with connections[using].cursor() as cursor: return connections[using].introspection.get_table_description(cursor, table) def assertTableExists(self, table, using='default'): with connections[using].cursor() as cursor: self.assertIn(table, connections[using].introspection.table_names(cursor)) def assertTableNotExists(self, table, using='default'): with connections[using].cursor() as cursor: self.assertNotIn(table, connections[using].introspection.table_names(cursor)) def assertColumnExists(self, table, column, using='default'): self.assertIn(column, [c.name for c in self.get_table_description(table, using=using)]) def assertColumnNotExists(self, table, column, using='default'): self.assertNotIn(column, [c.name for c in self.get_table_description(table, using=using)]) def _get_column_allows_null(self, table, column, using): return [c.null_ok for c in self.get_table_description(table, using=using) if c.name == column][0] def assertColumnNull(self, table, column, using='default'): self.assertEqual(self._get_column_allows_null(table, column, using), True) def assertColumnNotNull(self, table, column, using='default'): self.assertEqual(self._get_column_allows_null(table, column, using), False) def assertIndexExists(self, table, columns, value=True, using='default'): with connections[using].cursor() as cursor: self.assertEqual( value, any( c["index"] for c in connections[using].introspection.get_constraints(cursor, table).values() if c['columns'] == list(columns) ), ) def assertIndexNotExists(self, table, columns): return self.assertIndexExists(table, columns, False) def assertConstraintExists(self, table, name, value=True, using='default'): with connections[using].cursor() as cursor: constraints = connections[using].introspection.get_constraints(cursor, table).items() self.assertEqual( value, any(c['check'] for n, c in constraints if n == name), ) def assertConstraintNotExists(self, table, name): return self.assertConstraintExists(table, name, False) def assertFKExists(self, table, columns, to, value=True, using='default'): with connections[using].cursor() as cursor: self.assertEqual( value, any( c["foreign_key"] == to for c in connections[using].introspection.get_constraints(cursor, table).values() if c['columns'] == list(columns) ), ) def assertFKNotExists(self, table, columns, to): return self.assertFKExists(table, columns, to, False) @contextmanager def temporary_migration_module(self, app_label='migrations', module=None): """ Allows testing management commands in a temporary migrations module. Wrap all invocations to makemigrations and squashmigrations with this context manager in order to avoid creating migration files in your source tree inadvertently. 
Takes the application label that will be passed to makemigrations or squashmigrations and the Python path to a migrations module. The migrations module is used as a template for creating the temporary migrations module. If it isn't provided, the application's migrations module is used, if it exists. Returns the filesystem path to the temporary migrations module. """ with tempfile.TemporaryDirectory() as temp_dir: target_dir = tempfile.mkdtemp(dir=temp_dir) with open(os.path.join(target_dir, '__init__.py'), 'w'): pass target_migrations_dir = os.path.join(target_dir, 'migrations') if module is None: module = apps.get_app_config(app_label).name + '.migrations' try: source_migrations_dir = module_dir(import_module(module)) except (ImportError, ValueError): pass else: shutil.copytree(source_migrations_dir, target_migrations_dir) with extend_sys_path(temp_dir): new_module = os.path.basename(target_dir) + '.migrations' with self.settings(MIGRATION_MODULES={app_label: new_module}): yield target_migrations_dir
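
# An editorial usage sketch, not part of the original file: wrapping
# makemigrations in temporary_migration_module() keeps generated files out
# of the real source tree. 'migrations.test_migrations' is assumed to be
# an existing migrations package that serves as the template.
from django.core.management import call_command


class TemporaryMigrationModuleExample(MigrationTestBase):

    def test_makemigrations_uses_temporary_module(self):
        with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir:
            # makemigrations reads and writes the temporary copy only.
            call_command('makemigrations', 'migrations', verbosity=0)
            self.assertTrue(os.path.exists(os.path.join(migration_dir, '__init__.py')))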
813b008db1d5b85c0479f24194dc6359ae49b7b1c4259b8fba71b247f465d31a
from django.db import connection, migrations, models from django.db.migrations.state import ProjectState from django.test import override_settings from .test_operations import OperationTestBase class AgnosticRouter: """ A router that doesn't have an opinion regarding migrating. """ def allow_migrate(self, db, app_label, **hints): return None class MigrateNothingRouter: """ A router that doesn't allow migrating. """ def allow_migrate(self, db, app_label, **hints): return False class MigrateEverythingRouter: """ A router that always allows migrating. """ def allow_migrate(self, db, app_label, **hints): return True class MigrateWhenFooRouter: """ A router that allows migrating depending on a hint. """ def allow_migrate(self, db, app_label, **hints): return hints.get('foo', False) class MultiDBOperationTests(OperationTestBase): databases = {'default', 'other'} def _test_create_model(self, app_label, should_run): """ CreateModel honors multi-db settings. """ operation = migrations.CreateModel( "Pony", [("id", models.AutoField(primary_key=True))], ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards(app_label, new_state) # Test the database alteration self.assertTableNotExists("%s_pony" % app_label) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) if should_run: self.assertTableExists("%s_pony" % app_label) else: self.assertTableNotExists("%s_pony" % app_label) # And test reversal with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertTableNotExists("%s_pony" % app_label) @override_settings(DATABASE_ROUTERS=[AgnosticRouter()]) def test_create_model(self): """ Test when router doesn't have an opinion (i.e. CreateModel should run). """ self._test_create_model("test_mltdb_crmo", should_run=True) @override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()]) def test_create_model2(self): """ Test when router returns False (i.e. CreateModel shouldn't run). """ self._test_create_model("test_mltdb_crmo2", should_run=False) @override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()]) def test_create_model3(self): """ Test when router returns True (i.e. CreateModel should run). """ self._test_create_model("test_mltdb_crmo3", should_run=True) def test_create_model4(self): """ Test multiple routers. 
""" with override_settings(DATABASE_ROUTERS=[AgnosticRouter(), AgnosticRouter()]): self._test_create_model("test_mltdb_crmo4", should_run=True) with override_settings(DATABASE_ROUTERS=[MigrateNothingRouter(), MigrateEverythingRouter()]): self._test_create_model("test_mltdb_crmo4", should_run=False) with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter(), MigrateNothingRouter()]): self._test_create_model("test_mltdb_crmo4", should_run=True) def _test_run_sql(self, app_label, should_run, hints=None): with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()]): project_state = self.set_up_test_model(app_label) sql = """ INSERT INTO {0}_pony (pink, weight) VALUES (1, 3.55); INSERT INTO {0}_pony (pink, weight) VALUES (3, 5.0); """.format(app_label) operation = migrations.RunSQL(sql, hints=hints or {}) # Test the state alteration does nothing new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(new_state, project_state) # Test the database alteration self.assertEqual(project_state.apps.get_model(app_label, "Pony").objects.count(), 0) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) Pony = project_state.apps.get_model(app_label, "Pony") if should_run: self.assertEqual(Pony.objects.count(), 2) else: self.assertEqual(Pony.objects.count(), 0) @override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()]) def test_run_sql_migrate_nothing_router(self): self._test_run_sql("test_mltdb_runsql", should_run=False) @override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()]) def test_run_sql_migrate_foo_router_without_hints(self): self._test_run_sql("test_mltdb_runsql2", should_run=False) @override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()]) def test_run_sql_migrate_foo_router_with_hints(self): self._test_run_sql('test_mltdb_runsql3', should_run=True, hints={'foo': True}) def _test_run_python(self, app_label, should_run, hints=None): with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()]): project_state = self.set_up_test_model(app_label) # Create the operation def inner_method(models, schema_editor): Pony = models.get_model(app_label, "Pony") Pony.objects.create(pink=1, weight=3.55) Pony.objects.create(weight=5) operation = migrations.RunPython(inner_method, hints=hints or {}) # Test the state alteration does nothing new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(new_state, project_state) # Test the database alteration self.assertEqual(project_state.apps.get_model(app_label, "Pony").objects.count(), 0) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) Pony = project_state.apps.get_model(app_label, "Pony") if should_run: self.assertEqual(Pony.objects.count(), 2) else: self.assertEqual(Pony.objects.count(), 0) @override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()]) def test_run_python_migrate_nothing_router(self): self._test_run_python("test_mltdb_runpython", should_run=False) @override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()]) def test_run_python_migrate_foo_router_without_hints(self): self._test_run_python("test_mltdb_runpython2", should_run=False) @override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()]) def test_run_python_migrate_foo_router_with_hints(self): self._test_run_python('test_mltdb_runpython3', should_run=True, hints={'foo': True})
d6c9bd3fbcc6e4015a18d85e95d3248b1039a8ac96c5ae3d0c1ddf62940a9f04
import functools import re from unittest import mock from django.apps import apps from django.conf import settings from django.contrib.auth.models import AbstractBaseUser from django.core.validators import RegexValidator, validate_slug from django.db import connection, models from django.db.migrations.autodetector import MigrationAutodetector from django.db.migrations.graph import MigrationGraph from django.db.migrations.loader import MigrationLoader from django.db.migrations.questioner import MigrationQuestioner from django.db.migrations.state import ModelState, ProjectState from django.test import TestCase, override_settings from django.test.utils import isolate_lru_cache from .models import FoodManager, FoodQuerySet class DeconstructibleObject: """ A custom deconstructible object. """ def __init__(self, *args, **kwargs): self.args = args self.kwargs = kwargs def deconstruct(self): return ( self.__module__ + '.' + self.__class__.__name__, self.args, self.kwargs ) class AutodetectorTests(TestCase): """ Tests the migration autodetector. """ author_empty = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))]) author_name = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ]) author_name_null = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, null=True)), ]) author_name_longer = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=400)), ]) author_name_renamed = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("names", models.CharField(max_length=200)), ]) author_name_default = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default='Ada Lovelace')), ]) author_name_check_constraint = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ], {'constraints': [models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')]}, ) author_dates_of_birth_auto_now = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("date_of_birth", models.DateField(auto_now=True)), ("date_time_of_birth", models.DateTimeField(auto_now=True)), ("time_of_birth", models.TimeField(auto_now=True)), ]) author_dates_of_birth_auto_now_add = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("date_of_birth", models.DateField(auto_now_add=True)), ("date_time_of_birth", models.DateTimeField(auto_now_add=True)), ("time_of_birth", models.TimeField(auto_now_add=True)), ]) author_name_deconstructible_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject())), ]) author_name_deconstructible_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject())), ]) author_name_deconstructible_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=models.IntegerField())), ]) author_name_deconstructible_4 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=models.IntegerField())), ]) author_name_deconstructible_list_1 = 
ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])), ]) author_name_deconstructible_list_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])), ]) author_name_deconstructible_list_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 999])), ]) author_name_deconstructible_tuple_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))), ]) author_name_deconstructible_tuple_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))), ]) author_name_deconstructible_tuple_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 999))), ]) author_name_deconstructible_dict_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 123 })), ]) author_name_deconstructible_dict_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 123 })), ]) author_name_deconstructible_dict_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 999 })), ]) author_name_nested_deconstructible_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_changed_arg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2-changed'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_extra_arg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), None, a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_changed_kwarg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, 
default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c-changed')), ))), ]) author_name_nested_deconstructible_extra_kwarg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), c=None, ))), ]) author_custom_pk = ModelState("testapp", "Author", [("pk_field", models.IntegerField(primary_key=True))]) author_with_biography_non_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField()), ("biography", models.TextField()), ]) author_with_biography_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(blank=True)), ("biography", models.TextField(blank=True)), ]) author_with_book = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) author_with_book_order_wrt = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ], options={"order_with_respect_to": "book"}) author_renamed_with_book = ModelState("testapp", "Writer", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) author_with_publisher_string = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("publisher_name", models.CharField(max_length=200)), ]) author_with_publisher = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)), ]) author_with_user = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("user", models.ForeignKey("auth.User", models.CASCADE)), ]) author_with_custom_user = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("user", models.ForeignKey("thirdapp.CustomUser", models.CASCADE)), ]) author_proxy = ModelState("testapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)) author_proxy_options = ModelState("testapp", "AuthorProxy", [], { "proxy": True, "verbose_name": "Super Author", }, ("testapp.author",)) author_proxy_notproxy = ModelState("testapp", "AuthorProxy", [], {}, ("testapp.author",)) author_proxy_third = ModelState("thirdapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)) author_proxy_third_notproxy = ModelState("thirdapp", "AuthorProxy", [], {}, ("testapp.author",)) author_proxy_proxy = ModelState("testapp", "AAuthorProxyProxy", [], {"proxy": True}, ("testapp.authorproxy",)) author_unmanaged = ModelState("testapp", "AuthorUnmanaged", [], {"managed": False}, ("testapp.author",)) author_unmanaged_managed = ModelState("testapp", "AuthorUnmanaged", [], {}, ("testapp.author",)) author_unmanaged_default_pk = ModelState("testapp", "Author", [("id", 
    author_custom_pk = ModelState("testapp", "Author", [("pk_field", models.IntegerField(primary_key=True))])
    author_with_biography_non_blank = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField()),
        ("biography", models.TextField()),
    ])
    author_with_biography_blank = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(blank=True)),
        ("biography", models.TextField(blank=True)),
    ])
    author_with_book = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
    ])
    author_with_book_order_wrt = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
    ], options={"order_with_respect_to": "book"})
    author_renamed_with_book = ModelState("testapp", "Writer", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
    ])
    author_with_publisher_string = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("publisher_name", models.CharField(max_length=200)),
    ])
    author_with_publisher = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
    ])
    author_with_user = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("user", models.ForeignKey("auth.User", models.CASCADE)),
    ])
    author_with_custom_user = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=200)),
        ("user", models.ForeignKey("thirdapp.CustomUser", models.CASCADE)),
    ])
    author_proxy = ModelState("testapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",))
    author_proxy_options = ModelState("testapp", "AuthorProxy", [], {
        "proxy": True,
        "verbose_name": "Super Author",
    }, ("testapp.author",))
    author_proxy_notproxy = ModelState("testapp", "AuthorProxy", [], {}, ("testapp.author",))
    author_proxy_third = ModelState("thirdapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",))
    author_proxy_third_notproxy = ModelState("thirdapp", "AuthorProxy", [], {}, ("testapp.author",))
    author_proxy_proxy = ModelState("testapp", "AAuthorProxyProxy", [], {"proxy": True}, ("testapp.authorproxy",))
    author_unmanaged = ModelState("testapp", "AuthorUnmanaged", [], {"managed": False}, ("testapp.author",))
    author_unmanaged_managed = ModelState("testapp", "AuthorUnmanaged", [], {}, ("testapp.author",))
    author_unmanaged_default_pk = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))])
    author_unmanaged_custom_pk = ModelState("testapp", "Author", [
        ("pk_field", models.IntegerField(primary_key=True)),
    ])
    author_with_m2m = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("publishers", models.ManyToManyField("testapp.Publisher")),
    ])
    author_with_m2m_blank = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("publishers", models.ManyToManyField("testapp.Publisher", blank=True)),
    ])
    author_with_m2m_through = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("publishers", models.ManyToManyField("testapp.Publisher", through="testapp.Contract")),
    ])
    author_with_renamed_m2m_through = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("publishers", models.ManyToManyField("testapp.Publisher", through="testapp.Deal")),
    ])
    author_with_former_m2m = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
        ("publishers", models.CharField(max_length=100)),
    ])
    author_with_options = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
    ], {
        "permissions": [('can_hire', 'Can hire')],
        "verbose_name": "Authi",
    })
    author_with_db_table_options = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
    ], {"db_table": "author_one"})
    author_with_new_db_table_options = ModelState("testapp", "Author", [
        ("id", models.AutoField(primary_key=True)),
    ], {"db_table": "author_two"})
    author_renamed_with_db_table_options = ModelState("testapp", "NewAuthor", [
        ("id", models.AutoField(primary_key=True)),
    ], {"db_table": "author_one"})
    author_renamed_with_new_db_table_options = ModelState("testapp", "NewAuthor", [
        ("id", models.AutoField(primary_key=True)),
    ], {"db_table": "author_three"})
    contract = ModelState("testapp", "Contract", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
    ])
    contract_renamed = ModelState("testapp", "Deal", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
    ])
    publisher = ModelState("testapp", "Publisher", [
        ("id", models.AutoField(primary_key=True)),
        ("name", models.CharField(max_length=100)),
    ])
    publisher_with_author = ModelState("testapp", "Publisher", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("name", models.CharField(max_length=100)),
    ])
    publisher_with_aardvark_author = ModelState("testapp", "Publisher", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Aardvark", models.CASCADE)),
        ("name", models.CharField(max_length=100)),
    ])
    publisher_with_book = ModelState("testapp", "Publisher", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("otherapp.Book", models.CASCADE)),
        ("name", models.CharField(max_length=100)),
    ])
    other_pony = ModelState("otherapp", "Pony", [
        ("id", models.AutoField(primary_key=True)),
    ])
    other_pony_food = ModelState("otherapp", "Pony", [
        ("id", models.AutoField(primary_key=True)),
    ], managers=[
        ('food_qs', FoodQuerySet.as_manager()),
        ('food_mgr', FoodManager('a', 'b')),
        ('food_mgr_kwargs', FoodManager('x', 'y', 3, 4)),
    ])
    other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))])
    third_thing = ModelState("thirdapp", "Thing", [("id", models.AutoField(primary_key=True))])
    book = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ])
    book_proxy_fk = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("thirdapp.AuthorProxy", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ])
    book_proxy_proxy_fk = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.AAuthorProxyProxy", models.CASCADE)),
    ])
    book_migrations_fk = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("migrations.UnmigratedModel", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ])
    book_with_no_author = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("title", models.CharField(max_length=200)),
    ])
    book_with_author_renamed = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Writer", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ])
    book_with_field_and_author_renamed = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("writer", models.ForeignKey("testapp.Writer", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ])
    book_with_multiple_authors = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("authors", models.ManyToManyField("testapp.Author")),
        ("title", models.CharField(max_length=200)),
    ])
    book_with_multiple_authors_through_attribution = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("authors", models.ManyToManyField("testapp.Author", through="otherapp.Attribution")),
        ("title", models.CharField(max_length=200)),
    ])
    book_indexes = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ], {
        "indexes": [models.Index(fields=["author", "title"], name="book_title_author_idx")],
    })
    book_unordered_indexes = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ], {
        "indexes": [models.Index(fields=["title", "author"], name="book_author_title_idx")],
    })
    book_foo_together = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ], {
        "index_together": {("author", "title")},
        "unique_together": {("author", "title")},
    })
    book_foo_together_2 = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ], {
        "index_together": {("title", "author")},
        "unique_together": {("title", "author")},
    })
    book_foo_together_3 = ModelState("otherapp", "Book", [
        ("id", models.AutoField(primary_key=True)),
        ("newfield", models.IntegerField()),
        ("author", models.ForeignKey("testapp.Author", models.CASCADE)),
        ("title", models.CharField(max_length=200)),
    ], {
        "index_together": {("title", "newfield")},
        "unique_together": {("title", "newfield")},
    })
("newfield2", models.IntegerField()), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "newfield2")}, "unique_together": {("title", "newfield2")}, }) attribution = ModelState("otherapp", "Attribution", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) edition = ModelState("thirdapp", "Edition", [ ("id", models.AutoField(primary_key=True)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) custom_user = ModelState("thirdapp", "CustomUser", [ ("id", models.AutoField(primary_key=True)), ("username", models.CharField(max_length=255)), ], bases=(AbstractBaseUser,)) custom_user_no_inherit = ModelState("thirdapp", "CustomUser", [ ("id", models.AutoField(primary_key=True)), ("username", models.CharField(max_length=255)), ]) aardvark = ModelState("thirdapp", "Aardvark", [("id", models.AutoField(primary_key=True))]) aardvark_testapp = ModelState("testapp", "Aardvark", [("id", models.AutoField(primary_key=True))]) aardvark_based_on_author = ModelState("testapp", "Aardvark", [], bases=("testapp.Author",)) aardvark_pk_fk_author = ModelState("testapp", "Aardvark", [ ("id", models.OneToOneField("testapp.Author", models.CASCADE, primary_key=True)), ]) knight = ModelState("eggs", "Knight", [("id", models.AutoField(primary_key=True))]) rabbit = ModelState("eggs", "Rabbit", [ ("id", models.AutoField(primary_key=True)), ("knight", models.ForeignKey("eggs.Knight", models.CASCADE)), ("parent", models.ForeignKey("eggs.Rabbit", models.CASCADE)), ], { "unique_together": {("parent", "knight")}, "indexes": [models.Index(fields=["parent", "knight"], name='rabbit_circular_fk_index')], }) def repr_changes(self, changes, include_dependencies=False): output = "" for app_label, migrations in sorted(changes.items()): output += " %s:\n" % app_label for migration in migrations: output += " %s\n" % migration.name for operation in migration.operations: output += " %s\n" % operation if include_dependencies: output += " Dependencies:\n" if migration.dependencies: for dep in migration.dependencies: output += " %s\n" % (dep,) else: output += " None\n" return output def assertNumberMigrations(self, changes, app_label, number): if len(changes.get(app_label, [])) != number: self.fail("Incorrect number of migrations (%s) for %s (expected %s)\n%s" % ( len(changes.get(app_label, [])), app_label, number, self.repr_changes(changes), )) def assertMigrationDependencies(self, changes, app_label, position, dependencies): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) if len(changes[app_label]) < position + 1: self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes))) migration = changes[app_label][position] if set(migration.dependencies) != set(dependencies): self.fail("Migration dependencies mismatch for %s.%s (expected %s):\n%s" % ( app_label, migration.name, dependencies, self.repr_changes(changes, include_dependencies=True), )) def assertOperationTypes(self, changes, app_label, position, types): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) if len(changes[app_label]) < position + 1: self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes))) migration = changes[app_label][position] 
real_types = [operation.__class__.__name__ for operation in migration.operations] if types != real_types: self.fail("Operation type mismatch for %s.%s (expected %s):\n%s" % ( app_label, migration.name, types, self.repr_changes(changes), )) def assertOperationAttributes(self, changes, app_label, position, operation_position, **attrs): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) if len(changes[app_label]) < position + 1: self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes))) migration = changes[app_label][position] if len(changes[app_label]) < position + 1: self.fail("No operation at index %s for %s.%s\n%s" % ( operation_position, app_label, migration.name, self.repr_changes(changes), )) operation = migration.operations[operation_position] for attr, value in attrs.items(): if getattr(operation, attr, None) != value: self.fail("Attribute mismatch for %s.%s op #%s, %s (expected %r, got %r):\n%s" % ( app_label, migration.name, operation_position, attr, value, getattr(operation, attr, None), self.repr_changes(changes), )) def assertOperationFieldAttributes(self, changes, app_label, position, operation_position, **attrs): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) if len(changes[app_label]) < position + 1: self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes))) migration = changes[app_label][position] if len(changes[app_label]) < position + 1: self.fail("No operation at index %s for %s.%s\n%s" % ( operation_position, app_label, migration.name, self.repr_changes(changes), )) operation = migration.operations[operation_position] if not hasattr(operation, 'field'): self.fail("No field attribute for %s.%s op #%s." 
% ( app_label, migration.name, operation_position, )) field = operation.field for attr, value in attrs.items(): if getattr(field, attr, None) != value: self.fail("Field attribute mismatch for %s.%s op #%s, field.%s (expected %r, got %r):\n%s" % ( app_label, migration.name, operation_position, attr, value, getattr(field, attr, None), self.repr_changes(changes), )) def make_project_state(self, model_states): "Shortcut to make ProjectStates from lists of predefined models" project_state = ProjectState() for model_state in model_states: project_state.add_model(model_state.clone()) return project_state def get_changes(self, before_states, after_states, questioner=None): return MigrationAutodetector( self.make_project_state(before_states), self.make_project_state(after_states), questioner, )._detect_changes() def test_arrange_for_graph(self): """Tests auto-naming of migrations for graph matching.""" # Make a fake graph graph = MigrationGraph() graph.add_node(("testapp", "0001_initial"), None) graph.add_node(("testapp", "0002_foobar"), None) graph.add_node(("otherapp", "0001_initial"), None) graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("testapp", "0001_initial")) graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("otherapp", "0001_initial")) # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Run through arrange_for_graph changes = autodetector.arrange_for_graph(changes, graph) # Make sure there's a new name, deps match, etc. self.assertEqual(changes["testapp"][0].name, "0003_author") self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]) self.assertEqual(changes["otherapp"][0].name, "0002_pony_stable") self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]) def test_trim_apps(self): """ Trim does not remove dependencies but does remove unwanted apps. 
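
    # A typical test below builds two ProjectStates with get_changes() and then
    # asserts on the generated migrations. A minimal sketch of the pattern,
    # using fixtures defined above (this mirrors test_add_field):
    #
    #   changes = self.get_changes([self.author_empty], [self.author_name])
    #   self.assertNumberMigrations(changes, 'testapp', 1)
    #   self.assertOperationTypes(changes, 'testapp', 0, ["AddField"])
    #   self.assertOperationAttributes(changes, 'testapp', 0, 0, name="name")
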
""" # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable, self.third_thing]) autodetector = MigrationAutodetector(before, after, MigrationQuestioner({"ask_initial": True})) changes = autodetector._detect_changes() # Run through arrange_for_graph graph = MigrationGraph() changes = autodetector.arrange_for_graph(changes, graph) changes["testapp"][0].dependencies.append(("otherapp", "0001_initial")) changes = autodetector._trim_to_apps(changes, {"testapp"}) # Make sure there's the right set of migrations self.assertEqual(changes["testapp"][0].name, "0001_initial") self.assertEqual(changes["otherapp"][0].name, "0001_initial") self.assertNotIn("thirdapp", changes) def test_custom_migration_name(self): """Tests custom naming of migrations for graph matching.""" # Make a fake graph graph = MigrationGraph() graph.add_node(("testapp", "0001_initial"), None) graph.add_node(("testapp", "0002_foobar"), None) graph.add_node(("otherapp", "0001_initial"), None) graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("testapp", "0001_initial")) # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Run through arrange_for_graph migration_name = 'custom_name' changes = autodetector.arrange_for_graph(changes, graph, migration_name) # Make sure there's a new name, deps match, etc. self.assertEqual(changes["testapp"][0].name, "0003_%s" % migration_name) self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]) self.assertEqual(changes["otherapp"][0].name, "0002_%s" % migration_name) self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]) def test_new_model(self): """Tests autodetection of new models.""" changes = self.get_changes([], [self.other_pony_food]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Pony") self.assertEqual([name for name, mgr in changes['otherapp'][0].operations[0].managers], ['food_qs', 'food_mgr', 'food_mgr_kwargs']) def test_old_model(self): """Tests deletion of old models.""" changes = self.get_changes([self.author_empty], []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["DeleteModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author") def test_add_field(self): """Tests autodetection of new fields.""" changes = self.get_changes([self.author_empty], [self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name") @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_date_fields_with_auto_now_not_asking_for_default(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"]) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now=True) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_date_fields_with_auto_now_add_not_asking_for_null_addition(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now_add]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"]) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_auto_now_add_addition') def test_add_date_fields_with_auto_now_add_asking_for_default(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now_add]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"]) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True) self.assertEqual(mocked_ask_method.call_count, 3) def test_remove_field(self): """Tests autodetection of removed fields.""" changes = self.get_changes([self.author_name], [self.author_empty]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RemoveField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name") def test_alter_field(self): """Tests autodetection of new fields.""" changes = self.get_changes([self.author_name], [self.author_name_longer]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) def test_supports_functools_partial(self): def _content_file_name(instance, filename, key, **kwargs): return '{}/{}'.format(instance, filename) def content_file_name(key, **kwargs): return functools.partial(_content_file_name, key, **kwargs) # An unchanged partial reference. before = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('file'))), ])] after = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('file'))), ])] changes = self.get_changes(before, after) self.assertNumberMigrations(changes, 'testapp', 0) # A changed partial reference. 
args_changed = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('other-file'))), ])] changes = self.get_changes(before, args_changed) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['AlterField']) # Can't use assertOperationFieldAttributes because we need the # deconstructed version, i.e., the exploded func/args/keywords rather # than the partial: we don't care if it's not the same instance of the # partial, only if it's the same source function, args, and keywords. value = changes['testapp'][0].operations[0].field.upload_to self.assertEqual( (_content_file_name, ('other-file',), {}), (value.func, value.args, value.keywords) ) kwargs_changed = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('file', spam='eggs'))), ])] changes = self.get_changes(before, kwargs_changed) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['AlterField']) value = changes['testapp'][0].operations[0].field.upload_to self.assertEqual( (_content_file_name, ('file',), {'spam': 'eggs'}), (value.func, value.args, value.keywords) ) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', side_effect=AssertionError("Should not have prompted for not null addition")) def test_alter_field_to_not_null_with_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name_default]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default='Ada Lovelace') @mock.patch( 'django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', return_value=models.NOT_PROVIDED, ) def test_alter_field_to_not_null_without_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name]) self.assertEqual(mocked_ask_method.call_count, 1) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default=models.NOT_PROVIDED) @mock.patch( 'django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', return_value='Some Name', ) def test_alter_field_to_not_null_oneoff_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name]) self.assertEqual(mocked_ask_method.call_count, 1) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=False) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default="Some Name") def test_rename_field(self): """Tests autodetection of renamed fields.""" changes = self.get_changes( [self.author_name], [self.author_name_renamed], MigrationQuestioner({"ask_rename": True}) ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="name", new_name="names") def test_rename_field_foreign_key_to_field(self): before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField(unique=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE, to_field='field')), ]), ] after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('renamed_field', models.IntegerField(unique=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE, to_field='renamed_field')), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) # Right number/type of migrations? self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField']) self.assertOperationAttributes(changes, 'app', 0, 0, old_name='field', new_name='renamed_field') def test_rename_foreign_object_fields(self): fields = ('first', 'second') renamed_fields = ('first_renamed', 'second_renamed') before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('first', models.IntegerField()), ('second', models.IntegerField()), ], options={'unique_together': {fields}}), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('first', models.IntegerField()), ('second', models.IntegerField()), ('foo', models.ForeignObject( 'app.Foo', models.CASCADE, from_fields=fields, to_fields=fields, )), ]), ] # Case 1: to_fields renames. after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('first_renamed', models.IntegerField()), ('second_renamed', models.IntegerField()), ], options={'unique_together': {renamed_fields}}), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('first', models.IntegerField()), ('second', models.IntegerField()), ('foo', models.ForeignObject( 'app.Foo', models.CASCADE, from_fields=fields, to_fields=renamed_fields, )), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'RenameField', 'AlterUniqueTogether']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='foo', old_name='first', new_name='first_renamed', ) self.assertOperationAttributes( changes, 'app', 0, 1, model_name='foo', old_name='second', new_name='second_renamed', ) # Case 2: from_fields renames. 
after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('first', models.IntegerField()), ('second', models.IntegerField()), ], options={'unique_together': {fields}}), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('first_renamed', models.IntegerField()), ('second_renamed', models.IntegerField()), ('foo', models.ForeignObject( 'app.Foo', models.CASCADE, from_fields=renamed_fields, to_fields=fields, )), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'RenameField']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='bar', old_name='first', new_name='first_renamed', ) self.assertOperationAttributes( changes, 'app', 0, 1, model_name='bar', old_name='second', new_name='second_renamed', ) def test_rename_field_preserved_db_column(self): """ RenameField is used if a field is renamed and db_column equal to the old field's column is added. """ before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField()), ]), ] after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('renamed_field', models.IntegerField(db_column='field')), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'AlterField']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='foo', old_name='field', new_name='renamed_field', ) self.assertOperationAttributes(changes, 'app', 0, 1, model_name='foo', name='renamed_field') self.assertEqual(changes['app'][0].operations[-1].field.deconstruct(), ( 'renamed_field', 'django.db.models.IntegerField', [], {'db_column': 'field'}, )) def test_rename_related_field_preserved_db_column(self): before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE)), ]), ] after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('renamed_foo', models.ForeignKey('app.Foo', models.CASCADE, db_column='foo_id')), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'AlterField']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='bar', old_name='foo', new_name='renamed_foo', ) self.assertOperationAttributes(changes, 'app', 0, 1, model_name='bar', name='renamed_foo') self.assertEqual(changes['app'][0].operations[-1].field.deconstruct(), ( 'renamed_foo', 'django.db.models.ForeignKey', [], {'to': 'app.Foo', 'on_delete': models.CASCADE, 'db_column': 'foo_id'}, )) def test_rename_model(self): """Tests autodetection of renamed models.""" changes = self.get_changes( [self.author_with_book, self.book], [self.author_renamed_with_book, self.book_with_author_renamed], MigrationQuestioner({"ask_rename_model": True}), ) # Right number/type of migrations? 
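
    # In model terms, the preserved-db_column rename above corresponds to going
    # from `field = models.IntegerField()` to
    # `renamed_field = models.IntegerField(db_column='field')`: the rename is a
    # pure state change, and the underlying database column is left untouched.
    # The related-field variant of the same scenario follows.
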
    def test_rename_related_field_preserved_db_column(self):
        before = [
            ModelState('app', 'Foo', [
                ('id', models.AutoField(primary_key=True)),
            ]),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('foo', models.ForeignKey('app.Foo', models.CASCADE)),
            ]),
        ]
        after = [
            ModelState('app', 'Foo', [
                ('id', models.AutoField(primary_key=True)),
            ]),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('renamed_foo', models.ForeignKey('app.Foo', models.CASCADE, db_column='foo_id')),
            ]),
        ]
        changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True}))
        self.assertNumberMigrations(changes, 'app', 1)
        self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'AlterField'])
        self.assertOperationAttributes(
            changes, 'app', 0, 0, model_name='bar', old_name='foo', new_name='renamed_foo',
        )
        self.assertOperationAttributes(changes, 'app', 0, 1, model_name='bar', name='renamed_foo')
        self.assertEqual(changes['app'][0].operations[-1].field.deconstruct(), (
            'renamed_foo',
            'django.db.models.ForeignKey',
            [],
            {'to': 'app.Foo', 'on_delete': models.CASCADE, 'db_column': 'foo_id'},
        ))

    def test_rename_model(self):
        """Tests autodetection of renamed models."""
        changes = self.get_changes(
            [self.author_with_book, self.book],
            [self.author_renamed_with_book, self.book_with_author_renamed],
            MigrationQuestioner({"ask_rename_model": True}),
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="Author", new_name="Writer")
        # Now that RenameModel handles related fields too, there should be
        # no AlterField for the related field.
        self.assertNumberMigrations(changes, 'otherapp', 0)

    def test_rename_m2m_through_model(self):
        """
        Tests autodetection of renamed models that are used in M2M relations as
        through models.
        """
        changes = self.get_changes(
            [self.author_with_m2m_through, self.publisher, self.contract],
            [self.author_with_renamed_m2m_through, self.publisher, self.contract_renamed],
            MigrationQuestioner({'ask_rename_model': True})
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['RenameModel'])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name='Contract', new_name='Deal')

    def test_rename_model_with_renamed_rel_field(self):
        """
        Tests autodetection of renamed models while simultaneously renaming one
        of the fields that relate to the renamed model.
        """
        changes = self.get_changes(
            [self.author_with_book, self.book],
            [self.author_renamed_with_book, self.book_with_field_and_author_renamed],
            MigrationQuestioner({"ask_rename": True, "ask_rename_model": True}),
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="Author", new_name="Writer")
        # Right number/type of migrations for related field rename?
        # Alter is already taken care of.
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ["RenameField"])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, old_name="author", new_name="writer")

    def test_rename_model_with_fks_in_different_position(self):
        """
        #24537 - The order of fields in a model does not influence the
        RenameModel detection.
        """
        before = [
            ModelState("testapp", "EntityA", [
                ("id", models.AutoField(primary_key=True)),
            ]),
            ModelState("testapp", "EntityB", [
                ("id", models.AutoField(primary_key=True)),
                ("some_label", models.CharField(max_length=255)),
                ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
            ]),
        ]
        after = [
            ModelState("testapp", "EntityA", [
                ("id", models.AutoField(primary_key=True)),
            ]),
            ModelState("testapp", "RenamedEntityB", [
                ("id", models.AutoField(primary_key=True)),
                ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
                ("some_label", models.CharField(max_length=255)),
            ]),
        ]
        changes = self.get_changes(before, after, MigrationQuestioner({"ask_rename_model": True}))
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="EntityB", new_name="RenamedEntityB")

    def test_rename_model_reverse_relation_dependencies(self):
        """
        The migration to rename a model pointed to by a foreign key in another
        app must run after the other app's migration that adds the foreign key
        with model's original name. Therefore, the renaming migration has a
        dependency on that other migration.
        """
        before = [
            ModelState('testapp', 'EntityA', [
                ('id', models.AutoField(primary_key=True)),
            ]),
            ModelState('otherapp', 'EntityB', [
                ('id', models.AutoField(primary_key=True)),
                ('entity_a', models.ForeignKey('testapp.EntityA', models.CASCADE)),
            ]),
        ]
        after = [
            ModelState('testapp', 'RenamedEntityA', [
                ('id', models.AutoField(primary_key=True)),
            ]),
            ModelState('otherapp', 'EntityB', [
                ('id', models.AutoField(primary_key=True)),
                ('entity_a', models.ForeignKey('testapp.RenamedEntityA', models.CASCADE)),
            ]),
        ]
        changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename_model': True}))
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertMigrationDependencies(changes, 'testapp', 0, [('otherapp', '__first__')])
        self.assertOperationTypes(changes, 'testapp', 0, ['RenameModel'])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name='EntityA', new_name='RenamedEntityA')

    def test_fk_dependency(self):
        """Having a ForeignKey automatically adds a dependency."""
        # Note that testapp (author) has no dependencies,
        # otherapp (book) depends on testapp (author),
        # thirdapp (edition) depends on otherapp (book)
        changes = self.get_changes([], [self.author_name, self.book, self.edition])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author")
        self.assertMigrationDependencies(changes, 'testapp', 0, [])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book")
        self.assertMigrationDependencies(changes, 'otherapp', 0, [("testapp", "auto_1")])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'thirdapp', 1)
        self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="Edition")
        self.assertMigrationDependencies(changes, 'thirdapp', 0, [("otherapp", "auto_1")])

    def test_proxy_fk_dependency(self):
        """FK dependencies still work on proxy models."""
        # Note that testapp (author) has no dependencies,
        # otherapp (book) depends on thirdapp (authorproxy)
        changes = self.get_changes([], [self.author_empty, self.author_proxy_third, self.book_proxy_fk])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author")
        self.assertMigrationDependencies(changes, 'testapp', 0, [])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book")
        self.assertMigrationDependencies(changes, 'otherapp', 0, [("thirdapp", "auto_1")])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'thirdapp', 1)
        self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="AuthorProxy")
        self.assertMigrationDependencies(changes, 'thirdapp', 0, [("testapp", "auto_1")])

    def test_same_app_no_fk_dependency(self):
        """
        A migration with a FK between two models of the same app
        does not have a dependency to itself.
        """
        changes = self.get_changes([], [self.author_with_publisher, self.publisher])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
        self.assertMigrationDependencies(changes, 'testapp', 0, [])

    def test_circular_fk_dependency(self):
        """
        Having a circular ForeignKey dependency automatically resolves the
        situation into 2 migrations on one side and 1 on the other.
        """
        changes = self.get_changes([], [self.author_with_book, self.book, self.publisher_with_book])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
        self.assertMigrationDependencies(changes, 'testapp', 0, [("otherapp", "auto_1")])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 2)
        self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"])
        self.assertOperationTypes(changes, 'otherapp', 1, ["AddField"])
        self.assertMigrationDependencies(changes, 'otherapp', 0, [])
        self.assertMigrationDependencies(changes, 'otherapp', 1, [("otherapp", "auto_1"), ("testapp", "auto_1")])
        # both split migrations should be `initial`
        self.assertTrue(changes['otherapp'][0].initial)
        self.assertTrue(changes['otherapp'][1].initial)
    def test_same_app_circular_fk_dependency(self):
        """
        A migration with a FK between two models of the same app does
        not have a dependency to itself.
        """
        changes = self.get_changes([], [self.author_with_publisher, self.publisher_with_author])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel", "AddField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
        self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher")
        self.assertMigrationDependencies(changes, 'testapp', 0, [])

    def test_same_app_circular_fk_dependency_with_unique_together_and_indexes(self):
        """
        #22275 - A migration with circular FK dependency does not try
        to create unique together constraint and indexes before creating all
        required fields first.
        """
        changes = self.get_changes([], [self.knight, self.rabbit])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'eggs', 1)
        self.assertOperationTypes(
            changes, 'eggs', 0, ["CreateModel", "CreateModel", "AddIndex", "AlterUniqueTogether"]
        )
        self.assertNotIn("unique_together", changes['eggs'][0].operations[0].options)
        self.assertNotIn("unique_together", changes['eggs'][0].operations[1].options)
        self.assertMigrationDependencies(changes, 'eggs', 0, [])

    def test_alter_db_table_add(self):
        """Tests detection for adding db_table in model's options."""
        changes = self.get_changes([self.author_empty], [self.author_with_db_table_options])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table="author_one")

    def test_alter_db_table_change(self):
        """Tests detection for changing db_table in model's options."""
        changes = self.get_changes([self.author_with_db_table_options], [self.author_with_new_db_table_options])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table="author_two")

    def test_alter_db_table_remove(self):
        """Tests detection for removing db_table in model's options."""
        changes = self.get_changes([self.author_with_db_table_options], [self.author_empty])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table=None)

    def test_alter_db_table_no_changes(self):
        """
        Alter_db_table doesn't generate a migration if no changes have been made.
        """
        changes = self.get_changes([self.author_with_db_table_options], [self.author_with_db_table_options])
        # Right number of migrations?
        self.assertEqual(len(changes), 0)

    def test_keep_db_table_with_model_change(self):
        """
        Tests that when a model changes but its db_table stays as-is, the
        autodetector must not create more than one operation.
        """
        changes = self.get_changes(
            [self.author_with_db_table_options],
            [self.author_renamed_with_db_table_options],
            MigrationQuestioner({"ask_rename_model": True}),
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor")

    def test_alter_db_table_with_model_change(self):
        """
        Tests that when both the model and its db_table change, the
        autodetector must create two operations.
        """
        changes = self.get_changes(
            [self.author_with_db_table_options],
            [self.author_renamed_with_new_db_table_options],
            MigrationQuestioner({"ask_rename_model": True}),
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel", "AlterModelTable"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="newauthor", table="author_three")

    def test_identical_regex_doesnt_alter(self):
        from_state = ModelState(
            "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[
                RegexValidator(
                    re.compile('^[-a-zA-Z0-9_]+\\Z'),
                    "Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens.",
                    'invalid'
                )
            ]))]
        )
        to_state = ModelState(
            "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[validate_slug]))]
        )
        changes = self.get_changes([from_state], [to_state])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 0)

    def test_different_regex_does_alter(self):
        from_state = ModelState(
            "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[
                RegexValidator(
                    re.compile('^[a-z]+\\Z', 32),
                    "Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens.",
                    'invalid'
                )
            ]))]
        )
        to_state = ModelState(
            "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[validate_slug]))]
        )
        changes = self.get_changes([from_state], [to_state])
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])

    def test_empty_foo_together(self):
        """
        #23452 - Empty unique/index_together shouldn't generate a migration.
        """
        # Explicitly testing for not specified, since this is the case after
        # a CreateModel operation w/o any definition on the original model
        model_state_not_specified = ModelState("a", "model", [("id", models.AutoField(primary_key=True))])
        # Explicitly testing for None, since this was the issue in #23452 after
        # an AlterFooTogether operation with e.g. () as value
        model_state_none = ModelState("a", "model", [
            ("id", models.AutoField(primary_key=True))
        ], {
            "index_together": None,
            "unique_together": None,
        })
        # Explicitly testing for the empty set, since we now always have sets.
        # During removal (('col1', 'col2'),) --> () this becomes set([])
        model_state_empty = ModelState("a", "model", [
            ("id", models.AutoField(primary_key=True))
        ], {
            "index_together": set(),
            "unique_together": set(),
        })

        def test(from_state, to_state, msg):
            changes = self.get_changes([from_state], [to_state])
            if changes:
                ops = ', '.join(o.__class__.__name__ for o in changes['a'][0].operations)
                self.fail('Created operation(s) %s from %s' % (ops, msg))

        tests = (
            (model_state_not_specified, model_state_not_specified, '"not specified" to "not specified"'),
            (model_state_not_specified, model_state_none, '"not specified" to "None"'),
            (model_state_not_specified, model_state_empty, '"not specified" to "empty"'),
            (model_state_none, model_state_not_specified, '"None" to "not specified"'),
            (model_state_none, model_state_none, '"None" to "None"'),
            (model_state_none, model_state_empty, '"None" to "empty"'),
            (model_state_empty, model_state_not_specified, '"empty" to "not specified"'),
            (model_state_empty, model_state_none, '"empty" to "None"'),
            (model_state_empty, model_state_empty, '"empty" to "empty"'),
        )
        for t in tests:
            test(*t)

    def test_create_model_with_indexes(self):
        """Test creation of new model with indexes already defined."""
        author = ModelState('otherapp', 'Author', [
            ('id', models.AutoField(primary_key=True)),
            ('name', models.CharField(max_length=200)),
        ], {'indexes': [models.Index(fields=['name'], name='create_model_with_indexes_idx')]})
        changes = self.get_changes([], [author])
        added_index = models.Index(fields=['name'], name='create_model_with_indexes_idx')
        # Right number of migrations?
        self.assertEqual(len(changes['otherapp']), 1)
        # Right number of actions?
        migration = changes['otherapp'][0]
        self.assertEqual(len(migration.operations), 2)
        # Right actions order?
        self.assertOperationTypes(changes, 'otherapp', 0, ['CreateModel', 'AddIndex'])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='Author')
        self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='author', index=added_index)

    def test_add_indexes(self):
        """Test change detection of new indexes."""
        changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_indexes])
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ['AddIndex'])
        added_index = models.Index(fields=['author', 'title'], name='book_title_author_idx')
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', index=added_index)

    def test_remove_indexes(self):
        """Test change detection of removed indexes."""
        changes = self.get_changes([self.author_empty, self.book_indexes], [self.author_empty, self.book])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveIndex'])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', name='book_title_author_idx')

    def test_order_fields_indexes(self):
        """Test change detection of reordering of fields in indexes."""
        changes = self.get_changes(
            [self.author_empty, self.book_indexes], [self.author_empty, self.book_unordered_indexes]
        )
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveIndex', 'AddIndex'])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', name='book_title_author_idx')
        added_index = models.Index(fields=['title', 'author'], name='book_author_title_idx')
        self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='book', index=added_index)

    def test_create_model_with_check_constraint(self):
        """Test creation of new model with constraints already defined."""
        author = ModelState('otherapp', 'Author', [
            ('id', models.AutoField(primary_key=True)),
            ('name', models.CharField(max_length=200)),
        ], {'constraints': [models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')]})
        changes = self.get_changes([], [author])
        added_constraint = models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')
        # Right number of migrations?
        self.assertEqual(len(changes['otherapp']), 1)
        # Right number of actions?
        migration = changes['otherapp'][0]
        self.assertEqual(len(migration.operations), 2)
        # Right actions order?
        self.assertOperationTypes(changes, 'otherapp', 0, ['CreateModel', 'AddConstraint'])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='Author')
        self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='author', constraint=added_constraint)

    def test_add_constraints(self):
        """Test change detection of new constraints."""
        changes = self.get_changes([self.author_name], [self.author_name_check_constraint])
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['AddConstraint'])
        added_constraint = models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')
        self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name='author', constraint=added_constraint)

    def test_remove_constraints(self):
        """Test change detection of removed constraints."""
        changes = self.get_changes([self.author_name_check_constraint], [self.author_name])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['RemoveConstraint'])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name='author', name='name_contains_bob')
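
    # In model terms, the constraint fixtures used by the tests above are
    # equivalent to declaring the constraint in Meta (a sketch for reference,
    # not a fixture these tests use):
    #
    #   class Author(models.Model):
    #       name = models.CharField(max_length=200)
    #
    #       class Meta:
    #           constraints = [
    #               models.CheckConstraint(
    #                   check=models.Q(name__contains='Bob'),
    #                   name='name_contains_bob',
    #               ),
    #           ]
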
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['RemoveConstraint']) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name='author', name='name_contains_bob') def test_add_foo_together(self): """Tests index/unique_together detection.""" changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_foo_together]) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("author", "title")}) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("author", "title")}) def test_remove_foo_together(self): """Tests index/unique_together detection.""" changes = self.get_changes([self.author_empty, self.book_foo_together], [self.author_empty, self.book]) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together=set()) def test_foo_together_remove_fk(self): """Tests unique_together and field removal detection & ordering""" changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_with_no_author] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, [ "AlterUniqueTogether", "AlterIndexTogether", "RemoveField" ]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 2, model_name="book", name="author") def test_foo_together_no_changes(self): """ index/unique_together doesn't generate a migration if no changes have been made. """ changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_foo_together] ) # Right number of migrations? self.assertEqual(len(changes), 0) def test_foo_together_ordering(self): """ index/unique_together also triggers on ordering changes. """ changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_foo_together_2] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("title", "author")}) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("title", "author")}) def test_add_field_and_foo_together(self): """ Added fields will be created before using them in index/unique_together. """ changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_foo_together_3]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AddField", "AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", unique_together={("title", "newfield")}) self.assertOperationAttributes(changes, "otherapp", 0, 2, name="book", index_together={("title", "newfield")}) def test_create_model_and_unique_together(self): author = ModelState("otherapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ]) book_with_author = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("otherapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "author")}, "unique_together": {("title", "author")}, }) changes = self.get_changes([self.book_with_no_author], [author, book_with_author]) # Right number of migrations? self.assertEqual(len(changes['otherapp']), 1) # Right number of actions? migration = changes['otherapp'][0] self.assertEqual(len(migration.operations), 4) # Right actions order? self.assertOperationTypes( changes, 'otherapp', 0, ['CreateModel', 'AddField', 'AlterUniqueTogether', 'AlterIndexTogether'] ) def test_remove_field_and_foo_together(self): """ Removed fields will be removed after updating index/unique_together. """ changes = self.get_changes( [self.author_empty, self.book_foo_together_3], [self.author_empty, self.book_foo_together] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether", "RemoveField"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("author", "title")}) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("author", "title")}) self.assertOperationAttributes(changes, "otherapp", 0, 2, model_name="book", name="newfield") def test_rename_field_and_foo_together(self): """ Removed fields will be removed after updating index/unique_together. """ changes = self.get_changes( [self.author_empty, self.book_foo_together_3], [self.author_empty, self.book_foo_together_4], MigrationQuestioner({"ask_rename": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["RenameField", "AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", unique_together={ ("title", "newfield2") }) self.assertOperationAttributes(changes, "otherapp", 0, 2, name="book", index_together={("title", "newfield2")}) def test_proxy(self): """The autodetector correctly deals with proxy models.""" # First, we test adding a proxy model changes = self.get_changes([self.author_empty], [self.author_empty, self.author_proxy]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"]) self.assertOperationAttributes( changes, "testapp", 0, 0, name="AuthorProxy", options={"proxy": True, "indexes": [], "constraints": []} ) # Now, we test turning a proxy model into a non-proxy model # It should delete the proxy then make the real one changes = self.get_changes( [self.author_empty, self.author_proxy], [self.author_empty, self.author_proxy_notproxy] ) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="AuthorProxy") self.assertOperationAttributes(changes, "testapp", 0, 1, name="AuthorProxy", options={}) def test_proxy_custom_pk(self): """ #23415 - The autodetector must correctly deal with custom FK on proxy models. """ # First, we test the default pk field name changes = self.get_changes([], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]) # The field name the FK on the book model points to self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'id') # Now, we test the custom pk field name changes = self.get_changes([], [self.author_custom_pk, self.author_proxy_third, self.book_proxy_fk]) # The field name the FK on the book model points to self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'pk_field') def test_proxy_to_mti_with_fk_to_proxy(self): # First, test the pk table and field name. changes = self.get_changes( [], [self.author_empty, self.author_proxy_third, self.book_proxy_fk], ) self.assertEqual( changes['otherapp'][0].operations[0].fields[2][1].remote_field.model._meta.db_table, 'testapp_author', ) self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'id') # Change AuthorProxy to use MTI. changes = self.get_changes( [self.author_empty, self.author_proxy_third, self.book_proxy_fk], [self.author_empty, self.author_proxy_third_notproxy, self.book_proxy_fk], ) # Right number/type of migrations for the AuthorProxy model? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ['DeleteModel', 'CreateModel']) # Right number/type of migrations for the Book model with a FK to # AuthorProxy? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField']) # otherapp should depend on thirdapp. self.assertMigrationDependencies(changes, 'otherapp', 0, [('thirdapp', 'auto_1')]) # Now, test the pk table and field name. self.assertEqual( changes['otherapp'][0].operations[0].field.remote_field.model._meta.db_table, 'thirdapp_authorproxy', ) self.assertEqual(changes['otherapp'][0].operations[0].field.remote_field.field_name, 'author_ptr') def test_proxy_to_mti_with_fk_to_proxy_proxy(self): # First, test the pk table and field name. changes = self.get_changes( [], [self.author_empty, self.author_proxy, self.author_proxy_proxy, self.book_proxy_proxy_fk], ) self.assertEqual( changes['otherapp'][0].operations[0].fields[1][1].remote_field.model._meta.db_table, 'testapp_author', ) self.assertEqual(changes['otherapp'][0].operations[0].fields[1][1].remote_field.field_name, 'id') # Change AuthorProxy to use MTI. FK still points to AAuthorProxyProxy, # a proxy of AuthorProxy. changes = self.get_changes( [self.author_empty, self.author_proxy, self.author_proxy_proxy, self.book_proxy_proxy_fk], [self.author_empty, self.author_proxy_notproxy, self.author_proxy_proxy, self.book_proxy_proxy_fk], ) # Right number/type of migrations for the AuthorProxy model? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['DeleteModel', 'CreateModel']) # Right number/type of migrations for the Book model with a FK to # AAuthorProxyProxy? 
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField'])
        # otherapp should depend on testapp.
        self.assertMigrationDependencies(changes, 'otherapp', 0, [('testapp', 'auto_1')])
        # Now, test the pk table and field name.
        self.assertEqual(
            changes['otherapp'][0].operations[0].field.remote_field.model._meta.db_table,
            'testapp_authorproxy',
        )
        self.assertEqual(changes['otherapp'][0].operations[0].field.remote_field.field_name, 'author_ptr')

    def test_unmanaged_create(self):
        """The autodetector correctly deals with unmanaged models."""
        # First, we test adding an unmanaged model
        changes = self.get_changes([self.author_empty], [self.author_empty, self.author_unmanaged])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="AuthorUnmanaged", options={"managed": False})

    def test_unmanaged_delete(self):
        changes = self.get_changes([self.author_empty, self.author_unmanaged], [self.author_empty])
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['DeleteModel'])

    def test_unmanaged_to_managed(self):
        # Now, we test turning an unmanaged model into a managed model
        changes = self.get_changes(
            [self.author_empty, self.author_unmanaged], [self.author_empty, self.author_unmanaged_managed]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelOptions"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="authorunmanaged", options={})

    def test_managed_to_unmanaged(self):
        # Now, we turn managed to unmanaged.
        changes = self.get_changes(
            [self.author_empty, self.author_unmanaged_managed], [self.author_empty, self.author_unmanaged]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="authorunmanaged", options={"managed": False})

    def test_unmanaged_custom_pk(self):
        """
        #23415 - The autodetector must correctly deal with custom FK on
        unmanaged models.
        """
        # First, we test the default pk field name
        changes = self.get_changes([], [self.author_unmanaged_default_pk, self.book])
        # The field name the FK on the book model points to
        self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'id')
        # Now, we test the custom pk field name
        changes = self.get_changes([], [self.author_unmanaged_custom_pk, self.book])
        # The field name the FK on the book model points to
        self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'pk_field')

    @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
    def test_swappable(self):
        with isolate_lru_cache(apps.get_swappable_settings_name):
            changes = self.get_changes([self.custom_user], [self.custom_user, self.author_with_custom_user])
        # Right number/type of migrations?
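        # The FK to the swappable model is recorded against the
        # ('__setting__', 'AUTH_USER_MODEL') placeholder rather than a
        # concrete app, as the dependency assertion below shows.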
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, [("__setting__", "AUTH_USER_MODEL")]) def test_swappable_changed(self): with isolate_lru_cache(apps.get_swappable_settings_name): before = self.make_project_state([self.custom_user, self.author_with_user]) with override_settings(AUTH_USER_MODEL="thirdapp.CustomUser"): after = self.make_project_state([self.custom_user, self.author_with_custom_user]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name="author", name='user') fk_field = changes['testapp'][0].operations[0].field to_model = '%s.%s' % ( fk_field.remote_field.model._meta.app_label, fk_field.remote_field.model._meta.object_name, ) self.assertEqual(to_model, 'thirdapp.CustomUser') def test_add_field_with_default(self): """#22030 - Adding a field with a default should work.""" changes = self.get_changes([self.author_empty], [self.author_name_default]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="name") def test_custom_deconstructible(self): """ Two instances which deconstruct to the same value aren't considered a change. """ changes = self.get_changes([self.author_name_deconstructible_1], [self.author_name_deconstructible_2]) # Right number of migrations? self.assertEqual(len(changes), 0) def test_deconstruct_field_kwarg(self): """Field instances are handled correctly by nested deconstruction.""" changes = self.get_changes([self.author_name_deconstructible_3], [self.author_name_deconstructible_4]) self.assertEqual(changes, {}) def test_deconstructible_list(self): """Nested deconstruction descends into lists.""" # When lists contain items that deconstruct to identical values, those lists # should be considered equal for the purpose of detecting state changes # (even if the original items are unequal). changes = self.get_changes( [self.author_name_deconstructible_list_1], [self.author_name_deconstructible_list_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed lists should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_list_1], [self.author_name_deconstructible_list_3] ) self.assertEqual(len(changes), 1) def test_deconstructible_tuple(self): """Nested deconstruction descends into tuples.""" # When tuples contain items that deconstruct to identical values, those tuples # should be considered equal for the purpose of detecting state changes # (even if the original items are unequal). 
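        # e.g. a default of (obj_a, 123) in the old state and a freshly
        # constructed (obj_b, 123) in the new state, where obj_a and obj_b
        # deconstruct identically, should yield no detected changes.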
changes = self.get_changes( [self.author_name_deconstructible_tuple_1], [self.author_name_deconstructible_tuple_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed tuples should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_tuple_1], [self.author_name_deconstructible_tuple_3] ) self.assertEqual(len(changes), 1) def test_deconstructible_dict(self): """Nested deconstruction descends into dict values.""" # When dicts contain items whose values deconstruct to identical values, # those dicts should be considered equal for the purpose of detecting # state changes (even if the original values are unequal). changes = self.get_changes( [self.author_name_deconstructible_dict_1], [self.author_name_deconstructible_dict_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed dicts should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_dict_1], [self.author_name_deconstructible_dict_3] ) self.assertEqual(len(changes), 1) def test_nested_deconstructible_objects(self): """ Nested deconstruction is applied recursively to the args/kwargs of deconstructed objects. """ # If the items within a deconstructed object's args/kwargs have the same # deconstructed values - whether or not the items themselves are different # instances - then the object as a whole is regarded as unchanged. changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_2] ) self.assertEqual(changes, {}) # Differences that exist solely within the args list of a deconstructed object # should be reported as changes changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_changed_arg] ) self.assertEqual(len(changes), 1) # Additional args should also be reported as a change changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_extra_arg] ) self.assertEqual(len(changes), 1) # Differences that exist solely within the kwargs dict of a deconstructed object # should be reported as changes changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_changed_kwarg] ) self.assertEqual(len(changes), 1) # Additional kwargs should also be reported as a change changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_extra_kwarg] ) self.assertEqual(len(changes), 1) def test_deconstruct_type(self): """ #22951 -- Uninstantiated classes with deconstruct are correctly returned by deep_deconstruct during serialization. """ author = ModelState( "testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField( max_length=200, # IntegerField intentionally not instantiated. default=models.IntegerField, )) ], ) changes = self.get_changes([], [author]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) def test_replace_string_with_foreignkey(self): """ #22300 - Adding an FK in the same "spot" as a deleted CharField should work. """ changes = self.get_changes([self.author_with_publisher_string], [self.author_with_publisher, self.publisher]) # Right number/type of migrations? 
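        # The swap is detected as RemoveField + AddField (with the FK's target
        # model created first), not as a single AlterField.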
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "RemoveField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Publisher") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publisher_name") self.assertOperationAttributes(changes, 'testapp', 0, 2, name="publisher") def test_foreign_key_removed_before_target_model(self): """ Removing an FK and the model it targets in the same change must remove the FK field before the model to maintain consistency. """ changes = self.get_changes( [self.author_with_publisher, self.publisher], [self.author_name] ) # removes both the model and FK # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RemoveField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publisher") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Publisher") @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_many_to_many(self, mocked_ask_method): """#22435 - Adding a ManyToManyField should not prompt for a default.""" changes = self.get_changes([self.author_empty, self.publisher], [self.author_with_m2m, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers") def test_alter_many_to_many(self): changes = self.get_changes( [self.author_with_m2m, self.publisher], [self.author_with_m2m_blank, self.publisher] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers") def test_create_with_through_model(self): """ Adding a m2m with a through model and the models that use it should be ordered correctly. """ changes = self.get_changes([], [self.author_with_m2m_through, self.publisher, self.contract]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, [ 'CreateModel', 'CreateModel', 'CreateModel', 'AddField', ]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name='Author') self.assertOperationAttributes(changes, 'testapp', 0, 1, name='Publisher') self.assertOperationAttributes(changes, 'testapp', 0, 2, name='Contract') self.assertOperationAttributes(changes, 'testapp', 0, 3, model_name='author', name='publishers') def test_many_to_many_removed_before_through_model(self): """ Removing a ManyToManyField and the "through" model in the same change must remove the field before the model to maintain consistency. """ changes = self.get_changes( [self.book_with_multiple_authors_through_attribution, self.author_name, self.attribution], [self.book_with_no_author, self.author_name], ) # Remove both the through model and ManyToMany # Right number/type of migrations? 
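        # RemoveField for 'authors' must precede DeleteModel('Attribution'),
        # otherwise the through table would be dropped while the m2m still
        # references it.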
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveField', 'DeleteModel']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='authors', model_name='book') self.assertOperationAttributes(changes, 'otherapp', 0, 1, name='Attribution') def test_many_to_many_removed_before_through_model_2(self): """ Removing a model that contains a ManyToManyField and the "through" model in the same change must remove the field before the model to maintain consistency. """ changes = self.get_changes( [self.book_with_multiple_authors_through_attribution, self.author_name, self.attribution], [self.author_name], ) # Remove both the through model and ManyToMany # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveField', 'DeleteModel', 'DeleteModel']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='authors', model_name='book') self.assertOperationAttributes(changes, 'otherapp', 0, 1, name='Attribution') self.assertOperationAttributes(changes, 'otherapp', 0, 2, name='Book') def test_m2m_w_through_multistep_remove(self): """ A model with a m2m field that specifies a "through" model cannot be removed in the same migration as that through model as the schema will pass through an inconsistent state. The autodetector should produce two migrations to avoid this issue. """ changes = self.get_changes([self.author_with_m2m_through, self.publisher, self.contract], [self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, [ "RemoveField", "RemoveField", "DeleteModel", "DeleteModel" ]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", model_name='contract') self.assertOperationAttributes(changes, "testapp", 0, 1, name="publisher", model_name='contract') self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 3, name="Contract") def test_concrete_field_changed_to_many_to_many(self): """ #23938 - Changing a concrete field into a ManyToManyField first removes the concrete field and then adds the m2m field. """ changes = self.get_changes([self.author_with_former_m2m], [self.author_with_m2m, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name='Publisher') self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 2, name="publishers", model_name='author') def test_many_to_many_changed_to_concrete_field(self): """ #23938 - Changing a ManyToManyField into a concrete field first removes the m2m field and then adds the concrete field. """ changes = self.get_changes([self.author_with_m2m, self.publisher], [self.author_with_former_m2m]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "AddField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 2, name='Publisher') self.assertOperationFieldAttributes(changes, 'testapp', 0, 1, max_length=100) def test_non_circular_foreignkey_dependency_removal(self): """ If two models with a ForeignKey from one to the other are removed at the same time, the autodetector should remove them in the correct order. """ changes = self.get_changes([self.author_with_publisher, self.publisher_with_author], []) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "DeleteModel", "DeleteModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", model_name='publisher') self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 2, name="Publisher") def test_alter_model_options(self): """Changing a model's options should make a change.""" changes = self.get_changes([self.author_empty], [self.author_with_options]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, options={ "permissions": [('can_hire', 'Can hire')], "verbose_name": "Authi", }) # Changing them back to empty should also make a change changes = self.get_changes([self.author_with_options], [self.author_empty]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", options={}) def test_alter_model_options_proxy(self): """Changing a proxy model's options should also make a change.""" changes = self.get_changes( [self.author_proxy, self.author_empty], [self.author_proxy_options, self.author_empty] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="authorproxy", options={ "verbose_name": "Super Author" }) def test_set_alter_order_with_respect_to(self): """Setting order_with_respect_to adds a field.""" changes = self.get_changes([self.book, self.author_with_book], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterOrderWithRespectTo"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="author", order_with_respect_to="book") def test_add_alter_order_with_respect_to(self): """ Setting order_with_respect_to when adding the FK too does things in the right order. """ changes = self.get_changes([self.author_name], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AlterOrderWithRespectTo"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name="author", name="book") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="author", order_with_respect_to="book") def test_remove_alter_order_with_respect_to(self): """ Removing order_with_respect_to when removing the FK too does things in the right order. """ changes = self.get_changes([self.book, self.author_with_book_order_wrt], [self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterOrderWithRespectTo", "RemoveField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="author", order_with_respect_to=None) self.assertOperationAttributes(changes, 'testapp', 0, 1, model_name="author", name="book") def test_add_model_order_with_respect_to(self): """ Setting order_with_respect_to when adding the whole model does things in the right order. """ changes = self.get_changes([], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes( changes, 'testapp', 0, 0, name="Author", options={'order_with_respect_to': 'book'} ) self.assertNotIn("_order", [name for name, field in changes['testapp'][0].operations[0].fields]) def test_alter_model_managers(self): """ Changing the model managers adds a new operation. """ changes = self.get_changes([self.other_pony], [self.other_pony_food]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["AlterModelManagers"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="pony") self.assertEqual([name for name, mgr in changes['otherapp'][0].operations[0].managers], ['food_qs', 'food_mgr', 'food_mgr_kwargs']) self.assertEqual(changes['otherapp'][0].operations[0].managers[1][1].args, ('a', 'b', 1, 2)) self.assertEqual(changes['otherapp'][0].operations[0].managers[2][1].args, ('x', 'y', 3, 4)) def test_swappable_first_inheritance(self): """Swappable models get their CreateModel first.""" changes = self.get_changes([], [self.custom_user, self.aardvark]) # Right number/type of migrations? 
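        # The swappable base (CustomUser) must be created before Aardvark,
        # which inherits from it.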
self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="CustomUser") self.assertOperationAttributes(changes, 'thirdapp', 0, 1, name="Aardvark") def test_default_related_name_option(self): model_state = ModelState('app', 'model', [ ('id', models.AutoField(primary_key=True)), ], options={'default_related_name': 'related_name'}) changes = self.get_changes([], [model_state]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['CreateModel']) self.assertOperationAttributes( changes, 'app', 0, 0, name='model', options={'default_related_name': 'related_name'}, ) altered_model_state = ModelState('app', 'Model', [ ('id', models.AutoField(primary_key=True)), ]) changes = self.get_changes([model_state], [altered_model_state]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['AlterModelOptions']) self.assertOperationAttributes(changes, 'app', 0, 0, name='model', options={}) @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser") def test_swappable_first_setting(self): """Swappable models get their CreateModel first.""" with isolate_lru_cache(apps.get_swappable_settings_name): changes = self.get_changes([], [self.custom_user_no_inherit, self.aardvark]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="CustomUser") self.assertOperationAttributes(changes, 'thirdapp', 0, 1, name="Aardvark") def test_bases_first(self): """Bases of other models come first.""" changes = self.get_changes([], [self.aardvark_based_on_author, self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Aardvark") def test_multiple_bases(self): """#23956 - Inheriting models doesn't move *_ptr fields into AddField operations.""" A = ModelState("app", "A", [("a_id", models.AutoField(primary_key=True))]) B = ModelState("app", "B", [("b_id", models.AutoField(primary_key=True))]) C = ModelState("app", "C", [], bases=("app.A", "app.B")) D = ModelState("app", "D", [], bases=("app.A", "app.B")) E = ModelState("app", "E", [], bases=("app.A", "app.B")) changes = self.get_changes([], [A, B, C, D, E]) # Right number/type of migrations? self.assertNumberMigrations(changes, "app", 1) self.assertOperationTypes(changes, "app", 0, [ "CreateModel", "CreateModel", "CreateModel", "CreateModel", "CreateModel" ]) self.assertOperationAttributes(changes, "app", 0, 0, name="A") self.assertOperationAttributes(changes, "app", 0, 1, name="B") self.assertOperationAttributes(changes, "app", 0, 2, name="C") self.assertOperationAttributes(changes, "app", 0, 3, name="D") self.assertOperationAttributes(changes, "app", 0, 4, name="E") def test_proxy_bases_first(self): """Bases of proxies come first.""" changes = self.get_changes([], [self.author_empty, self.author_proxy, self.author_proxy_proxy]) # Right number/type of migrations? 
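        # Creation must follow the proxy chain: the concrete Author first,
        # then AuthorProxy, then the proxy-of-a-proxy.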
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel", "CreateModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author")
        self.assertOperationAttributes(changes, 'testapp', 0, 1, name="AuthorProxy")
        self.assertOperationAttributes(changes, 'testapp', 0, 2, name="AAuthorProxyProxy")

    def test_pk_fk_included(self):
        """
        A relation used as the primary key is kept as part of CreateModel.
        """
        changes = self.get_changes([], [self.aardvark_pk_fk_author, self.author_name])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author")
        self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Aardvark")

    def test_first_dependency(self):
        """
        A dependency to an app with no migrations uses __first__.
        """
        # Load graph
        loader = MigrationLoader(connection)
        before = self.make_project_state([])
        after = self.make_project_state([self.book_migrations_fk])
        after.real_apps = ["migrations"]
        autodetector = MigrationAutodetector(before, after)
        changes = autodetector._detect_changes(graph=loader.graph)
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book")
        self.assertMigrationDependencies(changes, 'otherapp', 0, [("migrations", "__first__")])

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_last_dependency(self):
        """
        A dependency to an app with existing migrations uses the last migration
        of that app.
        """
        # Load graph
        loader = MigrationLoader(connection)
        before = self.make_project_state([])
        after = self.make_project_state([self.book_migrations_fk])
        after.real_apps = ["migrations"]
        autodetector = MigrationAutodetector(before, after)
        changes = autodetector._detect_changes(graph=loader.graph)
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book")
        self.assertMigrationDependencies(changes, 'otherapp', 0, [("migrations", "0002_second")])

    def test_alter_fk_before_model_deletion(self):
        """
        ForeignKeys are altered _before_ the model they used to refer to is
        deleted.
        """
        changes = self.get_changes(
            [self.author_name, self.publisher_with_author],
            [self.aardvark_testapp, self.publisher_with_aardvark_author]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "AlterField", "DeleteModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Aardvark")
        self.assertOperationAttributes(changes, 'testapp', 0, 1, name="author")
        self.assertOperationAttributes(changes, 'testapp', 0, 2, name="Author")

    def test_fk_dependency_other_app(self):
        """
        #23100 - ForeignKeys correctly depend on other apps' models.
        """
        changes = self.get_changes([self.author_name, self.book], [self.author_with_book, self.book])
        # Right number/type of migrations?
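        # The new FK makes testapp depend on otherapp's first migration,
        # recorded as ('otherapp', '__first__').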
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="book") self.assertMigrationDependencies(changes, 'testapp', 0, [("otherapp", "__first__")]) def test_circular_dependency_mixed_addcreate(self): """ #23315 - The dependency resolver knows to put all CreateModel before AddField and not become unsolvable. """ address = ModelState("a", "Address", [ ("id", models.AutoField(primary_key=True)), ("country", models.ForeignKey("b.DeliveryCountry", models.CASCADE)), ]) person = ModelState("a", "Person", [ ("id", models.AutoField(primary_key=True)), ]) apackage = ModelState("b", "APackage", [ ("id", models.AutoField(primary_key=True)), ("person", models.ForeignKey("a.Person", models.CASCADE)), ]) country = ModelState("b", "DeliveryCountry", [ ("id", models.AutoField(primary_key=True)), ]) changes = self.get_changes([], [address, person, apackage, country]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'a', 0, ["CreateModel", "CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertOperationTypes(changes, 'b', 0, ["CreateModel", "CreateModel"]) @override_settings(AUTH_USER_MODEL="a.Tenant") def test_circular_dependency_swappable(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models. """ with isolate_lru_cache(apps.get_swappable_settings_name): tenant = ModelState("a", "Tenant", [ ("id", models.AutoField(primary_key=True)), ("primary_address", models.ForeignKey("b.Address", models.CASCADE))], bases=(AbstractBaseUser,) ) address = ModelState("b", "Address", [ ("id", models.AutoField(primary_key=True)), ("tenant", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)), ]) changes = self.get_changes([], [address, tenant]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'a', 0, []) self.assertMigrationDependencies(changes, 'a', 1, [('a', 'auto_1'), ('b', 'auto_1')]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'b', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'b', 0, [('__setting__', 'AUTH_USER_MODEL')]) @override_settings(AUTH_USER_MODEL="b.Tenant") def test_circular_dependency_swappable2(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models but with the swappable not being the first migrated model. """ with isolate_lru_cache(apps.get_swappable_settings_name): address = ModelState("a", "Address", [ ("id", models.AutoField(primary_key=True)), ("tenant", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)), ]) tenant = ModelState("b", "Tenant", [ ("id", models.AutoField(primary_key=True)), ("primary_address", models.ForeignKey("a.Address", models.CASCADE))], bases=(AbstractBaseUser,) ) changes = self.get_changes([], [address, tenant]) # Right number/type of migrations? 
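        # The a <-> b cycle is broken by splitting app 'a' in two: CreateModel
        # first, then a follow-up migration adding the FK to the swappable
        # model.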
self.assertNumberMigrations(changes, 'a', 2) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'a', 0, []) self.assertMigrationDependencies(changes, 'a', 1, [('__setting__', 'AUTH_USER_MODEL'), ('a', 'auto_1')]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'b', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'b', 0, [('a', 'auto_1')]) @override_settings(AUTH_USER_MODEL="a.Person") def test_circular_dependency_swappable_self(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models. """ with isolate_lru_cache(apps.get_swappable_settings_name): person = ModelState("a", "Person", [ ("id", models.AutoField(primary_key=True)), ("parent1", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE, related_name='children')) ]) changes = self.get_changes([], [person]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 1) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'a', 0, []) @override_settings(AUTH_USER_MODEL='a.User') def test_swappable_circular_multi_mti(self): with isolate_lru_cache(apps.get_swappable_settings_name): parent = ModelState('a', 'Parent', [ ('user', models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)) ]) child = ModelState('a', 'Child', [], bases=('a.Parent',)) user = ModelState('a', 'User', [], bases=(AbstractBaseUser, 'a.Child')) changes = self.get_changes([], [parent, child, user]) self.assertNumberMigrations(changes, 'a', 1) self.assertOperationTypes(changes, 'a', 0, ['CreateModel', 'CreateModel', 'CreateModel', 'AddField']) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_blank_textfield_and_charfield(self, mocked_ask_method): """ #23405 - Adding a NOT NULL and blank `CharField` or `TextField` without default should not prompt for a default. """ changes = self.get_changes([self.author_empty], [self.author_with_biography_blank]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition') def test_add_non_blank_textfield_and_charfield(self, mocked_ask_method): """ #23405 - Adding a NOT NULL and non-blank `CharField` or `TextField` without default should prompt for a default. """ changes = self.get_changes([self.author_empty], [self.author_with_biography_non_blank]) self.assertEqual(mocked_ask_method.call_count, 2) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0) def test_mti_inheritance_model_removal(self): Animal = ModelState('app', 'Animal', [ ("id", models.AutoField(primary_key=True)), ]) Dog = ModelState('app', 'Dog', [], bases=('app.Animal',)) changes = self.get_changes([Animal, Dog], [Animal]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['DeleteModel']) self.assertOperationAttributes(changes, 'app', 0, 0, name='Dog')
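    # The tests above mostly drive the autodetector through the get_changes()
    # helper. A minimal sketch of the flow it wraps (the direct form appears
    # in test_swappable_changed and test_first_dependency above):
    #
    #     before = self.make_project_state(old_model_states)
    #     after = self.make_project_state(new_model_states)
    #     autodetector = MigrationAutodetector(before, after)
    #     changes = autodetector._detect_changes()
    #
    # _detect_changes() returns a dict mapping app labels to lists of
    # generated Migration objects, which assertNumberMigrations(),
    # assertOperationTypes(), and assertOperationAttributes() then inspect.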
9a6d62868e82e75c696a3f2a547b9fddb001cf71a13d39b8d6e69699e7b4bdb1
from django.apps.registry import Apps from django.contrib.contenttypes.fields import GenericForeignKey from django.db import models from django.db.migrations.exceptions import InvalidBasesError from django.db.migrations.operations import ( AddField, AlterField, DeleteModel, RemoveField, ) from django.db.migrations.state import ( ModelState, ProjectState, get_related_models_recursive, ) from django.test import SimpleTestCase, override_settings from django.test.utils import isolate_apps from .models import ( FoodManager, FoodQuerySet, ModelWithCustomBase, NoMigrationFoodManager, UnicodeModel, ) class StateTests(SimpleTestCase): """ Tests state construction, rendering and modification by operations. """ def test_create(self): """ Tests making a ProjectState from an Apps """ new_apps = Apps(["migrations"]) class Author(models.Model): name = models.CharField(max_length=255) bio = models.TextField() age = models.IntegerField(blank=True, null=True) class Meta: app_label = "migrations" apps = new_apps unique_together = ["name", "bio"] index_together = ["bio", "age"] class AuthorProxy(Author): class Meta: app_label = "migrations" apps = new_apps proxy = True ordering = ["name"] class SubAuthor(Author): width = models.FloatField(null=True) class Meta: app_label = "migrations" apps = new_apps class Book(models.Model): title = models.CharField(max_length=1000) author = models.ForeignKey(Author, models.CASCADE) contributors = models.ManyToManyField(Author) class Meta: app_label = "migrations" apps = new_apps verbose_name = "tome" db_table = "test_tome" indexes = [models.Index(fields=['title'])] class Food(models.Model): food_mgr = FoodManager('a', 'b') food_qs = FoodQuerySet.as_manager() food_no_mgr = NoMigrationFoodManager('x', 'y') class Meta: app_label = "migrations" apps = new_apps class FoodNoManagers(models.Model): class Meta: app_label = "migrations" apps = new_apps class FoodNoDefaultManager(models.Model): food_no_mgr = NoMigrationFoodManager('x', 'y') food_mgr = FoodManager('a', 'b') food_qs = FoodQuerySet.as_manager() class Meta: app_label = "migrations" apps = new_apps mgr1 = FoodManager('a', 'b') mgr2 = FoodManager('x', 'y', c=3, d=4) class FoodOrderedManagers(models.Model): # The managers on this model should be ordered by their creation # counter and not by the order in model body food_no_mgr = NoMigrationFoodManager('x', 'y') food_mgr2 = mgr2 food_mgr1 = mgr1 class Meta: app_label = "migrations" apps = new_apps project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] author_proxy_state = project_state.models['migrations', 'authorproxy'] sub_author_state = project_state.models['migrations', 'subauthor'] book_state = project_state.models['migrations', 'book'] food_state = project_state.models['migrations', 'food'] food_no_managers_state = project_state.models['migrations', 'foodnomanagers'] food_no_default_manager_state = project_state.models['migrations', 'foodnodefaultmanager'] food_order_manager_state = project_state.models['migrations', 'foodorderedmanagers'] book_index = models.Index(fields=['title']) book_index.set_name_with_model(Book) self.assertEqual(author_state.app_label, "migrations") self.assertEqual(author_state.name, "Author") self.assertEqual([x for x, y in author_state.fields], ["id", "name", "bio", "age"]) self.assertEqual(author_state.fields[1][1].max_length, 255) self.assertIs(author_state.fields[2][1].null, False) self.assertIs(author_state.fields[3][1].null, True) self.assertEqual( author_state.options, { 
"unique_together": {("name", "bio")}, "index_together": {("bio", "age")}, "indexes": [], "constraints": [], } ) self.assertEqual(author_state.bases, (models.Model,)) self.assertEqual(book_state.app_label, "migrations") self.assertEqual(book_state.name, "Book") self.assertEqual([x for x, y in book_state.fields], ["id", "title", "author", "contributors"]) self.assertEqual(book_state.fields[1][1].max_length, 1000) self.assertIs(book_state.fields[2][1].null, False) self.assertEqual(book_state.fields[3][1].__class__.__name__, "ManyToManyField") self.assertEqual( book_state.options, {"verbose_name": "tome", "db_table": "test_tome", "indexes": [book_index], "constraints": []}, ) self.assertEqual(book_state.bases, (models.Model,)) self.assertEqual(author_proxy_state.app_label, "migrations") self.assertEqual(author_proxy_state.name, "AuthorProxy") self.assertEqual(author_proxy_state.fields, []) self.assertEqual( author_proxy_state.options, {"proxy": True, "ordering": ["name"], "indexes": [], "constraints": []}, ) self.assertEqual(author_proxy_state.bases, ("migrations.author",)) self.assertEqual(sub_author_state.app_label, "migrations") self.assertEqual(sub_author_state.name, "SubAuthor") self.assertEqual(len(sub_author_state.fields), 2) self.assertEqual(sub_author_state.bases, ("migrations.author",)) # The default manager is used in migrations self.assertEqual([name for name, mgr in food_state.managers], ['food_mgr']) self.assertTrue(all(isinstance(name, str) for name, mgr in food_state.managers)) self.assertEqual(food_state.managers[0][1].args, ('a', 'b', 1, 2)) # No explicit managers defined. Migrations will fall back to the default self.assertEqual(food_no_managers_state.managers, []) # food_mgr is used in migration but isn't the default mgr, hence add the # default self.assertEqual([name for name, mgr in food_no_default_manager_state.managers], ['food_no_mgr', 'food_mgr']) self.assertTrue(all(isinstance(name, str) for name, mgr in food_no_default_manager_state.managers)) self.assertEqual(food_no_default_manager_state.managers[0][1].__class__, models.Manager) self.assertIsInstance(food_no_default_manager_state.managers[1][1], FoodManager) self.assertEqual([name for name, mgr in food_order_manager_state.managers], ['food_mgr1', 'food_mgr2']) self.assertTrue(all(isinstance(name, str) for name, mgr in food_order_manager_state.managers)) self.assertEqual([mgr.args for name, mgr in food_order_manager_state.managers], [('a', 'b', 1, 2), ('x', 'y', 3, 4)]) def test_custom_default_manager_added_to_the_model_state(self): """ When the default manager of the model is a custom manager, it needs to be added to the model state. """ new_apps = Apps(['migrations']) custom_manager = models.Manager() class Author(models.Model): objects = models.TextField() authors = custom_manager class Meta: app_label = 'migrations' apps = new_apps project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.managers, [('authors', custom_manager)]) def test_custom_default_manager_named_objects_with_false_migration_flag(self): """ When a manager is added with a name of 'objects' but it does not have `use_in_migrations = True`, no migration should be added to the model state (#26643). 
""" new_apps = Apps(['migrations']) class Author(models.Model): objects = models.Manager() class Meta: app_label = 'migrations' apps = new_apps project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.managers, []) def test_no_duplicate_managers(self): """ When a manager is added with `use_in_migrations = True` and a parent model had a manager with the same name and `use_in_migrations = True`, the parent's manager shouldn't appear in the model state (#26881). """ new_apps = Apps(['migrations']) class PersonManager(models.Manager): use_in_migrations = True class Person(models.Model): objects = PersonManager() class Meta: abstract = True class BossManager(PersonManager): use_in_migrations = True class Boss(Person): objects = BossManager() class Meta: app_label = 'migrations' apps = new_apps project_state = ProjectState.from_apps(new_apps) boss_state = project_state.models['migrations', 'boss'] self.assertEqual(boss_state.managers, [('objects', Boss.objects)]) def test_custom_default_manager(self): new_apps = Apps(['migrations']) class Author(models.Model): manager1 = models.Manager() manager2 = models.Manager() class Meta: app_label = 'migrations' apps = new_apps default_manager_name = 'manager2' project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.options['default_manager_name'], 'manager2') self.assertEqual(author_state.managers, [('manager2', Author.manager1)]) def test_custom_base_manager(self): new_apps = Apps(['migrations']) class Author(models.Model): manager1 = models.Manager() manager2 = models.Manager() class Meta: app_label = 'migrations' apps = new_apps base_manager_name = 'manager2' class Author2(models.Model): manager1 = models.Manager() manager2 = models.Manager() class Meta: app_label = 'migrations' apps = new_apps base_manager_name = 'manager1' project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.options['base_manager_name'], 'manager2') self.assertEqual(author_state.managers, [ ('manager1', Author.manager1), ('manager2', Author.manager2), ]) author2_state = project_state.models['migrations', 'author2'] self.assertEqual(author2_state.options['base_manager_name'], 'manager1') self.assertEqual(author2_state.managers, [ ('manager1', Author2.manager1), ]) def test_apps_bulk_update(self): """ StateApps.bulk_update() should update apps.ready to False and reset the value afterwards. """ project_state = ProjectState() apps = project_state.apps with apps.bulk_update(): self.assertFalse(apps.ready) self.assertTrue(apps.ready) with self.assertRaises(ValueError): with apps.bulk_update(): self.assertFalse(apps.ready) raise ValueError() self.assertTrue(apps.ready) def test_render(self): """ Tests rendering a ProjectState into an Apps. 
""" project_state = ProjectState() project_state.add_model(ModelState( app_label="migrations", name="Tag", fields=[ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ("hidden", models.BooleanField()), ], )) project_state.add_model(ModelState( app_label="migrations", name="SubTag", fields=[ ('tag_ptr', models.OneToOneField( 'migrations.Tag', models.CASCADE, auto_created=True, primary_key=True, to_field='id', serialize=False, )), ("awesome", models.BooleanField()), ], bases=("migrations.Tag",), )) base_mgr = models.Manager() mgr1 = FoodManager('a', 'b') mgr2 = FoodManager('x', 'y', c=3, d=4) project_state.add_model(ModelState( app_label="migrations", name="Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ # The ordering we really want is objects, mgr1, mgr2 ('default', base_mgr), ('food_mgr2', mgr2), ('food_mgr1', mgr1), ] )) new_apps = project_state.apps self.assertEqual(new_apps.get_model("migrations", "Tag")._meta.get_field("name").max_length, 100) self.assertIs(new_apps.get_model("migrations", "Tag")._meta.get_field("hidden").null, False) self.assertEqual(len(new_apps.get_model("migrations", "SubTag")._meta.local_fields), 2) Food = new_apps.get_model("migrations", "Food") self.assertEqual([mgr.name for mgr in Food._meta.managers], ['default', 'food_mgr1', 'food_mgr2']) self.assertTrue(all(isinstance(mgr.name, str) for mgr in Food._meta.managers)) self.assertEqual([mgr.__class__ for mgr in Food._meta.managers], [models.Manager, FoodManager, FoodManager]) def test_render_model_inheritance(self): class Book(models.Model): title = models.CharField(max_length=1000) class Meta: app_label = "migrations" apps = Apps() class Novel(Book): class Meta: app_label = "migrations" apps = Apps() # First, test rendering individually apps = Apps(["migrations"]) # We shouldn't be able to render yet ms = ModelState.from_model(Novel) with self.assertRaises(InvalidBasesError): ms.render(apps) # Once the parent model is in the app registry, it should be fine ModelState.from_model(Book).render(apps) ModelState.from_model(Novel).render(apps) def test_render_model_with_multiple_inheritance(self): class Foo(models.Model): class Meta: app_label = "migrations" apps = Apps() class Bar(models.Model): class Meta: app_label = "migrations" apps = Apps() class FooBar(Foo, Bar): class Meta: app_label = "migrations" apps = Apps() class AbstractSubFooBar(FooBar): class Meta: abstract = True apps = Apps() class SubFooBar(AbstractSubFooBar): class Meta: app_label = "migrations" apps = Apps() apps = Apps(["migrations"]) # We shouldn't be able to render yet ms = ModelState.from_model(FooBar) with self.assertRaises(InvalidBasesError): ms.render(apps) # Once the parent models are in the app registry, it should be fine ModelState.from_model(Foo).render(apps) self.assertSequenceEqual(ModelState.from_model(Foo).bases, [models.Model]) ModelState.from_model(Bar).render(apps) self.assertSequenceEqual(ModelState.from_model(Bar).bases, [models.Model]) ModelState.from_model(FooBar).render(apps) self.assertSequenceEqual(ModelState.from_model(FooBar).bases, ['migrations.foo', 'migrations.bar']) ModelState.from_model(SubFooBar).render(apps) self.assertSequenceEqual(ModelState.from_model(SubFooBar).bases, ['migrations.foobar']) def test_render_project_dependencies(self): """ The ProjectState render method correctly renders models to account for inter-model base dependencies. 
""" new_apps = Apps() class A(models.Model): class Meta: app_label = "migrations" apps = new_apps class B(A): class Meta: app_label = "migrations" apps = new_apps class C(B): class Meta: app_label = "migrations" apps = new_apps class D(A): class Meta: app_label = "migrations" apps = new_apps class E(B): class Meta: app_label = "migrations" apps = new_apps proxy = True class F(D): class Meta: app_label = "migrations" apps = new_apps proxy = True # Make a ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) project_state.add_model(ModelState.from_model(C)) project_state.add_model(ModelState.from_model(D)) project_state.add_model(ModelState.from_model(E)) project_state.add_model(ModelState.from_model(F)) final_apps = project_state.apps self.assertEqual(len(final_apps.get_models()), 6) # Now make an invalid ProjectState and make sure it fails project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) project_state.add_model(ModelState.from_model(C)) project_state.add_model(ModelState.from_model(F)) with self.assertRaises(InvalidBasesError): project_state.apps def test_render_unique_app_labels(self): """ The ProjectState render method doesn't raise an ImproperlyConfigured exception about unique labels if two dotted app names have the same last part. """ class A(models.Model): class Meta: app_label = "django.contrib.auth" class B(models.Model): class Meta: app_label = "vendor.auth" # Make a ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) self.assertEqual(len(project_state.apps.get_models()), 2) def test_add_relations(self): """ #24573 - Adding relations to existing models should reload the referenced models too. 
""" new_apps = Apps() class A(models.Model): class Meta: app_label = 'something' apps = new_apps class B(A): class Meta: app_label = 'something' apps = new_apps class C(models.Model): class Meta: app_label = 'something' apps = new_apps project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) project_state.add_model(ModelState.from_model(C)) project_state.apps # We need to work with rendered models old_state = project_state.clone() model_a_old = old_state.apps.get_model('something', 'A') model_b_old = old_state.apps.get_model('something', 'B') model_c_old = old_state.apps.get_model('something', 'C') # The relations between the old models are correct self.assertIs(model_a_old._meta.get_field('b').related_model, model_b_old) self.assertIs(model_b_old._meta.get_field('a_ptr').related_model, model_a_old) operation = AddField('c', 'to_a', models.OneToOneField( 'something.A', models.CASCADE, related_name='from_c', )) operation.state_forwards('something', project_state) model_a_new = project_state.apps.get_model('something', 'A') model_b_new = project_state.apps.get_model('something', 'B') model_c_new = project_state.apps.get_model('something', 'C') # All models have changed self.assertIsNot(model_a_old, model_a_new) self.assertIsNot(model_b_old, model_b_new) self.assertIsNot(model_c_old, model_c_new) # The relations between the old models still hold self.assertIs(model_a_old._meta.get_field('b').related_model, model_b_old) self.assertIs(model_b_old._meta.get_field('a_ptr').related_model, model_a_old) # The relations between the new models correct self.assertIs(model_a_new._meta.get_field('b').related_model, model_b_new) self.assertIs(model_b_new._meta.get_field('a_ptr').related_model, model_a_new) self.assertIs(model_a_new._meta.get_field('from_c').related_model, model_c_new) self.assertIs(model_c_new._meta.get_field('to_a').related_model, model_a_new) def test_remove_relations(self): """ #24225 - Relations between models are updated while remaining the relations and references for models of an old state. 
""" new_apps = Apps() class A(models.Model): class Meta: app_label = "something" apps = new_apps class B(models.Model): to_a = models.ForeignKey(A, models.CASCADE) class Meta: app_label = "something" apps = new_apps def get_model_a(state): return [mod for mod in state.apps.get_models() if mod._meta.model_name == 'a'][0] project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1) old_state = project_state.clone() operation = RemoveField("b", "to_a") operation.state_forwards("something", project_state) # Model from old_state still has the relation model_a_old = get_model_a(old_state) model_a_new = get_model_a(project_state) self.assertIsNot(model_a_old, model_a_new) self.assertEqual(len(model_a_old._meta.related_objects), 1) self.assertEqual(len(model_a_new._meta.related_objects), 0) # Same test for deleted model project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) old_state = project_state.clone() operation = DeleteModel("b") operation.state_forwards("something", project_state) model_a_old = get_model_a(old_state) model_a_new = get_model_a(project_state) self.assertIsNot(model_a_old, model_a_new) self.assertEqual(len(model_a_old._meta.related_objects), 1) self.assertEqual(len(model_a_new._meta.related_objects), 0) def test_self_relation(self): """ #24513 - Modifying an object pointing to itself would cause it to be rendered twice and thus breaking its related M2M through objects. """ class A(models.Model): to_a = models.ManyToManyField('something.A', symmetrical=False) class Meta: app_label = "something" def get_model_a(state): return [mod for mod in state.apps.get_models() if mod._meta.model_name == 'a'][0] project_state = ProjectState() project_state.add_model((ModelState.from_model(A))) self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1) old_state = project_state.clone() operation = AlterField( model_name="a", name="to_a", field=models.ManyToManyField("something.A", symmetrical=False, blank=True) ) # At this point the model would be rendered twice causing its related # M2M through objects to point to an old copy and thus breaking their # attribute lookup. operation.state_forwards("something", project_state) model_a_old = get_model_a(old_state) model_a_new = get_model_a(project_state) self.assertIsNot(model_a_old, model_a_new) # The old model's _meta is still consistent field_to_a_old = model_a_old._meta.get_field("to_a") self.assertEqual(field_to_a_old.m2m_field_name(), "from_a") self.assertEqual(field_to_a_old.m2m_reverse_field_name(), "to_a") self.assertIs(field_to_a_old.related_model, model_a_old) self.assertIs(field_to_a_old.remote_field.through._meta.get_field('to_a').related_model, model_a_old) self.assertIs(field_to_a_old.remote_field.through._meta.get_field('from_a').related_model, model_a_old) # The new model's _meta is still consistent field_to_a_new = model_a_new._meta.get_field("to_a") self.assertEqual(field_to_a_new.m2m_field_name(), "from_a") self.assertEqual(field_to_a_new.m2m_reverse_field_name(), "to_a") self.assertIs(field_to_a_new.related_model, model_a_new) self.assertIs(field_to_a_new.remote_field.through._meta.get_field('to_a').related_model, model_a_new) self.assertIs(field_to_a_new.remote_field.through._meta.get_field('from_a').related_model, model_a_new) def test_equality(self): """ == and != are implemented correctly. 
""" # Test two things that should be equal project_state = ProjectState() project_state.add_model(ModelState( "migrations", "Tag", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ("hidden", models.BooleanField()), ], {}, None, )) project_state.apps # Fill the apps cached property other_state = project_state.clone() self.assertEqual(project_state, project_state) self.assertEqual(project_state, other_state) self.assertIs(project_state != project_state, False) self.assertIs(project_state != other_state, False) self.assertNotEqual(project_state.apps, other_state.apps) # Make a very small change (max_len 99) and see if that affects it project_state = ProjectState() project_state.add_model(ModelState( "migrations", "Tag", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=99)), ("hidden", models.BooleanField()), ], {}, None, )) self.assertNotEqual(project_state, other_state) self.assertIs(project_state == other_state, False) def test_dangling_references_throw_error(self): new_apps = Apps() class Author(models.Model): name = models.TextField() class Meta: app_label = "migrations" apps = new_apps class Publisher(models.Model): name = models.TextField() class Meta: app_label = "migrations" apps = new_apps class Book(models.Model): author = models.ForeignKey(Author, models.CASCADE) publisher = models.ForeignKey(Publisher, models.CASCADE) class Meta: app_label = "migrations" apps = new_apps class Magazine(models.Model): authors = models.ManyToManyField(Author) class Meta: app_label = "migrations" apps = new_apps # Make a valid ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(Author)) project_state.add_model(ModelState.from_model(Publisher)) project_state.add_model(ModelState.from_model(Book)) project_state.add_model(ModelState.from_model(Magazine)) self.assertEqual(len(project_state.apps.get_models()), 4) # now make an invalid one with a ForeignKey project_state = ProjectState() project_state.add_model(ModelState.from_model(Book)) msg = ( "The field migrations.Book.author was declared with a lazy reference " "to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n" "The field migrations.Book.publisher was declared with a lazy reference " "to 'migrations.publisher', but app 'migrations' doesn't provide model 'publisher'." ) with self.assertRaisesMessage(ValueError, msg): project_state.apps # And another with ManyToManyField. project_state = ProjectState() project_state.add_model(ModelState.from_model(Magazine)) msg = ( "The field migrations.Magazine.authors was declared with a lazy reference " "to 'migrations.author\', but app 'migrations' doesn't provide model 'author'.\n" "The field migrations.Magazine_authors.author was declared with a lazy reference " "to \'migrations.author\', but app 'migrations' doesn't provide model 'author'." ) with self.assertRaisesMessage(ValueError, msg): project_state.apps # And now with multiple models and multiple fields. 
project_state.add_model(ModelState.from_model(Book)) msg = ( "The field migrations.Book.author was declared with a lazy reference " "to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n" "The field migrations.Book.publisher was declared with a lazy reference " "to 'migrations.publisher', but app 'migrations' doesn't provide model 'publisher'.\n" "The field migrations.Magazine.authors was declared with a lazy reference " "to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n" "The field migrations.Magazine_authors.author was declared with a lazy reference " "to 'migrations.author', but app 'migrations' doesn't provide model 'author'." ) with self.assertRaisesMessage(ValueError, msg): project_state.apps def test_real_apps(self): """ Including real apps can resolve dangling FK errors. This test relies on the fact that contenttypes is always loaded. """ new_apps = Apps() class TestModel(models.Model): ct = models.ForeignKey("contenttypes.ContentType", models.CASCADE) class Meta: app_label = "migrations" apps = new_apps # If we just stick it into an empty state it should fail project_state = ProjectState() project_state.add_model(ModelState.from_model(TestModel)) with self.assertRaises(ValueError): project_state.apps # If we include the real app it should succeed project_state = ProjectState(real_apps=["contenttypes"]) project_state.add_model(ModelState.from_model(TestModel)) rendered_state = project_state.apps self.assertEqual( len([x for x in rendered_state.get_models() if x._meta.app_label == "migrations"]), 1, ) def test_ignore_order_wrt(self): """ Makes sure ProjectState doesn't include OrderWrt fields when making from existing models. """ new_apps = Apps() class Author(models.Model): name = models.TextField() class Meta: app_label = "migrations" apps = new_apps class Book(models.Model): author = models.ForeignKey(Author, models.CASCADE) class Meta: app_label = "migrations" apps = new_apps order_with_respect_to = "author" # Make a valid ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(Author)) project_state.add_model(ModelState.from_model(Book)) self.assertEqual( [name for name, field in project_state.models["migrations", "book"].fields], ["id", "author"], ) def test_manager_refer_correct_model_version(self): """ #24147 - Managers refer to the correct version of a historical model """ project_state = ProjectState() project_state.add_model(ModelState( app_label="migrations", name="Tag", fields=[ ("id", models.AutoField(primary_key=True)), ("hidden", models.BooleanField()), ], managers=[ ('food_mgr', FoodManager('a', 'b')), ('food_qs', FoodQuerySet.as_manager()), ] )) old_model = project_state.apps.get_model('migrations', 'tag') new_state = project_state.clone() operation = RemoveField("tag", "hidden") operation.state_forwards("migrations", new_state) new_model = new_state.apps.get_model('migrations', 'tag') self.assertIsNot(old_model, new_model) self.assertIs(old_model, old_model.food_mgr.model) self.assertIs(old_model, old_model.food_qs.model) self.assertIs(new_model, new_model.food_mgr.model) self.assertIs(new_model, new_model.food_qs.model) self.assertIsNot(old_model.food_mgr, new_model.food_mgr) self.assertIsNot(old_model.food_qs, new_model.food_qs) self.assertIsNot(old_model.food_mgr.model, new_model.food_mgr.model) self.assertIsNot(old_model.food_qs.model, new_model.food_qs.model) def test_choices_iterator(self): """ #24483 - ProjectState.from_apps should not destructively consume 
Field.choices iterators. """ new_apps = Apps(["migrations"]) choices = [('a', 'A'), ('b', 'B')] class Author(models.Model): name = models.CharField(max_length=255) choice = models.CharField(max_length=255, choices=iter(choices)) class Meta: app_label = "migrations" apps = new_apps ProjectState.from_apps(new_apps) choices_field = Author._meta.get_field('choice') self.assertEqual(list(choices_field.choices), choices) class ModelStateTests(SimpleTestCase): def test_custom_model_base(self): state = ModelState.from_model(ModelWithCustomBase) self.assertEqual(state.bases, (models.Model,)) def test_bound_field_sanity_check(self): field = models.CharField(max_length=1) field.model = models.Model with self.assertRaisesMessage(ValueError, 'ModelState.fields cannot be bound to a model - "field" is.'): ModelState('app', 'Model', [('field', field)]) def test_sanity_check_to(self): field = models.ForeignKey(UnicodeModel, models.CASCADE) with self.assertRaisesMessage( ValueError, 'ModelState.fields cannot refer to a model class - "field.to" does. ' 'Use a string reference instead.' ): ModelState('app', 'Model', [('field', field)]) def test_sanity_check_through(self): field = models.ManyToManyField('UnicodeModel') field.remote_field.through = UnicodeModel with self.assertRaisesMessage( ValueError, 'ModelState.fields cannot refer to a model class - "field.through" does. ' 'Use a string reference instead.' ): ModelState('app', 'Model', [('field', field)]) def test_sanity_index_name(self): field = models.IntegerField() options = {'indexes': [models.Index(fields=['field'])]} msg = "Indexes passed to ModelState require a name attribute. <Index: fields='field'> doesn't have one." with self.assertRaisesMessage(ValueError, msg): ModelState('app', 'Model', [('field', field)], options=options) def test_fields_immutability(self): """ Rendering a model state doesn't alter its internal fields. 
""" apps = Apps() field = models.CharField(max_length=1) state = ModelState('app', 'Model', [('name', field)]) Model = state.render(apps) self.assertNotEqual(Model._meta.get_field('name'), field) def test_repr(self): field = models.CharField(max_length=1) state = ModelState('app', 'Model', [('name', field)], bases=['app.A', 'app.B', 'app.C']) self.assertEqual(repr(state), "<ModelState: 'app.Model'>") project_state = ProjectState() project_state.add_model(state) with self.assertRaisesMessage(InvalidBasesError, "Cannot resolve bases for [<ModelState: 'app.Model'>]"): project_state.apps @override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel') def test_create_swappable(self): """ Tests making a ProjectState from an Apps with a swappable model """ new_apps = Apps(['migrations']) class Author(models.Model): name = models.CharField(max_length=255) bio = models.TextField() age = models.IntegerField(blank=True, null=True) class Meta: app_label = 'migrations' apps = new_apps swappable = 'TEST_SWAPPABLE_MODEL' author_state = ModelState.from_model(Author) self.assertEqual(author_state.app_label, 'migrations') self.assertEqual(author_state.name, 'Author') self.assertEqual([x for x, y in author_state.fields], ['id', 'name', 'bio', 'age']) self.assertEqual(author_state.fields[1][1].max_length, 255) self.assertIs(author_state.fields[2][1].null, False) self.assertIs(author_state.fields[3][1].null, True) self.assertEqual(author_state.options, {'swappable': 'TEST_SWAPPABLE_MODEL', 'indexes': [], "constraints": []}) self.assertEqual(author_state.bases, (models.Model,)) self.assertEqual(author_state.managers, []) @override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel') def test_create_swappable_from_abstract(self): """ A swappable model inheriting from a hierarchy: concrete -> abstract -> concrete. 
""" new_apps = Apps(['migrations']) class SearchableLocation(models.Model): keywords = models.CharField(max_length=256) class Meta: app_label = 'migrations' apps = new_apps class Station(SearchableLocation): name = models.CharField(max_length=128) class Meta: abstract = True class BusStation(Station): bus_routes = models.CharField(max_length=128) inbound = models.BooleanField(default=False) class Meta(Station.Meta): app_label = 'migrations' apps = new_apps swappable = 'TEST_SWAPPABLE_MODEL' station_state = ModelState.from_model(BusStation) self.assertEqual(station_state.app_label, 'migrations') self.assertEqual(station_state.name, 'BusStation') self.assertEqual( [x for x, y in station_state.fields], ['searchablelocation_ptr', 'name', 'bus_routes', 'inbound'] ) self.assertEqual(station_state.fields[1][1].max_length, 128) self.assertEqual(station_state.fields[2][1].null, False) self.assertEqual( station_state.options, {'abstract': False, 'swappable': 'TEST_SWAPPABLE_MODEL', 'indexes': [], 'constraints': []} ) self.assertEqual(station_state.bases, ('migrations.searchablelocation',)) self.assertEqual(station_state.managers, []) @override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel') def test_custom_manager_swappable(self): """ Tests making a ProjectState from unused models with custom managers """ new_apps = Apps(['migrations']) class Food(models.Model): food_mgr = FoodManager('a', 'b') food_qs = FoodQuerySet.as_manager() food_no_mgr = NoMigrationFoodManager('x', 'y') class Meta: app_label = "migrations" apps = new_apps swappable = 'TEST_SWAPPABLE_MODEL' food_state = ModelState.from_model(Food) # The default manager is used in migrations self.assertEqual([name for name, mgr in food_state.managers], ['food_mgr']) self.assertEqual(food_state.managers[0][1].args, ('a', 'b', 1, 2)) @isolate_apps('migrations', 'django.contrib.contenttypes') def test_order_with_respect_to_private_field(self): class PrivateFieldModel(models.Model): content_type = models.ForeignKey('contenttypes.ContentType', models.CASCADE) object_id = models.PositiveIntegerField() private = GenericForeignKey() class Meta: order_with_respect_to = 'private' state = ModelState.from_model(PrivateFieldModel) self.assertNotIn('order_with_respect_to', state.options) @isolate_apps('migrations') def test_abstract_model_children_inherit_indexes(self): class Abstract(models.Model): name = models.CharField(max_length=50) class Meta: app_label = 'migrations' abstract = True indexes = [models.indexes.Index(fields=['name'])] class Child1(Abstract): pass class Child2(Abstract): pass child1_state = ModelState.from_model(Child1) child2_state = ModelState.from_model(Child2) index_names = [index.name for index in child1_state.options['indexes']] self.assertEqual(index_names, ['migrations__name_b0afd7_idx']) index_names = [index.name for index in child2_state.options['indexes']] self.assertEqual(index_names, ['migrations__name_016466_idx']) # Modifying the state doesn't modify the index on the model. 
child1_state.options['indexes'][0].name = 'bar' self.assertEqual(Child1._meta.indexes[0].name, 'migrations__name_b0afd7_idx') @isolate_apps('migrations') def test_explicit_index_name(self): class TestModel(models.Model): name = models.CharField(max_length=50) class Meta: app_label = 'migrations' indexes = [models.indexes.Index(fields=['name'], name='foo_idx')] model_state = ModelState.from_model(TestModel) index_names = [index.name for index in model_state.options['indexes']] self.assertEqual(index_names, ['foo_idx']) @isolate_apps('migrations') def test_from_model_constraints(self): class ModelWithConstraints(models.Model): size = models.IntegerField() class Meta: constraints = [models.CheckConstraint(check=models.Q(size__gt=1), name='size_gt_1')] state = ModelState.from_model(ModelWithConstraints) model_constraints = ModelWithConstraints._meta.constraints state_constraints = state.options['constraints'] self.assertEqual(model_constraints, state_constraints) self.assertIsNot(model_constraints, state_constraints) self.assertIsNot(model_constraints[0], state_constraints[0]) class RelatedModelsTests(SimpleTestCase): def setUp(self): self.apps = Apps(['migrations.related_models_app']) def create_model(self, name, foreign_keys=[], bases=(), abstract=False, proxy=False): test_name = 'related_models_app' assert not (abstract and proxy) meta_contents = { 'abstract': abstract, 'app_label': test_name, 'apps': self.apps, 'proxy': proxy, } meta = type("Meta", (), meta_contents) if not bases: bases = (models.Model,) body = { 'Meta': meta, '__module__': "__fake__", } fname_base = fname = '%s_%%d' % name.lower() for i, fk in enumerate(foreign_keys, 1): fname = fname_base % i body[fname] = fk return type(name, bases, body) def assertRelated(self, model, needle): self.assertEqual( get_related_models_recursive(model), {(n._meta.app_label, n._meta.model_name) for n in needle}, ) def test_unrelated(self): A = self.create_model("A") B = self.create_model("B") self.assertRelated(A, []) self.assertRelated(B, []) def test_direct_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)]) B = self.create_model("B") self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_direct_hidden_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE, related_name='+')]) B = self.create_model("B") self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_fk_through_proxy(self): A = self.create_model("A") B = self.create_model("B", bases=(A,), proxy=True) C = self.create_model("C", bases=(B,), proxy=True) D = self.create_model("D", foreign_keys=[models.ForeignKey('C', models.CASCADE)]) self.assertRelated(A, [B, C, D]) self.assertRelated(B, [A, C, D]) self.assertRelated(C, [A, B, D]) self.assertRelated(D, [A, B, C]) def test_nested_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)]) B = self.create_model("B", foreign_keys=[models.ForeignKey('C', models.CASCADE)]) C = self.create_model("C") self.assertRelated(A, [B, C]) self.assertRelated(B, [A, C]) self.assertRelated(C, [A, B]) def test_two_sided(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)]) B = self.create_model("B", foreign_keys=[models.ForeignKey('A', models.CASCADE)]) self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_circle(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)]) B = self.create_model("B", foreign_keys=[models.ForeignKey('C', models.CASCADE)]) C = 
self.create_model("C", foreign_keys=[models.ForeignKey('A', models.CASCADE)]) self.assertRelated(A, [B, C]) self.assertRelated(B, [A, C]) self.assertRelated(C, [A, B]) def test_base(self): A = self.create_model("A") B = self.create_model("B", bases=(A,)) self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_nested_base(self): A = self.create_model("A") B = self.create_model("B", bases=(A,)) C = self.create_model("C", bases=(B,)) self.assertRelated(A, [B, C]) self.assertRelated(B, [A, C]) self.assertRelated(C, [A, B]) def test_multiple_bases(self): A = self.create_model("A") B = self.create_model("B") C = self.create_model("C", bases=(A, B,)) self.assertRelated(A, [B, C]) self.assertRelated(B, [A, C]) self.assertRelated(C, [A, B]) def test_multiple_nested_bases(self): A = self.create_model("A") B = self.create_model("B") C = self.create_model("C", bases=(A, B,)) D = self.create_model("D") E = self.create_model("E", bases=(D,)) F = self.create_model("F", bases=(C, E,)) Y = self.create_model("Y") Z = self.create_model("Z", bases=(Y,)) self.assertRelated(A, [B, C, D, E, F]) self.assertRelated(B, [A, C, D, E, F]) self.assertRelated(C, [A, B, D, E, F]) self.assertRelated(D, [A, B, C, E, F]) self.assertRelated(E, [A, B, C, D, F]) self.assertRelated(F, [A, B, C, D, E]) self.assertRelated(Y, [Z]) self.assertRelated(Z, [Y]) def test_base_to_base_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('Y', models.CASCADE)]) B = self.create_model("B", bases=(A,)) Y = self.create_model("Y") Z = self.create_model("Z", bases=(Y,)) self.assertRelated(A, [B, Y, Z]) self.assertRelated(B, [A, Y, Z]) self.assertRelated(Y, [A, B, Z]) self.assertRelated(Z, [A, B, Y]) def test_base_to_subclass_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('Z', models.CASCADE)]) B = self.create_model("B", bases=(A,)) Y = self.create_model("Y") Z = self.create_model("Z", bases=(Y,)) self.assertRelated(A, [B, Y, Z]) self.assertRelated(B, [A, Y, Z]) self.assertRelated(Y, [A, B, Z]) self.assertRelated(Z, [A, B, Y]) def test_direct_m2m(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('B')]) B = self.create_model("B") self.assertRelated(A, [A.a_1.rel.through, B]) self.assertRelated(B, [A, A.a_1.rel.through]) def test_direct_m2m_self(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('A')]) self.assertRelated(A, [A.a_1.rel.through]) def test_intermediate_m2m_self(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('A', through='T')]) T = self.create_model("T", foreign_keys=[ models.ForeignKey('A', models.CASCADE), models.ForeignKey('A', models.CASCADE), ]) self.assertRelated(A, [T]) self.assertRelated(T, [A]) def test_intermediate_m2m(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')]) B = self.create_model("B") T = self.create_model("T", foreign_keys=[ models.ForeignKey('A', models.CASCADE), models.ForeignKey('B', models.CASCADE), ]) self.assertRelated(A, [B, T]) self.assertRelated(B, [A, T]) self.assertRelated(T, [A, B]) def test_intermediate_m2m_extern_fk(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')]) B = self.create_model("B") Z = self.create_model("Z") T = self.create_model("T", foreign_keys=[ models.ForeignKey('A', models.CASCADE), models.ForeignKey('B', models.CASCADE), models.ForeignKey('Z', models.CASCADE), ]) self.assertRelated(A, [B, T, Z]) self.assertRelated(B, [A, T, Z]) self.assertRelated(T, [A, B, Z]) self.assertRelated(Z, [A, B, T]) 
    def test_intermediate_m2m_base(self):
        A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')])
        B = self.create_model("B")
        S = self.create_model("S")
        T = self.create_model("T", foreign_keys=[
            models.ForeignKey('A', models.CASCADE),
            models.ForeignKey('B', models.CASCADE),
        ], bases=(S,))
        self.assertRelated(A, [B, S, T])
        self.assertRelated(B, [A, S, T])
        self.assertRelated(S, [A, B, T])
        self.assertRelated(T, [A, B, S])

    def test_generic_fk(self):
        A = self.create_model("A", foreign_keys=[
            models.ForeignKey('B', models.CASCADE),
            GenericForeignKey(),
        ])
        B = self.create_model("B", foreign_keys=[
            models.ForeignKey('C', models.CASCADE),
        ])
        self.assertRelated(A, [B])
        self.assertRelated(B, [A])

    def test_abstract_base(self):
        A = self.create_model("A", abstract=True)
        B = self.create_model("B", bases=(A,))
        self.assertRelated(A, [B])
        self.assertRelated(B, [])

    def test_nested_abstract_base(self):
        A = self.create_model("A", abstract=True)
        B = self.create_model("B", bases=(A,), abstract=True)
        C = self.create_model("C", bases=(B,))
        self.assertRelated(A, [B, C])
        self.assertRelated(B, [C])
        self.assertRelated(C, [])

    def test_proxy_base(self):
        A = self.create_model("A")
        B = self.create_model("B", bases=(A,), proxy=True)
        self.assertRelated(A, [B])
        self.assertRelated(B, [])

    def test_nested_proxy_base(self):
        A = self.create_model("A")
        B = self.create_model("B", bases=(A,), proxy=True)
        C = self.create_model("C", bases=(B,), proxy=True)
        self.assertRelated(A, [B, C])
        self.assertRelated(B, [C])
        self.assertRelated(C, [])

    def test_multiple_mixed_bases(self):
        A = self.create_model("A", abstract=True)
        M = self.create_model("M")
        P = self.create_model("P")
        Q = self.create_model("Q", bases=(P,), proxy=True)
        Z = self.create_model("Z", bases=(A, M, Q))

        # Z gets parent-link O2O fields m_ptr to M and p_ptr to P (via the
        # proxy Q), which ties all of the concrete models together.
        self.assertRelated(A, [M, P, Q, Z])
        self.assertRelated(M, [P, Q, Z])
        self.assertRelated(P, [M, Q, Z])
        self.assertRelated(Q, [M, P, Z])
        self.assertRelated(Z, [M, P, Q])
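
# A hedged aside, not part of the original suite: the reachability sets
# exercised above are what ProjectState.reload_model() uses to decide which
# models to re-render, so a state mutation never leaves a stale
# cross-reference behind. The class and model names below are illustrative
# additions; this is a minimal sketch, not Django's own test.
class ReloadModelSketchTests(SimpleTestCase):
    def test_reload_rerenders_related_models(self):
        new_apps = Apps()

        class Author(models.Model):
            class Meta:
                app_label = 'related_models_app'
                apps = new_apps

        class Book(models.Model):
            author = models.ForeignKey(Author, models.CASCADE)

            class Meta:
                app_label = 'related_models_app'
                apps = new_apps

        state = ProjectState()
        state.add_model(ModelState.from_model(Author))
        state.add_model(ModelState.from_model(Book))
        old_book = state.apps.get_model('related_models_app', 'Book')
        # Reloading Author re-renders Book too, because Book is reachable
        # from Author via the reverse FK.
        state.reload_model('related_models_app', 'author')
        new_book = state.apps.get_model('related_models_app', 'Book')
        self.assertIsNot(old_book, new_book)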
07e2f9913b42c27236aeff0ddc72292fe9d0c2f97d4c9fbedff7dc865e8268e5
from template_tests.test_response import test_processor_name from django.template import Context, EngineHandler, RequestContext from django.template.backends.django import DjangoTemplates from django.template.library import InvalidTemplateLibrary from django.test import RequestFactory, override_settings from .test_dummy import TemplateStringsTests class DjangoTemplatesTests(TemplateStringsTests): engine_class = DjangoTemplates backend_name = 'django' request_factory = RequestFactory() def test_context_has_priority_over_template_context_processors(self): # See ticket #23789. engine = DjangoTemplates({ 'DIRS': [], 'APP_DIRS': False, 'NAME': 'django', 'OPTIONS': { 'context_processors': [test_processor_name], }, }) template = engine.from_string('{{ processors }}') request = self.request_factory.get('/') # Context processors run content = template.render({}, request) self.assertEqual(content, 'yes') # Context overrides context processors content = template.render({'processors': 'no'}, request) self.assertEqual(content, 'no') def test_render_requires_dict(self): """django.Template.render() requires a dict.""" engine = DjangoTemplates({ 'DIRS': [], 'APP_DIRS': False, 'NAME': 'django', 'OPTIONS': {}, }) template = engine.from_string('') context = Context() request_context = RequestContext(self.request_factory.get('/'), {}) msg = 'context must be a dict rather than Context.' with self.assertRaisesMessage(TypeError, msg): template.render(context) msg = 'context must be a dict rather than RequestContext.' with self.assertRaisesMessage(TypeError, msg): template.render(request_context) @override_settings(INSTALLED_APPS=['template_backends.apps.good']) def test_templatetag_discovery(self): engine = DjangoTemplates({ 'DIRS': [], 'APP_DIRS': False, 'NAME': 'django', 'OPTIONS': { 'libraries': { 'alternate': 'template_backends.apps.good.templatetags.good_tags', 'override': 'template_backends.apps.good.templatetags.good_tags', }, }, }) # libraries are discovered from installed applications self.assertEqual( engine.engine.libraries['good_tags'], 'template_backends.apps.good.templatetags.good_tags', ) self.assertEqual( engine.engine.libraries['subpackage.tags'], 'template_backends.apps.good.templatetags.subpackage.tags', ) # libraries are discovered from django.templatetags self.assertEqual( engine.engine.libraries['static'], 'django.templatetags.static', ) # libraries passed in OPTIONS are registered self.assertEqual( engine.engine.libraries['alternate'], 'template_backends.apps.good.templatetags.good_tags', ) # libraries passed in OPTIONS take precedence over discovered ones self.assertEqual( engine.engine.libraries['override'], 'template_backends.apps.good.templatetags.good_tags', ) @override_settings(INSTALLED_APPS=['template_backends.apps.importerror']) def test_templatetag_discovery_import_error(self): """ Import errors in tag modules should be reraised with a helpful message. 
""" with self.assertRaisesMessage( InvalidTemplateLibrary, "ImportError raised when trying to load " "'template_backends.apps.importerror.templatetags.broken_tags'" ): DjangoTemplates({ 'DIRS': [], 'APP_DIRS': False, 'NAME': 'django', 'OPTIONS': {}, }) def test_builtins_discovery(self): engine = DjangoTemplates({ 'DIRS': [], 'APP_DIRS': False, 'NAME': 'django', 'OPTIONS': { 'builtins': ['template_backends.apps.good.templatetags.good_tags'], }, }) self.assertEqual( engine.engine.builtins, [ 'django.template.defaulttags', 'django.template.defaultfilters', 'django.template.loader_tags', 'template_backends.apps.good.templatetags.good_tags', ] ) def test_autoescape_off(self): templates = [{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'OPTIONS': {'autoescape': False}, }] engines = EngineHandler(templates=templates) self.assertEqual( engines['django'].from_string('Hello, {{ name }}').render({'name': 'Bob & Jim'}), 'Hello, Bob & Jim' ) def test_autoescape_default(self): templates = [{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', }] engines = EngineHandler(templates=templates) self.assertEqual( engines['django'].from_string('Hello, {{ name }}').render({'name': 'Bob & Jim'}), 'Hello, Bob &amp; Jim' ) default_loaders = [ 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ] @override_settings(DEBUG=False) def test_non_debug_default_template_loaders(self): engine = DjangoTemplates({'DIRS': [], 'APP_DIRS': True, 'NAME': 'django', 'OPTIONS': {}}) self.assertEqual(engine.engine.loaders, [('django.template.loaders.cached.Loader', self.default_loaders)]) @override_settings(DEBUG=True) def test_debug_default_template_loaders(self): engine = DjangoTemplates({'DIRS': [], 'APP_DIRS': True, 'NAME': 'django', 'OPTIONS': {}}) self.assertEqual(engine.engine.loaders, self.default_loaders)
9d32cc03ce554af606efbbc7dfa64216642e4608ec4520ca6e1d9e6d55e1709a
import os import sys import tempfile import unittest import warnings from io import StringIO from unittest import mock from django.apps import apps from django.contrib.sites.models import Site from django.core import management from django.core.files.temp import NamedTemporaryFile from django.core.management import CommandError from django.core.management.commands.dumpdata import ProxyModelWarning from django.core.serializers.base import ProgressBar from django.db import IntegrityError, connection from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature from .models import ( Article, Category, NaturalKeyThing, PrimaryKeyUUIDModel, ProxySpy, Spy, Tag, Visa, ) class TestCaseFixtureLoadingTests(TestCase): fixtures = ['fixture1.json', 'fixture2.json'] def test_class_fixtures(self): "Test case has installed 3 fixture objects" self.assertEqual(Article.objects.count(), 3) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Django conquers world!>', '<Article: Copyright is fine the way it is>', '<Article: Poker has no place on ESPN>', ]) class SubclassTestCaseFixtureLoadingTests(TestCaseFixtureLoadingTests): """ Make sure that subclasses can remove fixtures from parent class (#21089). """ fixtures = [] def test_class_fixtures(self): "There were no fixture objects installed" self.assertEqual(Article.objects.count(), 0) class DumpDataAssertMixin: def _dumpdata_assert(self, args, output, format='json', filename=None, natural_foreign_keys=False, natural_primary_keys=False, use_base_manager=False, exclude_list=[], primary_keys=''): new_io = StringIO() filename = filename and os.path.join(tempfile.gettempdir(), filename) management.call_command( 'dumpdata', *args, format=format, stdout=new_io, stderr=new_io, output=filename, use_natural_foreign_keys=natural_foreign_keys, use_natural_primary_keys=natural_primary_keys, use_base_manager=use_base_manager, exclude=exclude_list, primary_keys=primary_keys, ) if filename: with open(filename) as f: command_output = f.read() os.remove(filename) else: command_output = new_io.getvalue().strip() if format == "json": self.assertJSONEqual(command_output, output) elif format == "xml": self.assertXMLEqual(command_output, output) else: self.assertEqual(command_output, output) class FixtureLoadingTests(DumpDataAssertMixin, TestCase): def test_loading_and_dumping(self): apps.clear_cache() Site.objects.all().delete() # Load fixture 1. Single JSON file, with two objects. 
management.call_command('loaddata', 'fixture1.json', verbosity=0) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Time to reform copyright>', '<Article: Poker has no place on ESPN>', ]) # Dump the current contents of the database as a JSON fixture self._dumpdata_assert( ['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": ' '"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place ' 'on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": ' '{"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]' ) # Try just dumping the contents of fixtures.Category self._dumpdata_assert( ['fixtures.Category'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", ' '"title": "News Stories"}}]' ) # ...and just fixtures.Article self._dumpdata_assert( ['fixtures.Article'], '[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", ' '"pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": ' '"Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]' ) # ...and both self._dumpdata_assert( ['fixtures.Category', 'fixtures.Article'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", ' '"title": "News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has ' 'no place on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", ' '"fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]' ) # Specify a specific model twice self._dumpdata_assert( ['fixtures.Article', 'fixtures.Article'], ( '[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", ' '"pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": ' '"Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]' ) ) # Specify a dump that specifies Article both explicitly and implicitly self._dumpdata_assert( ['fixtures.Article', 'fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": ' '"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place ' 'on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": ' '{"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]' ) # Specify a dump that specifies Article both explicitly and implicitly, # but lists the app first (#22025). 
self._dumpdata_assert( ['fixtures', 'fixtures.Article'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": ' '"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place ' 'on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": ' '{"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]' ) # Same again, but specify in the reverse order self._dumpdata_assert( ['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": ' '"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no ' 'place on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields":' ' {"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]' ) # Specify one model from one application, and an entire other application. self._dumpdata_assert( ['fixtures.Category', 'sites'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": ' '"News Stories"}}, {"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": ' '"example.com"}}]' ) # Load fixture 2. JSON file imported by default. Overwrites some existing objects management.call_command('loaddata', 'fixture2.json', verbosity=0) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Django conquers world!>', '<Article: Copyright is fine the way it is>', '<Article: Poker has no place on ESPN>', ]) # Load fixture 3, XML format. management.call_command('loaddata', 'fixture3.xml', verbosity=0) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: XML identified as leading cause of cancer>', '<Article: Django conquers world!>', '<Article: Copyright is fine the way it is>', '<Article: Poker on TV is great!>', ]) # Load fixture 6, JSON file with dynamic ContentType fields. Testing ManyToOne. management.call_command('loaddata', 'fixture6.json', verbosity=0) self.assertQuerysetEqual(Tag.objects.all(), [ '<Tag: <Article: Copyright is fine the way it is> tagged "copyright">', '<Tag: <Article: Copyright is fine the way it is> tagged "law">', ], ordered=False) # Load fixture 7, XML file with dynamic ContentType fields. Testing ManyToOne. management.call_command('loaddata', 'fixture7.xml', verbosity=0) self.assertQuerysetEqual(Tag.objects.all(), [ '<Tag: <Article: Copyright is fine the way it is> tagged "copyright">', '<Tag: <Article: Copyright is fine the way it is> tagged "legal">', '<Tag: <Article: Django conquers world!> tagged "django">', '<Tag: <Article: Django conquers world!> tagged "world domination">', ], ordered=False) # Load fixture 8, JSON file with dynamic Permission fields. Testing ManyToMany. management.call_command('loaddata', 'fixture8.json', verbosity=0) self.assertQuerysetEqual(Visa.objects.all(), [ '<Visa: Django Reinhardt Can add user, Can change user, Can delete user>', '<Visa: Stephane Grappelli Can add user>', '<Visa: Prince >' ], ordered=False) # Load fixture 9, XML file with dynamic Permission fields. Testing ManyToMany. 
management.call_command('loaddata', 'fixture9.xml', verbosity=0) self.assertQuerysetEqual(Visa.objects.all(), [ '<Visa: Django Reinhardt Can add user, Can change user, Can delete user>', '<Visa: Stephane Grappelli Can add user, Can delete user>', '<Visa: Artist formerly known as "Prince" Can change user>' ], ordered=False) # object list is unaffected self.assertQuerysetEqual(Article.objects.all(), [ '<Article: XML identified as leading cause of cancer>', '<Article: Django conquers world!>', '<Article: Copyright is fine the way it is>', '<Article: Poker on TV is great!>', ]) # By default, you get raw keys on dumpdata self._dumpdata_assert( ['fixtures.book'], '[{"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [3, 1]}}]' ) # But you can get natural keys if you ask for them and they are available self._dumpdata_assert( ['fixtures.book'], '[{"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [["Artist ' 'formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}]', natural_foreign_keys=True ) # You can also omit the primary keys for models that we can get later with natural keys. self._dumpdata_assert( ['fixtures.person'], '[{"fields": {"name": "Django Reinhardt"}, "model": "fixtures.person"}, {"fields": {"name": "Stephane ' 'Grappelli"}, "model": "fixtures.person"}, {"fields": {"name": "Artist formerly known as ' '\\"Prince\\""}, "model": "fixtures.person"}]', natural_primary_keys=True ) # Dump the current contents of the database as a JSON fixture self._dumpdata_assert( ['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": ' '"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker on TV is ' 'great!", "pub_date": "2006-06-16T11:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": ' '{"headline": "Copyright is fine the way it is", "pub_date": "2006-06-16T14:00:00"}}, {"pk": 4, ' '"model": "fixtures.article", "fields": {"headline": "Django conquers world!", "pub_date": ' '"2006-06-16T15:00:00"}}, {"pk": 5, "model": "fixtures.article", "fields": {"headline": "XML ' 'identified as leading cause of cancer", "pub_date": "2006-06-16T16:00:00"}}, {"pk": 1, "model": ' '"fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "copyright", "tagged_id": ' '3}}, {"pk": 2, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": ' '"legal", "tagged_id": 3}}, {"pk": 3, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", ' '"article"], "name": "django", "tagged_id": 4}}, {"pk": 4, "model": "fixtures.tag", "fields": ' '{"tagged_type": ["fixtures", "article"], "name": "world domination", "tagged_id": 4}}, {"pk": 1, ' '"model": "fixtures.person", "fields": {"name": "Django Reinhardt"}}, {"pk": 2, "model": ' '"fixtures.person", "fields": {"name": "Stephane Grappelli"}}, {"pk": 3, "model": "fixtures.person", ' '"fields": {"name": "Artist formerly known as \\"Prince\\""}}, {"pk": 1, "model": "fixtures.visa", ' '"fields": {"person": ["Django Reinhardt"], "permissions": [["add_user", "auth", "user"], ' '["change_user", "auth", "user"], ["delete_user", "auth", "user"]]}}, {"pk": 2, "model": ' '"fixtures.visa", "fields": {"person": ["Stephane Grappelli"], "permissions": [["add_user", "auth", ' '"user"], ["delete_user", "auth", "user"]]}}, {"pk": 3, "model": "fixtures.visa", "fields": {"person":' ' ["Artist formerly known as \\"Prince\\""], "permissions": [["change_user", "auth", "user"]]}}, ' 
'{"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [["Artist ' 'formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}]', natural_foreign_keys=True ) # Dump the current contents of the database as an XML fixture self._dumpdata_assert( ['fixtures'], '<?xml version="1.0" encoding="utf-8"?><django-objects version="1.0"><object pk="1" ' 'model="fixtures.category"><field type="CharField" name="title">News Stories</field><field ' 'type="TextField" name="description">Latest news stories</field></object><object pk="2" ' 'model="fixtures.article"><field type="CharField" name="headline">Poker on TV is great!</field><field ' 'type="DateTimeField" name="pub_date">2006-06-16T11:00:00</field></object><object pk="3" ' 'model="fixtures.article"><field type="CharField" name="headline">Copyright is fine the way it ' 'is</field><field type="DateTimeField" name="pub_date">2006-06-16T14:00:00</field></object><object ' 'pk="4" model="fixtures.article"><field type="CharField" name="headline">Django conquers world!' '</field><field type="DateTimeField" name="pub_date">2006-06-16T15:00:00</field></object><object ' 'pk="5" model="fixtures.article"><field type="CharField" name="headline">XML identified as leading ' 'cause of cancer</field><field type="DateTimeField" name="pub_date">2006-06-16T16:00:00</field>' '</object><object pk="1" model="fixtures.tag"><field type="CharField" name="name">copyright</field>' '<field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures' '</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3' '</field></object><object pk="2" model="fixtures.tag"><field type="CharField" name="name">legal' '</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>' 'fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" ' 'name="tagged_id">3</field></object><object pk="3" model="fixtures.tag"><field type="CharField" ' 'name="name">django</field><field to="contenttypes.contenttype" name="tagged_type" ' 'rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field ' 'type="PositiveIntegerField" name="tagged_id">4</field></object><object pk="4" model="fixtures.tag">' '<field type="CharField" name="name">world domination</field><field to="contenttypes.contenttype" ' 'name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field>' '<field type="PositiveIntegerField" name="tagged_id">4</field></object><object pk="1" ' 'model="fixtures.person"><field type="CharField" name="name">Django Reinhardt</field></object>' '<object pk="2" model="fixtures.person"><field type="CharField" name="name">Stephane Grappelli' '</field></object><object pk="3" model="fixtures.person"><field type="CharField" name="name">' 'Artist formerly known as "Prince"</field></object><object pk="1" model="fixtures.visa"><field ' 'to="fixtures.person" name="person" rel="ManyToOneRel"><natural>Django Reinhardt</natural></field>' '<field to="auth.permission" name="permissions" rel="ManyToManyRel"><object><natural>add_user' '</natural><natural>auth</natural><natural>user</natural></object><object><natural>change_user' '</natural><natural>auth</natural><natural>user</natural></object><object><natural>delete_user' '</natural><natural>auth</natural><natural>user</natural></object></field></object><object pk="2" ' 'model="fixtures.visa"><field to="fixtures.person" name="person" 
rel="ManyToOneRel"><natural>Stephane' ' Grappelli</natural></field><field to="auth.permission" name="permissions" rel="ManyToManyRel">' '<object><natural>add_user</natural><natural>auth</natural><natural>user</natural></object><object>' '<natural>delete_user</natural><natural>auth</natural><natural>user</natural></object></field>' '</object><object pk="3" model="fixtures.visa"><field to="fixtures.person" name="person" ' 'rel="ManyToOneRel"><natural>Artist formerly known as "Prince"</natural></field><field ' 'to="auth.permission" name="permissions" rel="ManyToManyRel"><object><natural>change_user</natural>' '<natural>auth</natural><natural>user</natural></object></field></object><object pk="1" ' 'model="fixtures.book"><field type="CharField" name="name">Music for all ages</field><field ' 'to="fixtures.person" name="authors" rel="ManyToManyRel"><object><natural>Artist formerly known as ' '"Prince"</natural></object><object><natural>Django Reinhardt</natural></object></field></object>' '</django-objects>', format='xml', natural_foreign_keys=True ) def test_dumpdata_with_excludes(self): # Load fixture1 which has a site, two articles, and a category Site.objects.all().delete() management.call_command('loaddata', 'fixture1.json', verbosity=0) # Excluding fixtures app should only leave sites self._dumpdata_assert( ['sites', 'fixtures'], '[{"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": "example.com"}}]', exclude_list=['fixtures'], ) # Excluding fixtures.Article/Book should leave fixtures.Category self._dumpdata_assert( ['sites', 'fixtures'], '[{"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": "example.com"}}, ' '{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": ' '"News Stories"}}]', exclude_list=['fixtures.Article', 'fixtures.Book'] ) # Excluding fixtures and fixtures.Article/Book should be a no-op self._dumpdata_assert( ['sites', 'fixtures'], '[{"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": "example.com"}}, ' '{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": ' '"News Stories"}}]', exclude_list=['fixtures.Article', 'fixtures.Book'] ) # Excluding sites and fixtures.Article/Book should only leave fixtures.Category self._dumpdata_assert( ['sites', 'fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": ' '"News Stories"}}]', exclude_list=['fixtures.Article', 'fixtures.Book', 'sites'] ) # Excluding a bogus app should throw an error with self.assertRaisesMessage(management.CommandError, "No installed app with label 'foo_app'."): self._dumpdata_assert(['fixtures', 'sites'], '', exclude_list=['foo_app']) # Excluding a bogus model should throw an error with self.assertRaisesMessage(management.CommandError, "Unknown model: fixtures.FooModel"): self._dumpdata_assert(['fixtures', 'sites'], '', exclude_list=['fixtures.FooModel']) @unittest.skipIf(sys.platform.startswith('win'), "Windows doesn't support '?' 
in filenames.") def test_load_fixture_with_special_characters(self): management.call_command('loaddata', 'fixture_with[special]chars', verbosity=0) self.assertQuerysetEqual(Article.objects.all(), ['<Article: How To Deal With Special Characters>']) def test_dumpdata_with_filtering_manager(self): spy1 = Spy.objects.create(name='Paul') spy2 = Spy.objects.create(name='Alex', cover_blown=True) self.assertQuerysetEqual(Spy.objects.all(), ['<Spy: Paul>']) # Use the default manager self._dumpdata_assert( ['fixtures.Spy'], '[{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": false}}]' % spy1.pk ) # Dump using Django's base manager. Should return all objects, # even those normally filtered by the manager self._dumpdata_assert( ['fixtures.Spy'], '[{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": true}}, {"pk": %d, "model": ' '"fixtures.spy", "fields": {"cover_blown": false}}]' % (spy2.pk, spy1.pk), use_base_manager=True ) def test_dumpdata_with_pks(self): management.call_command('loaddata', 'fixture1.json', verbosity=0) management.call_command('loaddata', 'fixture2.json', verbosity=0) self._dumpdata_assert( ['fixtures.Article'], '[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", ' '"pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": ' '"Copyright is fine the way it is", "pub_date": "2006-06-16T14:00:00"}}]', primary_keys='2,3' ) self._dumpdata_assert( ['fixtures.Article'], '[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", ' '"pub_date": "2006-06-16T12:00:00"}}]', primary_keys='2' ) with self.assertRaisesMessage(management.CommandError, "You can only use --pks option with one model"): self._dumpdata_assert( ['fixtures'], '[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", ' '"pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": ' '{"headline": "Copyright is fine the way it is", "pub_date": "2006-06-16T14:00:00"}}]', primary_keys='2,3' ) with self.assertRaisesMessage(management.CommandError, "You can only use --pks option with one model"): self._dumpdata_assert( '', '[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", ' '"pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": ' '{"headline": "Copyright is fine the way it is", "pub_date": "2006-06-16T14:00:00"}}]', primary_keys='2,3' ) with self.assertRaisesMessage(management.CommandError, "You can only use --pks option with one model"): self._dumpdata_assert( ['fixtures.Article', 'fixtures.category'], '[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", ' '"pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": ' '{"headline": "Copyright is fine the way it is", "pub_date": "2006-06-16T14:00:00"}}]', primary_keys='2,3' ) def test_dumpdata_with_uuid_pks(self): m1 = PrimaryKeyUUIDModel.objects.create() m2 = PrimaryKeyUUIDModel.objects.create() output = StringIO() management.call_command( 'dumpdata', 'fixtures.PrimaryKeyUUIDModel', '--pks', ', '.join([str(m1.id), str(m2.id)]), stdout=output, ) result = output.getvalue() self.assertIn('"pk": "%s"' % m1.id, result) self.assertIn('"pk": "%s"' % m2.id, result) def test_dumpdata_with_file_output(self): management.call_command('loaddata', 'fixture1.json', verbosity=0) self._dumpdata_assert( ['fixtures'], '[{"pk": 1, "model": "fixtures.category", 
"fields": {"description": "Latest news stories", "title": ' '"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place ' 'on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": ' '{"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]', filename='dumpdata.json' ) def test_dumpdata_progressbar(self): """ Dumpdata shows a progress bar on the command line when --output is set, stdout is a tty, and verbosity > 0. """ management.call_command('loaddata', 'fixture1.json', verbosity=0) new_io = StringIO() new_io.isatty = lambda: True with NamedTemporaryFile() as file: options = { 'format': 'json', 'stdout': new_io, 'stderr': new_io, 'output': file.name, } management.call_command('dumpdata', 'fixtures', **options) self.assertTrue(new_io.getvalue().endswith('[' + '.' * ProgressBar.progress_width + ']\n')) # Test no progress bar when verbosity = 0 options['verbosity'] = 0 new_io = StringIO() new_io.isatty = lambda: True options.update({'stdout': new_io, 'stderr': new_io}) management.call_command('dumpdata', 'fixtures', **options) self.assertEqual(new_io.getvalue(), '') def test_dumpdata_proxy_without_concrete(self): """ A warning is displayed if a proxy model is dumped without its concrete parent. """ ProxySpy.objects.create(name='Paul') msg = "fixtures.ProxySpy is a proxy model and won't be serialized." with self.assertWarnsMessage(ProxyModelWarning, msg): self._dumpdata_assert(['fixtures.ProxySpy'], '[]') def test_dumpdata_proxy_with_concrete(self): """ A warning isn't displayed if a proxy model is dumped with its concrete parent. """ spy = ProxySpy.objects.create(name='Paul') with warnings.catch_warnings(record=True) as warning_list: warnings.simplefilter('always') self._dumpdata_assert( ['fixtures.ProxySpy', 'fixtures.Spy'], '[{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": false}}]' % spy.pk ) self.assertEqual(len(warning_list), 0) def test_compress_format_loading(self): # Load fixture 4 (compressed), using format specification management.call_command('loaddata', 'fixture4.json', verbosity=0) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Django pets kitten>', ]) def test_compressed_specified_loading(self): # Load fixture 5 (compressed), using format *and* compression specification management.call_command('loaddata', 'fixture5.json.zip', verbosity=0) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: WoW subscribers now outnumber readers>', ]) def test_compressed_loading(self): # Load fixture 5 (compressed), only compression specification management.call_command('loaddata', 'fixture5.zip', verbosity=0) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: WoW subscribers now outnumber readers>', ]) def test_ambiguous_compressed_fixture(self): # The name "fixture5" is ambiguous, so loading it will raise an error with self.assertRaises(management.CommandError) as cm: management.call_command('loaddata', 'fixture5', verbosity=0) self.assertIn("Multiple fixtures named 'fixture5'", cm.exception.args[0]) def test_db_loading(self): # Load db fixtures 1 and 2. 
These will load using the 'default' database identifier implicitly management.call_command('loaddata', 'db_fixture_1', verbosity=0) management.call_command('loaddata', 'db_fixture_2', verbosity=0) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Who needs more than one database?>', '<Article: Who needs to use compressed data?>', ]) def test_loaddata_error_message(self): """ Loading a fixture which contains an invalid object outputs an error message which contains the pk of the object that triggered the error. """ # MySQL needs a little prodding to reject invalid data. # This won't affect other tests because the database connection # is closed at the end of each test. if connection.vendor == 'mysql': connection.cursor().execute("SET sql_mode = 'TRADITIONAL'") with self.assertRaises(IntegrityError) as cm: management.call_command('loaddata', 'invalid.json', verbosity=0) self.assertIn("Could not load fixtures.Article(pk=1):", cm.exception.args[0]) @unittest.skipUnless(connection.vendor == 'postgresql', 'psycopg2 prohibits null characters in data.') def test_loaddata_null_characters_on_postgresql(self): msg = ( 'Could not load fixtures.Article(pk=2): ' 'A string literal cannot contain NUL (0x00) characters.' ) with self.assertRaisesMessage(ValueError, msg): management.call_command('loaddata', 'null_character_in_field_value.json') def test_loaddata_app_option(self): with self.assertRaisesMessage(CommandError, "No fixture named 'db_fixture_1' found."): management.call_command('loaddata', 'db_fixture_1', verbosity=0, app_label="someotherapp") self.assertQuerysetEqual(Article.objects.all(), []) management.call_command('loaddata', 'db_fixture_1', verbosity=0, app_label="fixtures") self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Who needs more than one database?>', ]) def test_loaddata_verbosity_three(self): output = StringIO() management.call_command('loaddata', 'fixture1.json', verbosity=3, stdout=output, stderr=output) command_output = output.getvalue() self.assertIn( "\rProcessed 1 object(s).\rProcessed 2 object(s)." "\rProcessed 3 object(s).\rProcessed 4 object(s).\n", command_output ) def test_loading_using(self): # Load db fixtures 1 and 2. These will load using the 'default' database identifier explicitly management.call_command('loaddata', 'db_fixture_1', verbosity=0, database='default') management.call_command('loaddata', 'db_fixture_2', verbosity=0, database='default') self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Who needs more than one database?>', '<Article: Who needs to use compressed data?>', ]) def test_unmatched_identifier_loading(self): # Try to load db fixture 3. 
This won't load because the database identifier doesn't match with self.assertRaisesMessage(CommandError, "No fixture named 'db_fixture_3' found."): management.call_command('loaddata', 'db_fixture_3', verbosity=0) with self.assertRaisesMessage(CommandError, "No fixture named 'db_fixture_3' found."): management.call_command('loaddata', 'db_fixture_3', verbosity=0, database='default') self.assertQuerysetEqual(Article.objects.all(), []) def test_output_formats(self): # Load back in fixture 1, we need the articles from it management.call_command('loaddata', 'fixture1', verbosity=0) # Try to load fixture 6 using format discovery management.call_command('loaddata', 'fixture6', verbosity=0) self.assertQuerysetEqual(Tag.objects.all(), [ '<Tag: <Article: Time to reform copyright> tagged "copyright">', '<Tag: <Article: Time to reform copyright> tagged "law">' ], ordered=False) # Dump the current contents of the database as a JSON fixture self._dumpdata_assert( ['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": ' '"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place ' 'on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": ' '{"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}, {"pk": 1, "model": ' '"fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "copyright", "tagged_id": ' '3}}, {"pk": 2, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": ' '"law", "tagged_id": 3}}, {"pk": 1, "model": "fixtures.person", "fields": {"name": "Django ' 'Reinhardt"}}, {"pk": 2, "model": "fixtures.person", "fields": {"name": "Stephane Grappelli"}}, ' '{"pk": 3, "model": "fixtures.person", "fields": {"name": "Prince"}}]', natural_foreign_keys=True ) # Dump the current contents of the database as an XML fixture self._dumpdata_assert( ['fixtures'], '<?xml version="1.0" encoding="utf-8"?><django-objects version="1.0"><object pk="1" ' 'model="fixtures.category"><field type="CharField" name="title">News Stories</field><field ' 'type="TextField" name="description">Latest news stories</field></object><object pk="2" ' 'model="fixtures.article"><field type="CharField" name="headline">Poker has no place on ESPN</field>' '<field type="DateTimeField" name="pub_date">2006-06-16T12:00:00</field></object><object pk="3" ' 'model="fixtures.article"><field type="CharField" name="headline">Time to reform copyright</field>' '<field type="DateTimeField" name="pub_date">2006-06-16T13:00:00</field></object><object pk="1" ' 'model="fixtures.tag"><field type="CharField" name="name">copyright</field><field ' 'to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural>' '<natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field>' '</object><object pk="2" model="fixtures.tag"><field type="CharField" name="name">law</field><field ' 'to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural>' '<natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field>' '</object><object pk="1" model="fixtures.person"><field type="CharField" name="name">Django Reinhardt' '</field></object><object pk="2" model="fixtures.person"><field type="CharField" name="name">Stephane ' 'Grappelli</field></object><object pk="3" model="fixtures.person"><field type="CharField" name="name">' 
'Prince</field></object></django-objects>', format='xml', natural_foreign_keys=True ) def test_loading_with_exclude_app(self): Site.objects.all().delete() management.call_command('loaddata', 'fixture1', exclude=['fixtures'], verbosity=0) self.assertFalse(Article.objects.exists()) self.assertFalse(Category.objects.exists()) self.assertQuerysetEqual(Site.objects.all(), ['<Site: example.com>']) def test_loading_with_exclude_model(self): Site.objects.all().delete() management.call_command('loaddata', 'fixture1', exclude=['fixtures.Article'], verbosity=0) self.assertFalse(Article.objects.exists()) self.assertQuerysetEqual(Category.objects.all(), ['<Category: News Stories>']) self.assertQuerysetEqual(Site.objects.all(), ['<Site: example.com>']) def test_exclude_option_errors(self): """Excluding a bogus app or model should raise an error.""" msg = "No installed app with label 'foo_app'." with self.assertRaisesMessage(management.CommandError, msg): management.call_command('loaddata', 'fixture1', exclude=['foo_app'], verbosity=0) msg = "Unknown model: fixtures.FooModel" with self.assertRaisesMessage(management.CommandError, msg): management.call_command('loaddata', 'fixture1', exclude=['fixtures.FooModel'], verbosity=0) def test_stdin_without_format(self): """Reading from stdin raises an error if format isn't specified.""" msg = '--format must be specified when reading from stdin.' with self.assertRaisesMessage(management.CommandError, msg): management.call_command('loaddata', '-', verbosity=0) def test_loading_stdin(self): """Loading fixtures from stdin with json and xml.""" tests_dir = os.path.dirname(__file__) fixture_json = os.path.join(tests_dir, 'fixtures', 'fixture1.json') fixture_xml = os.path.join(tests_dir, 'fixtures', 'fixture3.xml') with mock.patch('django.core.management.commands.loaddata.sys.stdin', open(fixture_json)): management.call_command('loaddata', '--format=json', '-', verbosity=0) self.assertEqual(Article.objects.count(), 2) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Time to reform copyright>', '<Article: Poker has no place on ESPN>', ]) with mock.patch('django.core.management.commands.loaddata.sys.stdin', open(fixture_xml)): management.call_command('loaddata', '--format=xml', '-', verbosity=0) self.assertEqual(Article.objects.count(), 3) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: XML identified as leading cause of cancer>', '<Article: Time to reform copyright>', '<Article: Poker on TV is great!>', ]) class NonexistentFixtureTests(TestCase): """ Custom class to limit fixture dirs. """ def test_loaddata_not_existent_fixture_file(self): stdout_output = StringIO() with self.assertRaisesMessage(CommandError, "No fixture named 'this_fixture_doesnt_exist' found."): management.call_command('loaddata', 'this_fixture_doesnt_exist', stdout=stdout_output) @mock.patch('django.db.connection.enable_constraint_checking') @mock.patch('django.db.connection.disable_constraint_checking') def test_nonexistent_fixture_no_constraint_checking( self, disable_constraint_checking, enable_constraint_checking): """ If no fixtures match the loaddata command, constraints checks on the database shouldn't be disabled. This is performance critical on MSSQL. 
""" with self.assertRaisesMessage(CommandError, "No fixture named 'this_fixture_doesnt_exist' found."): management.call_command('loaddata', 'this_fixture_doesnt_exist', verbosity=0) disable_constraint_checking.assert_not_called() enable_constraint_checking.assert_not_called() class FixtureTransactionTests(DumpDataAssertMixin, TransactionTestCase): available_apps = [ 'fixtures', 'django.contrib.sites', ] @skipUnlessDBFeature('supports_forward_references') def test_format_discovery(self): # Load fixture 1 again, using format discovery management.call_command('loaddata', 'fixture1', verbosity=0) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Time to reform copyright>', '<Article: Poker has no place on ESPN>', ]) # Try to load fixture 2 using format discovery; this will fail # because there are two fixture2's in the fixtures directory with self.assertRaises(management.CommandError) as cm: management.call_command('loaddata', 'fixture2', verbosity=0) self.assertIn("Multiple fixtures named 'fixture2'", cm.exception.args[0]) # object list is unaffected self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Time to reform copyright>', '<Article: Poker has no place on ESPN>', ]) # Dump the current contents of the database as a JSON fixture self._dumpdata_assert( ['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": ' '"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place ' 'on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": ' '{"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]' ) # Load fixture 4 (compressed), using format discovery management.call_command('loaddata', 'fixture4', verbosity=0) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Django pets kitten>', '<Article: Time to reform copyright>', '<Article: Poker has no place on ESPN>', ]) class ForwardReferenceTests(TestCase): def test_forward_reference_fk(self): management.call_command('loaddata', 'forward_reference_fk.json', verbosity=0) self.assertEqual(NaturalKeyThing.objects.count(), 2) t1, t2 = NaturalKeyThing.objects.all() self.assertEqual(t1.other_thing, t2) self.assertEqual(t2.other_thing, t1) def test_forward_reference_m2m(self): management.call_command('loaddata', 'forward_reference_m2m.json', verbosity=0) self.assertEqual(NaturalKeyThing.objects.count(), 3) t1 = NaturalKeyThing.objects.get_by_natural_key('t1') self.assertQuerysetEqual( t1.other_things.order_by('key'), ['<NaturalKeyThing: t2>', '<NaturalKeyThing: t3>'] )
7c3c518781b1c3a7fe56d09d0d01c0eccc2f7a9727e6ea20fefd953cc28df340
""" Fixtures. Fixtures are a way of loading data into the database in bulk. Fixure data can be stored in any serializable format (including JSON and XML). Fixtures are identified by name, and are stored in either a directory named 'fixtures' in the application directory, or in one of the directories named in the ``FIXTURE_DIRS`` setting. """ import uuid from django.contrib.auth.models import Permission from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.db import models class Category(models.Model): title = models.CharField(max_length=100) description = models.TextField() class Meta: ordering = ('title',) def __str__(self): return self.title class Article(models.Model): headline = models.CharField(max_length=100, default='Default headline') pub_date = models.DateTimeField() class Meta: ordering = ('-pub_date', 'headline') def __str__(self): return self.headline class Blog(models.Model): name = models.CharField(max_length=100) featured = models.ForeignKey(Article, models.CASCADE, related_name='fixtures_featured_set') articles = models.ManyToManyField(Article, blank=True, related_name='fixtures_articles_set') def __str__(self): return self.name class Tag(models.Model): name = models.CharField(max_length=100) tagged_type = models.ForeignKey(ContentType, models.CASCADE, related_name="fixtures_tag_set") tagged_id = models.PositiveIntegerField(default=0) tagged = GenericForeignKey(ct_field='tagged_type', fk_field='tagged_id') def __str__(self): return '<%s: %s> tagged "%s"' % (self.tagged.__class__.__name__, self.tagged, self.name) class PersonManager(models.Manager): def get_by_natural_key(self, name): return self.get(name=name) class Person(models.Model): objects = PersonManager() name = models.CharField(max_length=100) class Meta: ordering = ('name',) def __str__(self): return self.name def natural_key(self): return (self.name,) class SpyManager(PersonManager): def get_queryset(self): return super().get_queryset().filter(cover_blown=False) class Spy(Person): objects = SpyManager() cover_blown = models.BooleanField(default=False) class ProxySpy(Spy): class Meta: proxy = True class Visa(models.Model): person = models.ForeignKey(Person, models.CASCADE) permissions = models.ManyToManyField(Permission, blank=True) def __str__(self): return '%s %s' % (self.person.name, ', '.join(p.name for p in self.permissions.all())) class Book(models.Model): name = models.CharField(max_length=100) authors = models.ManyToManyField(Person) class Meta: ordering = ('name',) def __str__(self): authors = ' and '.join(a.name for a in self.authors.all()) return '%s by %s' % (self.name, authors) if authors else self.name class PrimaryKeyUUIDModel(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4) class NaturalKeyThing(models.Model): key = models.CharField(max_length=100) other_thing = models.ForeignKey('NaturalKeyThing', on_delete=models.CASCADE, null=True) other_things = models.ManyToManyField('NaturalKeyThing', related_name='thing_m2m_set') class Manager(models.Manager): def get_by_natural_key(self, key): return self.get(key=key) objects = Manager() def natural_key(self): return (self.key,) def __str__(self): return self.key
595998fbf1beaf82d47d7431fb8c7d3a17232ac0fea7df98839ec5c8b766ec37
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.test import TestCase
from django.utils.functional import lazy

from . import ValidationAssertions
from .models import (
    Article, Author, GenericIPAddressTestModel, GenericIPAddrUnpackUniqueTest,
    ModelToValidate,
)


class BaseModelValidationTests(ValidationAssertions, TestCase):

    def test_missing_required_field_raises_error(self):
        mtv = ModelToValidate(f_with_custom_validator=42)
        self.assertFailsValidation(mtv.full_clean, ['name', 'number'])

    def test_with_correct_value_model_validates(self):
        mtv = ModelToValidate(number=10, name='Some Name')
        self.assertIsNone(mtv.full_clean())

    def test_custom_validate_method(self):
        mtv = ModelToValidate(number=11)
        self.assertFailsValidation(mtv.full_clean, [NON_FIELD_ERRORS, 'name'])

    def test_wrong_FK_value_raises_error(self):
        mtv = ModelToValidate(number=10, name='Some Name', parent_id=3)
        self.assertFieldFailsValidationWithMessage(
            mtv.full_clean, 'parent',
            ['model to validate instance with id %r does not exist.' % mtv.parent_id]
        )
        mtv = ModelToValidate(number=10, name='Some Name', ufm_id='Some Name')
        self.assertFieldFailsValidationWithMessage(
            mtv.full_clean, 'ufm',
            ["unique fields model instance with unique_charfield %r does not exist." % mtv.name]
        )

    def test_correct_FK_value_validates(self):
        parent = ModelToValidate.objects.create(number=10, name='Some Name')
        mtv = ModelToValidate(number=10, name='Some Name', parent_id=parent.pk)
        self.assertIsNone(mtv.full_clean())

    def test_limited_FK_raises_error(self):
        # The limit_choices_to on the parent field says that a parent object's
        # number attribute must be 10, so this should fail validation.
        parent = ModelToValidate.objects.create(number=11, name='Other Name')
        mtv = ModelToValidate(number=10, name='Some Name', parent_id=parent.pk)
        self.assertFailsValidation(mtv.full_clean, ['parent'])

    def test_wrong_email_value_raises_error(self):
        mtv = ModelToValidate(number=10, name='Some Name', email='not-an-email')
        self.assertFailsValidation(mtv.full_clean, ['email'])

    def test_correct_email_value_passes(self):
        mtv = ModelToValidate(number=10, name='Some Name', email='[email protected]')
        self.assertIsNone(mtv.full_clean())

    def test_wrong_url_value_raises_error(self):
        mtv = ModelToValidate(number=10, name='Some Name', url='not a url')
        self.assertFieldFailsValidationWithMessage(mtv.full_clean, 'url', ['Enter a valid URL.'])

    def test_text_greater_that_charfields_max_length_raises_errors(self):
        mtv = ModelToValidate(number=10, name='Some Name' * 100)
        self.assertFailsValidation(mtv.full_clean, ['name'])

    def test_malformed_slug_raises_error(self):
        mtv = ModelToValidate(number=10, name='Some Name', slug='##invalid##')
        self.assertFailsValidation(mtv.full_clean, ['slug'])

    def test_full_clean_does_not_mutate_exclude(self):
        mtv = ModelToValidate(f_with_custom_validator=42)
        exclude = ['number']
        self.assertFailsValidation(mtv.full_clean, ['name'], exclude=exclude)
        self.assertEqual(len(exclude), 1)
        self.assertEqual(exclude[0], 'number')


class ArticleForm(forms.ModelForm):
    class Meta:
        model = Article
        exclude = ['author']


class ModelFormsTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        cls.author = Author.objects.create(name='Joseph Kocherhans')

    def test_partial_validation(self):
        # Make sure the "commit=False and set field values later" idiom still
        # works with model validation.
        data = {
            'title': 'The state of model validation',
            'pub_date': '2010-1-10 14:49:00'
        }
        form = ArticleForm(data)
        self.assertEqual(list(form.errors), [])
        article = form.save(commit=False)
        article.author = self.author
        article.save()

    def test_validation_with_empty_blank_field(self):
        # Since a value for pub_date wasn't provided and the field is
        # blank=True, model-validation should pass.
        # Also, Article.clean() should be run, so pub_date will be filled
        # after validation, so the form should save cleanly even though
        # pub_date is not allowed to be null.
        data = {
            'title': 'The state of model validation',
        }
        article = Article(author_id=self.author.id)
        form = ArticleForm(data, instance=article)
        self.assertEqual(list(form.errors), [])
        self.assertIsNotNone(form.instance.pub_date)
        article = form.save()

    def test_validation_with_invalid_blank_field(self):
        # Even though pub_date is set to blank=True, an invalid value was
        # provided, so it should fail validation.
        data = {
            'title': 'The state of model validation',
            'pub_date': 'never'
        }
        article = Article(author_id=self.author.id)
        form = ArticleForm(data, instance=article)
        self.assertEqual(list(form.errors), ['pub_date'])


class GenericIPAddressFieldTests(ValidationAssertions, TestCase):

    def test_correct_generic_ip_passes(self):
        giptm = GenericIPAddressTestModel(generic_ip="1.2.3.4")
        self.assertIsNone(giptm.full_clean())
        giptm = GenericIPAddressTestModel(generic_ip=" 1.2.3.4 ")
        self.assertIsNone(giptm.full_clean())
        giptm = GenericIPAddressTestModel(generic_ip="1.2.3.4\n")
        self.assertIsNone(giptm.full_clean())
        giptm = GenericIPAddressTestModel(generic_ip="2001::2")
        self.assertIsNone(giptm.full_clean())

    def test_invalid_generic_ip_raises_error(self):
        giptm = GenericIPAddressTestModel(generic_ip="294.4.2.1")
        self.assertFailsValidation(giptm.full_clean, ['generic_ip'])
        giptm = GenericIPAddressTestModel(generic_ip="1:2")
        self.assertFailsValidation(giptm.full_clean, ['generic_ip'])
        giptm = GenericIPAddressTestModel(generic_ip=1)
        self.assertFailsValidation(giptm.full_clean, ['generic_ip'])
        giptm = GenericIPAddressTestModel(generic_ip=lazy(lambda: 1, int))
        self.assertFailsValidation(giptm.full_clean, ['generic_ip'])

    def test_correct_v4_ip_passes(self):
        giptm = GenericIPAddressTestModel(v4_ip="1.2.3.4")
        self.assertIsNone(giptm.full_clean())

    def test_invalid_v4_ip_raises_error(self):
        giptm = GenericIPAddressTestModel(v4_ip="294.4.2.1")
        self.assertFailsValidation(giptm.full_clean, ['v4_ip'])
        giptm = GenericIPAddressTestModel(v4_ip="2001::2")
        self.assertFailsValidation(giptm.full_clean, ['v4_ip'])

    def test_correct_v6_ip_passes(self):
        giptm = GenericIPAddressTestModel(v6_ip="2001::2")
        self.assertIsNone(giptm.full_clean())

    def test_invalid_v6_ip_raises_error(self):
        giptm = GenericIPAddressTestModel(v6_ip="1.2.3.4")
        self.assertFailsValidation(giptm.full_clean, ['v6_ip'])
        giptm = GenericIPAddressTestModel(v6_ip="1:2")
        self.assertFailsValidation(giptm.full_clean, ['v6_ip'])

    def test_v6_uniqueness_detection(self):
        # These two addresses are the same with different syntax
        giptm = GenericIPAddressTestModel(generic_ip="2001::1:0:0:0:0:2")
        giptm.save()
        giptm = GenericIPAddressTestModel(generic_ip="2001:0:1:2")
        self.assertFailsValidation(giptm.full_clean, ['generic_ip'])

    def test_v4_unpack_uniqueness_detection(self):
        # These two are different, because we are not doing IPv4 unpacking
        giptm = GenericIPAddressTestModel(generic_ip="::ffff:10.10.10.10")
        giptm.save()
        giptm = GenericIPAddressTestModel(generic_ip="10.10.10.10")
        self.assertIsNone(giptm.full_clean())

        # These two are the same, because we are doing IPv4 unpacking
        giptm = GenericIPAddrUnpackUniqueTest(generic_v4unpack_ip="::ffff:18.52.18.52")
        giptm.save()
        giptm = GenericIPAddrUnpackUniqueTest(generic_v4unpack_ip="18.52.18.52")
        self.assertFailsValidation(giptm.full_clean, ['generic_v4unpack_ip'])

    def test_empty_generic_ip_passes(self):
        giptm = GenericIPAddressTestModel(generic_ip="")
        self.assertIsNone(giptm.full_clean())
        giptm = GenericIPAddressTestModel(generic_ip=None)
        self.assertIsNone(giptm.full_clean())
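All of these assertions go through Model.full_clean(), which aggregates field and non-field errors into one ValidationError. A condensed sketch of the behavior the assertion mixin in the next file relies on (the helper name is hypothetical):

from django.core.exceptions import ValidationError


def describe_validation_failure(instance):
    # full_clean() runs clean_fields(), clean(), and uniqueness checks,
    # then raises a single ValidationError whose message_dict maps field
    # names (or '__all__' for non-field errors) to lists of messages.
    try:
        instance.full_clean()
    except ValidationError as e:
        return e.message_dict
    return {}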
ffb00a56a02ea5db118761c2beb73a331c3036795ad23fd8cb4058767b831f96
from django.core.exceptions import ValidationError


class ValidationAssertions:
    def assertFailsValidation(self, clean, failed_fields, **kwargs):
        with self.assertRaises(ValidationError) as cm:
            clean(**kwargs)
        self.assertEqual(sorted(failed_fields), sorted(cm.exception.message_dict))

    def assertFieldFailsValidationWithMessage(self, clean, field_name, message):
        with self.assertRaises(ValidationError) as cm:
            clean()
        self.assertIn(field_name, cm.exception.message_dict)
        self.assertEqual(message, cm.exception.message_dict[field_name])
95ce8d73ff78770d7301fd0ac7117446535117cc370a274dac955d2174fa3335
from django.test import SimpleTestCase

from . import ValidationAssertions
from .models import CustomMessagesModel


class CustomMessagesTests(ValidationAssertions, SimpleTestCase):
    def test_custom_simple_validator_message(self):
        cmm = CustomMessagesModel(number=12)
        self.assertFieldFailsValidationWithMessage(cmm.full_clean, 'number', ['AAARGH'])

    def test_custom_null_message(self):
        cmm = CustomMessagesModel()
        self.assertFieldFailsValidationWithMessage(cmm.full_clean, 'number', ['NULL'])
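The models module for this app isn't part of the excerpt. A minimal sketch of a model consistent with these two tests (the field name comes from the tests; the wiring below is an assumption):

from django.core.exceptions import ValidationError
from django.db import models


def validate_answer_to_universe(value):
    # Supplies the custom 'AAARGH' message asserted above.
    if value != 42:
        raise ValidationError('AAARGH', code='not42')


class CustomMessagesModel(models.Model):
    # error_messages overrides the default "This field cannot be null."
    # message with the custom 'NULL' message asserted above.
    number = models.IntegerField(
        error_messages={'null': 'NULL'},
        validators=[validate_answer_to_universe],
    )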
7cb001fad6fa360eee97aa9f64c4200163efccf12d7068c4f26798e5864fac98
from django.test import SimpleTestCase

from . import ValidationAssertions
from .models import ModelToValidate


class TestModelsWithValidators(ValidationAssertions, SimpleTestCase):
    def test_custom_validator_passes_for_correct_value(self):
        mtv = ModelToValidate(number=10, name='Some Name', f_with_custom_validator=42,
                              f_with_iterable_of_validators=42)
        self.assertIsNone(mtv.full_clean())

    def test_custom_validator_raises_error_for_incorrect_value(self):
        mtv = ModelToValidate(number=10, name='Some Name', f_with_custom_validator=12,
                              f_with_iterable_of_validators=42)
        self.assertFailsValidation(mtv.full_clean, ['f_with_custom_validator'])
        self.assertFieldFailsValidationWithMessage(
            mtv.full_clean,
            'f_with_custom_validator',
            ['This is not the answer to life, universe and everything!']
        )

    def test_field_validators_can_be_any_iterable(self):
        mtv = ModelToValidate(number=10, name='Some Name', f_with_custom_validator=42,
                              f_with_iterable_of_validators=12)
        self.assertFailsValidation(mtv.full_clean, ['f_with_iterable_of_validators'])
        self.assertFieldFailsValidationWithMessage(
            mtv.full_clean,
            'f_with_iterable_of_validators',
            ['This is not the answer to life, universe and everything!']
        )
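ModelToValidate isn't shown in this excerpt; the point of the last test is that Field.validators accepts any iterable, not just a list. A self-contained sketch under that assumption (the class and validator names are hypothetical stand-ins):

from django.core.exceptions import ValidationError
from django.db import models


def validate_answer_to_universe(value):
    if value != 42:
        raise ValidationError(
            'This is not the answer to life, universe and everything!',
            code='not42',
        )


class ModelToValidateSketch(models.Model):
    # 'validators' accepts any iterable -- here a list and a tuple.
    f_with_custom_validator = models.IntegerField(
        null=True, blank=True, validators=[validate_answer_to_universe],
    )
    f_with_iterable_of_validators = models.IntegerField(
        null=True, blank=True, validators=(validate_answer_to_universe,),
    )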
8a1cd6a0ec9cd3963c49bbd9022fcaf0d402dc9842af6de95131b4519942e189
import os import sys import unittest from types import ModuleType, SimpleNamespace from unittest import mock from django.conf import ENVIRONMENT_VARIABLE, LazySettings, Settings, settings from django.core.exceptions import ImproperlyConfigured from django.http import HttpRequest from django.test import ( SimpleTestCase, TestCase, TransactionTestCase, modify_settings, override_settings, signals, ) from django.test.utils import requires_tz_support @modify_settings(ITEMS={ 'prepend': ['b'], 'append': ['d'], 'remove': ['a', 'e'] }) @override_settings(ITEMS=['a', 'c', 'e'], ITEMS_OUTER=[1, 2, 3], TEST='override', TEST_OUTER='outer') class FullyDecoratedTranTestCase(TransactionTestCase): available_apps = [] def test_override(self): self.assertEqual(settings.ITEMS, ['b', 'c', 'd']) self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3]) self.assertEqual(settings.TEST, 'override') self.assertEqual(settings.TEST_OUTER, 'outer') @modify_settings(ITEMS={ 'append': ['e', 'f'], 'prepend': ['a'], 'remove': ['d', 'c'], }) def test_method_list_override(self): self.assertEqual(settings.ITEMS, ['a', 'b', 'e', 'f']) self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3]) @modify_settings(ITEMS={ 'append': ['b'], 'prepend': ['d'], 'remove': ['a', 'c', 'e'], }) def test_method_list_override_no_ops(self): self.assertEqual(settings.ITEMS, ['b', 'd']) @modify_settings(ITEMS={ 'append': 'e', 'prepend': 'a', 'remove': 'c', }) def test_method_list_override_strings(self): self.assertEqual(settings.ITEMS, ['a', 'b', 'd', 'e']) @modify_settings(ITEMS={'remove': ['b', 'd']}) @modify_settings(ITEMS={'append': ['b'], 'prepend': ['d']}) def test_method_list_override_nested_order(self): self.assertEqual(settings.ITEMS, ['d', 'c', 'b']) @override_settings(TEST='override2') def test_method_override(self): self.assertEqual(settings.TEST, 'override2') self.assertEqual(settings.TEST_OUTER, 'outer') def test_decorated_testcase_name(self): self.assertEqual(FullyDecoratedTranTestCase.__name__, 'FullyDecoratedTranTestCase') def test_decorated_testcase_module(self): self.assertEqual(FullyDecoratedTranTestCase.__module__, __name__) @modify_settings(ITEMS={ 'prepend': ['b'], 'append': ['d'], 'remove': ['a', 'e'] }) @override_settings(ITEMS=['a', 'c', 'e'], TEST='override') class FullyDecoratedTestCase(TestCase): def test_override(self): self.assertEqual(settings.ITEMS, ['b', 'c', 'd']) self.assertEqual(settings.TEST, 'override') @modify_settings(ITEMS={ 'append': 'e', 'prepend': 'a', 'remove': 'c', }) @override_settings(TEST='override2') def test_method_override(self): self.assertEqual(settings.ITEMS, ['a', 'b', 'd', 'e']) self.assertEqual(settings.TEST, 'override2') class ClassDecoratedTestCaseSuper(TestCase): """ Dummy class for testing max recursion error in child class call to super(). Refs #17011. """ def test_max_recursion_error(self): pass @override_settings(TEST='override') class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper): @classmethod def setUpClass(cls): super().setUpClass() cls.foo = getattr(settings, 'TEST', 'BUG') def test_override(self): self.assertEqual(settings.TEST, 'override') def test_setupclass_override(self): """Settings are overridden within setUpClass (#21281).""" self.assertEqual(self.foo, 'override') @override_settings(TEST='override2') def test_method_override(self): self.assertEqual(settings.TEST, 'override2') def test_max_recursion_error(self): """ Overriding a method on a super class and then calling that method on the super class should not trigger infinite recursion. See #17011. 
""" super().test_max_recursion_error() @modify_settings(ITEMS={'append': 'mother'}) @override_settings(ITEMS=['father'], TEST='override-parent') class ParentDecoratedTestCase(TestCase): pass @modify_settings(ITEMS={'append': ['child']}) @override_settings(TEST='override-child') class ChildDecoratedTestCase(ParentDecoratedTestCase): def test_override_settings_inheritance(self): self.assertEqual(settings.ITEMS, ['father', 'mother', 'child']) self.assertEqual(settings.TEST, 'override-child') class SettingsTests(SimpleTestCase): def setUp(self): self.testvalue = None signals.setting_changed.connect(self.signal_callback) def tearDown(self): signals.setting_changed.disconnect(self.signal_callback) def signal_callback(self, sender, setting, value, **kwargs): if setting == 'TEST': self.testvalue = value def test_override(self): settings.TEST = 'test' self.assertEqual('test', settings.TEST) with self.settings(TEST='override'): self.assertEqual('override', settings.TEST) self.assertEqual('test', settings.TEST) del settings.TEST def test_override_change(self): settings.TEST = 'test' self.assertEqual('test', settings.TEST) with self.settings(TEST='override'): self.assertEqual('override', settings.TEST) settings.TEST = 'test2' self.assertEqual('test', settings.TEST) del settings.TEST def test_override_doesnt_leak(self): with self.assertRaises(AttributeError): getattr(settings, 'TEST') with self.settings(TEST='override'): self.assertEqual('override', settings.TEST) settings.TEST = 'test' with self.assertRaises(AttributeError): getattr(settings, 'TEST') @override_settings(TEST='override') def test_decorator(self): self.assertEqual('override', settings.TEST) def test_context_manager(self): with self.assertRaises(AttributeError): getattr(settings, 'TEST') override = override_settings(TEST='override') with self.assertRaises(AttributeError): getattr(settings, 'TEST') override.enable() self.assertEqual('override', settings.TEST) override.disable() with self.assertRaises(AttributeError): getattr(settings, 'TEST') def test_class_decorator(self): # SimpleTestCase can be decorated by override_settings, but not ut.TestCase class SimpleTestCaseSubclass(SimpleTestCase): pass class UnittestTestCaseSubclass(unittest.TestCase): pass decorated = override_settings(TEST='override')(SimpleTestCaseSubclass) self.assertIsInstance(decorated, type) self.assertTrue(issubclass(decorated, SimpleTestCase)) with self.assertRaisesMessage(Exception, "Only subclasses of Django SimpleTestCase"): decorated = override_settings(TEST='override')(UnittestTestCaseSubclass) def test_signal_callback_context_manager(self): with self.assertRaises(AttributeError): getattr(settings, 'TEST') with self.settings(TEST='override'): self.assertEqual(self.testvalue, 'override') self.assertIsNone(self.testvalue) @override_settings(TEST='override') def test_signal_callback_decorator(self): self.assertEqual(self.testvalue, 'override') # # Regression tests for #10130: deleting settings. 
# def test_settings_delete(self): settings.TEST = 'test' self.assertEqual('test', settings.TEST) del settings.TEST msg = "'Settings' object has no attribute 'TEST'" with self.assertRaisesMessage(AttributeError, msg): getattr(settings, 'TEST') def test_settings_delete_wrapped(self): with self.assertRaisesMessage(TypeError, "can't delete _wrapped."): delattr(settings, '_wrapped') def test_override_settings_delete(self): """ Allow deletion of a setting in an overridden settings set (#18824) """ previous_i18n = settings.USE_I18N previous_l10n = settings.USE_L10N with self.settings(USE_I18N=False): del settings.USE_I18N with self.assertRaises(AttributeError): getattr(settings, 'USE_I18N') # Should also work for a non-overridden setting del settings.USE_L10N with self.assertRaises(AttributeError): getattr(settings, 'USE_L10N') self.assertNotIn('USE_I18N', dir(settings)) self.assertNotIn('USE_L10N', dir(settings)) self.assertEqual(settings.USE_I18N, previous_i18n) self.assertEqual(settings.USE_L10N, previous_l10n) def test_override_settings_nested(self): """ override_settings uses the actual _wrapped attribute at runtime, not when it was instantiated. """ with self.assertRaises(AttributeError): getattr(settings, 'TEST') with self.assertRaises(AttributeError): getattr(settings, 'TEST2') inner = override_settings(TEST2='override') with override_settings(TEST='override'): self.assertEqual('override', settings.TEST) with inner: self.assertEqual('override', settings.TEST) self.assertEqual('override', settings.TEST2) # inner's __exit__ should have restored the settings of the outer # context manager, not those when the class was instantiated self.assertEqual('override', settings.TEST) with self.assertRaises(AttributeError): getattr(settings, 'TEST2') with self.assertRaises(AttributeError): getattr(settings, 'TEST') with self.assertRaises(AttributeError): getattr(settings, 'TEST2') def test_no_secret_key(self): settings_module = ModuleType('fake_settings_module') sys.modules['fake_settings_module'] = settings_module msg = 'The SECRET_KEY setting must not be empty.' try: with self.assertRaisesMessage(ImproperlyConfigured, msg): Settings('fake_settings_module') finally: del sys.modules['fake_settings_module'] def test_no_settings_module(self): msg = ( 'Requested setting%s, but settings are not configured. You ' 'must either define the environment variable DJANGO_SETTINGS_MODULE ' 'or call settings.configure() before accessing settings.' 
) orig_settings = os.environ[ENVIRONMENT_VARIABLE] os.environ[ENVIRONMENT_VARIABLE] = '' try: with self.assertRaisesMessage(ImproperlyConfigured, msg % 's'): settings._setup() with self.assertRaisesMessage(ImproperlyConfigured, msg % ' TEST'): settings._setup('TEST') finally: os.environ[ENVIRONMENT_VARIABLE] = orig_settings def test_already_configured(self): with self.assertRaisesMessage(RuntimeError, 'Settings already configured.'): settings.configure() def test_nonupper_settings_prohibited_in_configure(self): s = LazySettings() with self.assertRaisesMessage(TypeError, "Setting 'foo' must be uppercase."): s.configure(foo='bar') def test_nonupper_settings_ignored_in_default_settings(self): s = LazySettings() s.configure(SimpleNamespace(foo='bar')) with self.assertRaises(AttributeError): getattr(s, 'foo') @requires_tz_support @mock.patch('django.conf.global_settings.TIME_ZONE', 'test') def test_incorrect_timezone(self): with self.assertRaisesMessage(ValueError, 'Incorrect timezone setting: test'): settings._setup() class TestComplexSettingOverride(SimpleTestCase): def setUp(self): self.old_warn_override_settings = signals.COMPLEX_OVERRIDE_SETTINGS.copy() signals.COMPLEX_OVERRIDE_SETTINGS.add('TEST_WARN') def tearDown(self): signals.COMPLEX_OVERRIDE_SETTINGS = self.old_warn_override_settings self.assertNotIn('TEST_WARN', signals.COMPLEX_OVERRIDE_SETTINGS) def test_complex_override_warning(self): """Regression test for #19031""" msg = 'Overriding setting TEST_WARN can lead to unexpected behavior.' with self.assertWarnsMessage(UserWarning, msg) as cm: with override_settings(TEST_WARN='override'): self.assertEqual(settings.TEST_WARN, 'override') self.assertEqual(cm.filename, __file__) class SecureProxySslHeaderTest(SimpleTestCase): @override_settings(SECURE_PROXY_SSL_HEADER=None) def test_none(self): req = HttpRequest() self.assertIs(req.is_secure(), False) @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTOCOL', 'https')) def test_set_without_xheader(self): req = HttpRequest() self.assertIs(req.is_secure(), False) @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTOCOL', 'https')) def test_set_with_xheader_wrong(self): req = HttpRequest() req.META['HTTP_X_FORWARDED_PROTOCOL'] = 'wrongvalue' self.assertIs(req.is_secure(), False) @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTOCOL', 'https')) def test_set_with_xheader_right(self): req = HttpRequest() req.META['HTTP_X_FORWARDED_PROTOCOL'] = 'https' self.assertIs(req.is_secure(), True) class IsOverriddenTest(SimpleTestCase): def test_configure(self): s = LazySettings() s.configure(SECRET_KEY='foo') self.assertTrue(s.is_overridden('SECRET_KEY')) def test_module(self): settings_module = ModuleType('fake_settings_module') settings_module.SECRET_KEY = 'foo' sys.modules['fake_settings_module'] = settings_module try: s = Settings('fake_settings_module') self.assertTrue(s.is_overridden('SECRET_KEY')) self.assertFalse(s.is_overridden('ALLOWED_HOSTS')) finally: del sys.modules['fake_settings_module'] def test_override(self): self.assertFalse(settings.is_overridden('ALLOWED_HOSTS')) with override_settings(ALLOWED_HOSTS=[]): self.assertTrue(settings.is_overridden('ALLOWED_HOSTS')) def test_unevaluated_lazysettings_repr(self): lazy_settings = LazySettings() expected = '<LazySettings [Unevaluated]>' self.assertEqual(repr(lazy_settings), expected) def test_evaluated_lazysettings_repr(self): lazy_settings = LazySettings() module = os.environ.get(ENVIRONMENT_VARIABLE) expected = '<LazySettings "%s">' % module # 
Force evaluation of the lazy object. lazy_settings.APPEND_SLASH self.assertEqual(repr(lazy_settings), expected) def test_usersettingsholder_repr(self): lazy_settings = LazySettings() lazy_settings.configure(APPEND_SLASH=False) expected = '<UserSettingsHolder>' self.assertEqual(repr(lazy_settings._wrapped), expected) def test_settings_repr(self): module = os.environ.get(ENVIRONMENT_VARIABLE) lazy_settings = Settings(module) expected = '<Settings "%s">' % module self.assertEqual(repr(lazy_settings), expected) class TestListSettings(unittest.TestCase): """ Make sure settings that should be lists or tuples throw ImproperlyConfigured if they are set to a string instead of a list or tuple. """ list_or_tuple_settings = ( "INSTALLED_APPS", "TEMPLATE_DIRS", "LOCALE_PATHS", ) def test_tuple_settings(self): settings_module = ModuleType('fake_settings_module') settings_module.SECRET_KEY = 'foo' for setting in self.list_or_tuple_settings: setattr(settings_module, setting, ('non_list_or_tuple_value')) sys.modules['fake_settings_module'] = settings_module try: with self.assertRaises(ImproperlyConfigured): Settings('fake_settings_module') finally: del sys.modules['fake_settings_module'] delattr(settings_module, setting) class SettingChangeEnterException(Exception): pass class SettingChangeExitException(Exception): pass class OverrideSettingsIsolationOnExceptionTests(SimpleTestCase): """ The override_settings context manager restore settings if one of the receivers of "setting_changed" signal fails. Check the three cases of receiver failure detailed in receiver(). In each case, ALL receivers are called when exiting the context manager. """ def setUp(self): signals.setting_changed.connect(self.receiver) self.addCleanup(signals.setting_changed.disconnect, self.receiver) # Create a spy that's connected to the `setting_changed` signal and # executed AFTER `self.receiver`. self.spy_receiver = mock.Mock() signals.setting_changed.connect(self.spy_receiver) self.addCleanup(signals.setting_changed.disconnect, self.spy_receiver) def receiver(self, **kwargs): """ A receiver that fails while certain settings are being changed. - SETTING_BOTH raises an error while receiving the signal on both entering and exiting the context manager. - SETTING_ENTER raises an error only on enter. - SETTING_EXIT raises an error only on exit. """ setting = kwargs['setting'] enter = kwargs['enter'] if setting in ('SETTING_BOTH', 'SETTING_ENTER') and enter: raise SettingChangeEnterException if setting in ('SETTING_BOTH', 'SETTING_EXIT') and not enter: raise SettingChangeExitException def check_settings(self): """Assert that settings for these tests aren't present.""" self.assertFalse(hasattr(settings, 'SETTING_BOTH')) self.assertFalse(hasattr(settings, 'SETTING_ENTER')) self.assertFalse(hasattr(settings, 'SETTING_EXIT')) self.assertFalse(hasattr(settings, 'SETTING_PASS')) def check_spy_receiver_exit_calls(self, call_count): """ Assert that `self.spy_receiver` was called exactly `call_count` times with the ``enter=False`` keyword argument. """ kwargs_with_exit = [ kwargs for args, kwargs in self.spy_receiver.call_args_list if ('enter', False) in kwargs.items() ] self.assertEqual(len(kwargs_with_exit), call_count) def test_override_settings_both(self): """Receiver fails on both enter and exit.""" with self.assertRaises(SettingChangeEnterException): with override_settings(SETTING_PASS='BOTH', SETTING_BOTH='BOTH'): pass self.check_settings() # Two settings were touched, so expect two calls of `spy_receiver`. 
self.check_spy_receiver_exit_calls(call_count=2) def test_override_settings_enter(self): """Receiver fails on enter only.""" with self.assertRaises(SettingChangeEnterException): with override_settings(SETTING_PASS='ENTER', SETTING_ENTER='ENTER'): pass self.check_settings() # Two settings were touched, so expect two calls of `spy_receiver`. self.check_spy_receiver_exit_calls(call_count=2) def test_override_settings_exit(self): """Receiver fails on exit only.""" with self.assertRaises(SettingChangeExitException): with override_settings(SETTING_PASS='EXIT', SETTING_EXIT='EXIT'): pass self.check_settings() # Two settings were touched, so expect two calls of `spy_receiver`. self.check_spy_receiver_exit_calls(call_count=2) def test_override_settings_reusable_on_enter(self): """ Error is raised correctly when reusing the same override_settings instance. """ @override_settings(SETTING_ENTER='ENTER') def decorated_function(): pass with self.assertRaises(SettingChangeEnterException): decorated_function() signals.setting_changed.disconnect(self.receiver) # This call shouldn't raise any errors. decorated_function()
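The tests above exercise override_settings in every supported position. For reference, a compact sketch of the three usage forms in one hypothetical test class, from widest to narrowest scope:

from django.conf import settings
from django.test import SimpleTestCase, override_settings


@override_settings(TEST='class-level')           # applies to every test method
class OverrideScopesTests(SimpleTestCase):

    @override_settings(TEST='method-level')      # narrows it for one method
    def test_method_decorator(self):
        self.assertEqual(settings.TEST, 'method-level')

    def test_context_manager(self):
        with self.settings(TEST='block-level'):  # narrowest scope
            self.assertEqual(settings.TEST, 'block-level')
        self.assertEqual(settings.TEST, 'class-level')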
1796c08d22a123e165355c73f13bff2cb270f1f1e8ac046200688c4d246ed69a
import sys
from types import ModuleType

from django.conf import FILE_CHARSET_DEPRECATED_MSG, Settings, settings
from django.test import SimpleTestCase, ignore_warnings
from django.utils.deprecation import RemovedInDjango31Warning


class DeprecationTests(SimpleTestCase):
    msg = FILE_CHARSET_DEPRECATED_MSG

    def test_override_settings_warning(self):
        with self.assertRaisesMessage(RemovedInDjango31Warning, self.msg):
            with self.settings(FILE_CHARSET='latin1'):
                pass

    def test_settings_init_warning(self):
        settings_module = ModuleType('fake_settings_module')
        settings_module.FILE_CHARSET = 'latin1'
        settings_module.SECRET_KEY = 'ABC'
        sys.modules['fake_settings_module'] = settings_module
        try:
            with self.assertRaisesMessage(RemovedInDjango31Warning, self.msg):
                Settings('fake_settings_module')
        finally:
            del sys.modules['fake_settings_module']

    def test_access_warning(self):
        with self.assertRaisesMessage(RemovedInDjango31Warning, self.msg):
            settings.FILE_CHARSET
        # Works a second time.
        with self.assertRaisesMessage(RemovedInDjango31Warning, self.msg):
            settings.FILE_CHARSET

    @ignore_warnings(category=RemovedInDjango31Warning)
    def test_access(self):
        with self.settings(FILE_CHARSET='latin1'):
            self.assertEqual(settings.FILE_CHARSET, 'latin1')
            # Works a second time.
            self.assertEqual(settings.FILE_CHARSET, 'latin1')
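assertRaisesMessage can catch these warnings because Django's own test suite runs with deprecation warnings escalated to errors, while ignore_warnings silences them for tests that still need the old behavior. A small sketch of the context-manager form, which is handy when only part of a test touches the deprecated setting (the helper name is hypothetical):

from django.conf import settings
from django.test import ignore_warnings
from django.utils.deprecation import RemovedInDjango31Warning


def read_file_charset_quietly():
    # ignore_warnings is a TestContextDecorator, so it also works as a
    # context manager, not just as the decorator used above.
    with ignore_warnings(category=RemovedInDjango31Warning):
        return settings.FILE_CHARSET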
961b0d5a0835666299a91aee24ed2bfd341df952ee76efddcc27ec9d604b8b77
from django.test import TestCase

from .models import Person


class PropertyTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        cls.a = Person.objects.create(first_name='John', last_name='Lennon')

    def test_getter(self):
        self.assertEqual(self.a.full_name, 'John Lennon')

    def test_setter(self):
        # The "full_name" property doesn't provide a setter.
        with self.assertRaises(AttributeError):
            setattr(self.a, 'full_name', 'Paul McCartney')
        # And it cannot be used to initialize the class.
        with self.assertRaisesMessage(TypeError, "Person() got an unexpected keyword argument 'full_name'"):
            Person(full_name='Paul McCartney')
        # But "full_name_2" does have a setter, so it can be used to
        # initialize the class.
        a2 = Person(full_name_2='Paul McCartney')
        a2.save()
        self.assertEqual(a2.first_name, 'Paul')
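The Person model for these tests isn't included in the excerpt. A minimal sketch consistent with them, under the assumption that full_name is a read-only property and full_name_2 adds a setter (Model.__init__ only accepts a property name as a keyword argument when the property can be assigned):

from django.db import models


class Person(models.Model):
    first_name = models.CharField(max_length=20)
    last_name = models.CharField(max_length=20)

    @property
    def full_name(self):
        # Read-only: no setter, so Person(full_name=...) raises TypeError
        # and setattr() raises AttributeError.
        return '%s %s' % (self.first_name, self.last_name)

    def _get_full_name_2(self):
        return self.full_name

    def _set_full_name_2(self, value):
        # Having a setter is what lets Person(full_name_2=...) work.
        self.first_name, self.last_name = value.split(' ', 1)

    full_name_2 = property(_get_full_name_2, _set_full_name_2)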
f72e3931dc6c84ea994d6a789bda5e691b8222a07d380425fb55457d9ed5c7e7
from unittest import mock

from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.db import connections
from django.test import TestCase, override_settings
from django.urls import path, reverse


class Router:
    target_db = None

    def db_for_read(self, model, **hints):
        return self.target_db

    db_for_write = db_for_read


site = admin.AdminSite(name='test_adminsite')
site.register(User, admin_class=UserAdmin)

urlpatterns = [
    path('admin/', site.urls),
]


@override_settings(ROOT_URLCONF=__name__, DATABASE_ROUTERS=['%s.Router' % __name__])
class MultiDatabaseTests(TestCase):
    databases = {'default', 'other'}

    @classmethod
    def setUpTestData(cls):
        cls.superusers = {}
        for db in connections:
            Router.target_db = db
            cls.superusers[db] = User.objects.create_superuser(
                username='admin', password='something', email='[email protected]',
            )

    @mock.patch('django.contrib.auth.admin.transaction')
    def test_add_view(self, mock):
        for db in connections:
            with self.subTest(db_connection=db):
                Router.target_db = db
                self.client.force_login(self.superusers[db])
                self.client.post(reverse('test_adminsite:auth_user_add'), {
                    'username': 'some_user',
                    'password1': 'helloworld',
                    'password2': 'helloworld',
                })
                mock.atomic.assert_called_with(using=db)
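`databases = {'default', 'other'}` assumes a second connection alias exists; the corresponding settings aren't in this excerpt. A minimal sketch of the DATABASES configuration such a test expects (engine and file names are hypothetical):

# Hypothetical settings snippet: two SQLite databases, so that iterating
# over django.db.connections yields both 'default' and 'other'.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'default.sqlite3',
    },
    'other': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'other.sqlite3',
    },
}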
181a6221caba36617074d72435460cfefdeb0533ae07fccb2c80f92f0ae41bd9
import datetime import itertools import os import re from importlib import import_module from unittest import mock from urllib.parse import quote from django.apps import apps from django.conf import settings from django.contrib.admin.models import LogEntry from django.contrib.auth import ( BACKEND_SESSION_KEY, REDIRECT_FIELD_NAME, SESSION_KEY, ) from django.contrib.auth.forms import ( AuthenticationForm, PasswordChangeForm, SetPasswordForm, ) from django.contrib.auth.models import Permission, User from django.contrib.auth.views import ( INTERNAL_RESET_SESSION_TOKEN, LoginView, logout_then_login, redirect_to_login, ) from django.contrib.contenttypes.models import ContentType from django.contrib.sessions.middleware import SessionMiddleware from django.contrib.sites.requests import RequestSite from django.core import mail from django.db import connection from django.http import HttpRequest from django.middleware.csrf import CsrfViewMiddleware, get_token from django.test import Client, TestCase, override_settings from django.test.client import RedirectCycleError from django.urls import NoReverseMatch, reverse, reverse_lazy from django.utils.http import urlsafe_base64_encode from .client import PasswordResetConfirmClient from .models import CustomUser, UUIDUser from .settings import AUTH_TEMPLATES @override_settings( LANGUAGES=[('en', 'English')], LANGUAGE_CODE='en', TEMPLATES=AUTH_TEMPLATES, ROOT_URLCONF='auth_tests.urls', ) class AuthViewsTestCase(TestCase): """ Helper base class for all the follow test cases. """ @classmethod def setUpTestData(cls): cls.u1 = User.objects.create_user(username='testclient', password='password', email='[email protected]') cls.u3 = User.objects.create_user(username='staff', password='password', email='[email protected]') def login(self, username='testclient', password='password'): response = self.client.post('/login/', { 'username': username, 'password': password, }) self.assertIn(SESSION_KEY, self.client.session) return response def logout(self): response = self.client.get('/admin/logout/') self.assertEqual(response.status_code, 200) self.assertNotIn(SESSION_KEY, self.client.session) def assertFormError(self, response, error): """Assert that error is found in response.context['form'] errors""" form_errors = list(itertools.chain(*response.context['form'].errors.values())) self.assertIn(str(error), form_errors) @override_settings(ROOT_URLCONF='django.contrib.auth.urls') class AuthViewNamedURLTests(AuthViewsTestCase): def test_named_urls(self): "Named URLs should be reversible" expected_named_urls = [ ('login', [], {}), ('logout', [], {}), ('password_change', [], {}), ('password_change_done', [], {}), ('password_reset', [], {}), ('password_reset_done', [], {}), ('password_reset_confirm', [], { 'uidb64': 'aaaaaaa', 'token': '1111-aaaaa', }), ('password_reset_complete', [], {}), ] for name, args, kwargs in expected_named_urls: with self.subTest(name=name): try: reverse(name, args=args, kwargs=kwargs) except NoReverseMatch: self.fail("Reversal of url named '%s' failed with NoReverseMatch" % name) class PasswordResetTest(AuthViewsTestCase): def setUp(self): self.client = PasswordResetConfirmClient() def test_email_not_found(self): """If the provided email is not registered, don't raise any error but also don't send any email.""" response = self.client.get('/password_reset/') self.assertEqual(response.status_code, 200) response = self.client.post('/password_reset/', {'email': '[email protected]'}) self.assertEqual(response.status_code, 302) 
self.assertEqual(len(mail.outbox), 0) def test_email_found(self): "Email is sent if a valid email address is provided for password reset" response = self.client.post('/password_reset/', {'email': '[email protected]'}) self.assertEqual(response.status_code, 302) self.assertEqual(len(mail.outbox), 1) self.assertIn("http://", mail.outbox[0].body) self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email) # optional multipart text/html email has been added. Make sure original, # default functionality is 100% the same self.assertFalse(mail.outbox[0].message().is_multipart()) def test_extra_email_context(self): """ extra_email_context should be available in the email template context. """ response = self.client.post( '/password_reset_extra_email_context/', {'email': '[email protected]'}, ) self.assertEqual(response.status_code, 302) self.assertEqual(len(mail.outbox), 1) self.assertIn('Email email context: "Hello!"', mail.outbox[0].body) self.assertIn('http://custom.example.com/reset/', mail.outbox[0].body) def test_html_mail_template(self): """ A multipart email with text/plain and text/html is sent if the html_email_template parameter is passed to the view """ response = self.client.post('/password_reset/html_email_template/', {'email': '[email protected]'}) self.assertEqual(response.status_code, 302) self.assertEqual(len(mail.outbox), 1) message = mail.outbox[0].message() self.assertEqual(len(message.get_payload()), 2) self.assertTrue(message.is_multipart()) self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain') self.assertEqual(message.get_payload(1).get_content_type(), 'text/html') self.assertNotIn('<html>', message.get_payload(0).get_payload()) self.assertIn('<html>', message.get_payload(1).get_payload()) def test_email_found_custom_from(self): "Email is sent if a valid email address is provided for password reset when a custom from_email is provided." response = self.client.post('/password_reset_from_email/', {'email': '[email protected]'}) self.assertEqual(response.status_code, 302) self.assertEqual(len(mail.outbox), 1) self.assertEqual("[email protected]", mail.outbox[0].from_email) # Skip any 500 handler action (like sending more mail...) @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True) def test_poisoned_http_host(self): "Poisoned HTTP_HOST headers can't be used for reset emails" # This attack is based on the way browsers handle URLs. The colon # should be used to separate the port, but if the URL contains an @, # the colon is interpreted as part of a username for login purposes, # making 'evil.com' the request domain. Since HTTP_HOST is used to # produce a meaningful reset URL, we need to be certain that the # HTTP_HOST header isn't poisoned. This is done as a check when get_host() # is invoked, but we check here as a practical consequence. with self.assertLogs('django.security.DisallowedHost', 'ERROR'): response = self.client.post( '/password_reset/', {'email': '[email protected]'}, HTTP_HOST='www.example:[email protected]' ) self.assertEqual(response.status_code, 400) self.assertEqual(len(mail.outbox), 0) # Skip any 500 handler action (like sending more mail...) 
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True) def test_poisoned_http_host_admin_site(self): "Poisoned HTTP_HOST headers can't be used for reset emails on admin views" with self.assertLogs('django.security.DisallowedHost', 'ERROR'): response = self.client.post( '/admin_password_reset/', {'email': '[email protected]'}, HTTP_HOST='www.example:[email protected]' ) self.assertEqual(response.status_code, 400) self.assertEqual(len(mail.outbox), 0) def _test_confirm_start(self): # Start by creating the email self.client.post('/password_reset/', {'email': '[email protected]'}) self.assertEqual(len(mail.outbox), 1) return self._read_signup_email(mail.outbox[0]) def _read_signup_email(self, email): urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body) self.assertIsNotNone(urlmatch, "No URL found in sent email") return urlmatch.group(), urlmatch.groups()[0] def test_confirm_valid(self): url, path = self._test_confirm_start() response = self.client.get(path) # redirect to a 'complete' page: self.assertContains(response, "Please enter your new password") def test_confirm_invalid(self): url, path = self._test_confirm_start() # Let's munge the token in the path, but keep the same length, # in case the URLconf will reject a different length. path = path[:-5] + ("0" * 4) + path[-1] response = self.client.get(path) self.assertContains(response, "The password reset link was invalid") def test_confirm_invalid_user(self): # A nonexistent user returns a 200 response, not a 404. response = self.client.get('/reset/123456/1-1/') self.assertContains(response, "The password reset link was invalid") def test_confirm_overflow_user(self): # A base36 user id that overflows int returns a 200 response. response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/') self.assertContains(response, "The password reset link was invalid") def test_confirm_invalid_post(self): # Same as test_confirm_invalid, but trying to do a POST instead. url, path = self._test_confirm_start() path = path[:-5] + ("0" * 4) + path[-1] self.client.post(path, { 'new_password1': 'anewpassword', 'new_password2': ' anewpassword', }) # Check the password has not been changed u = User.objects.get(email='[email protected]') self.assertTrue(not u.check_password("anewpassword")) def test_confirm_invalid_hash(self): """A POST with an invalid token is rejected.""" u = User.objects.get(email='[email protected]') original_password = u.password url, path = self._test_confirm_start() path_parts = path.split('-') path_parts[-1] = ("0") * 20 + '/' path = '-'.join(path_parts) response = self.client.post(path, { 'new_password1': 'anewpassword', 'new_password2': 'anewpassword', }) self.assertIs(response.context['validlink'], False) u.refresh_from_db() self.assertEqual(original_password, u.password) # password hasn't changed def test_confirm_complete(self): url, path = self._test_confirm_start() response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'}) # Check the password has been changed u = User.objects.get(email='[email protected]') self.assertTrue(u.check_password("anewpassword")) # The reset token is deleted from the session. 
self.assertNotIn(INTERNAL_RESET_SESSION_TOKEN, self.client.session) # Check we can't use the link again response = self.client.get(path) self.assertContains(response, "The password reset link was invalid") def test_confirm_different_passwords(self): url, path = self._test_confirm_start() response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'x'}) self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch']) def test_reset_redirect_default(self): response = self.client.post('/password_reset/', {'email': '[email protected]'}) self.assertRedirects(response, '/password_reset/done/', fetch_redirect_response=False) def test_reset_custom_redirect(self): response = self.client.post('/password_reset/custom_redirect/', {'email': '[email protected]'}) self.assertRedirects(response, '/custom/', fetch_redirect_response=False) def test_reset_custom_redirect_named(self): response = self.client.post('/password_reset/custom_redirect/named/', {'email': '[email protected]'}) self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False) def test_confirm_redirect_default(self): url, path = self._test_confirm_start() response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'}) self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False) def test_confirm_redirect_custom(self): url, path = self._test_confirm_start() path = path.replace('/reset/', '/reset/custom/') response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'}) self.assertRedirects(response, '/custom/', fetch_redirect_response=False) def test_confirm_redirect_custom_named(self): url, path = self._test_confirm_start() path = path.replace('/reset/', '/reset/custom/named/') response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'}) self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False) def test_confirm_custom_reset_url_token(self): url, path = self._test_confirm_start() path = path.replace('/reset/', '/reset/custom/token/') self.client.reset_url_token = 'set-passwordcustom' response = self.client.post( path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'}, ) self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False) def test_confirm_login_post_reset(self): url, path = self._test_confirm_start() path = path.replace('/reset/', '/reset/post_reset_login/') response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'}) self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False) self.assertIn(SESSION_KEY, self.client.session) @override_settings( AUTHENTICATION_BACKENDS=[ 'django.contrib.auth.backends.ModelBackend', 'django.contrib.auth.backends.AllowAllUsersModelBackend', ] ) def test_confirm_login_post_reset_custom_backend(self): # This backend is specified in the URL pattern. 
backend = 'django.contrib.auth.backends.AllowAllUsersModelBackend' url, path = self._test_confirm_start() path = path.replace('/reset/', '/reset/post_reset_login_custom_backend/') response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'}) self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False) self.assertIn(SESSION_KEY, self.client.session) self.assertEqual(self.client.session[BACKEND_SESSION_KEY], backend) def test_confirm_login_post_reset_already_logged_in(self): url, path = self._test_confirm_start() path = path.replace('/reset/', '/reset/post_reset_login/') self.login() response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'}) self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False) self.assertIn(SESSION_KEY, self.client.session) def test_confirm_display_user_from_form(self): url, path = self._test_confirm_start() response = self.client.get(path) # The password_reset_confirm() view passes the user object to the # SetPasswordForm``, even on GET requests (#16919). For this test, # {{ form.user }}`` is rendered in the template # registration/password_reset_confirm.html. username = User.objects.get(email='[email protected]').username self.assertContains(response, "Hello, %s." % username) # However, the view should NOT pass any user object on a form if the # password reset link was invalid. response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/') self.assertContains(response, "Hello, .") def test_confirm_link_redirects_to_set_password_page(self): url, path = self._test_confirm_start() # Don't use PasswordResetConfirmClient (self.client) here which # automatically fetches the redirect page. client = Client() response = client.get(path) token = response.resolver_match.kwargs['token'] uuidb64 = response.resolver_match.kwargs['uidb64'] self.assertRedirects(response, '/reset/%s/set-password/' % uuidb64) self.assertEqual(client.session['_password_reset_token'], token) def test_confirm_custom_reset_url_token_link_redirects_to_set_password_page(self): url, path = self._test_confirm_start() path = path.replace('/reset/', '/reset/custom/token/') client = Client() response = client.get(path) token = response.resolver_match.kwargs['token'] uuidb64 = response.resolver_match.kwargs['uidb64'] self.assertRedirects(response, '/reset/custom/token/%s/set-passwordcustom/' % uuidb64) self.assertEqual(client.session['_password_reset_token'], token) def test_invalid_link_if_going_directly_to_the_final_reset_password_url(self): url, path = self._test_confirm_start() _, uuidb64, _ = path.strip('/').split('/') response = Client().get('/reset/%s/set-password/' % uuidb64) self.assertContains(response, 'The password reset link was invalid') @override_settings(AUTH_USER_MODEL='auth_tests.CustomUser') class CustomUserPasswordResetTest(AuthViewsTestCase): user_email = '[email protected]' @classmethod def setUpTestData(cls): cls.u1 = CustomUser.custom_objects.create( email='[email protected]', date_of_birth=datetime.date(1976, 11, 8), ) cls.u1.set_password('password') cls.u1.save() def setUp(self): self.client = PasswordResetConfirmClient() def _test_confirm_start(self): # Start by creating the email response = self.client.post('/password_reset/', {'email': self.user_email}) self.assertEqual(response.status_code, 302) self.assertEqual(len(mail.outbox), 1) return self._read_signup_email(mail.outbox[0]) def _read_signup_email(self, email): urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", 
email.body) self.assertIsNotNone(urlmatch, "No URL found in sent email") return urlmatch.group(), urlmatch.groups()[0] def test_confirm_valid_custom_user(self): url, path = self._test_confirm_start() response = self.client.get(path) # redirect to a 'complete' page: self.assertContains(response, "Please enter your new password") # then submit a new password response = self.client.post(path, { 'new_password1': 'anewpassword', 'new_password2': 'anewpassword', }) self.assertRedirects(response, '/reset/done/') @override_settings(AUTH_USER_MODEL='auth_tests.UUIDUser') class UUIDUserPasswordResetTest(CustomUserPasswordResetTest): def _test_confirm_start(self): # instead of fixture UUIDUser.objects.create_user( email=self.user_email, username='foo', password='foo', ) return super()._test_confirm_start() def test_confirm_invalid_uuid(self): """A uidb64 that decodes to a non-UUID doesn't crash.""" _, path = self._test_confirm_start() invalid_uidb64 = urlsafe_base64_encode('INVALID_UUID'.encode()) first, _uuidb64_, second = path.strip('/').split('/') response = self.client.get('/' + '/'.join((first, invalid_uidb64, second)) + '/') self.assertContains(response, 'The password reset link was invalid') class ChangePasswordTest(AuthViewsTestCase): def fail_login(self): response = self.client.post('/login/', { 'username': 'testclient', 'password': 'password', }) self.assertFormError(response, AuthenticationForm.error_messages['invalid_login'] % { 'username': User._meta.get_field('username').verbose_name }) def logout(self): self.client.get('/logout/') def test_password_change_fails_with_invalid_old_password(self): self.login() response = self.client.post('/password_change/', { 'old_password': 'donuts', 'new_password1': 'password1', 'new_password2': 'password1', }) self.assertFormError(response, PasswordChangeForm.error_messages['password_incorrect']) def test_password_change_fails_with_mismatched_passwords(self): self.login() response = self.client.post('/password_change/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'donuts', }) self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch']) def test_password_change_succeeds(self): self.login() self.client.post('/password_change/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'password1', }) self.fail_login() self.login(password='password1') def test_password_change_done_succeeds(self): self.login() response = self.client.post('/password_change/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'password1', }) self.assertRedirects(response, '/password_change/done/', fetch_redirect_response=False) @override_settings(LOGIN_URL='/login/') def test_password_change_done_fails(self): response = self.client.get('/password_change/done/') self.assertRedirects(response, '/login/?next=/password_change/done/', fetch_redirect_response=False) def test_password_change_redirect_default(self): self.login() response = self.client.post('/password_change/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'password1', }) self.assertRedirects(response, '/password_change/done/', fetch_redirect_response=False) def test_password_change_redirect_custom(self): self.login() response = self.client.post('/password_change/custom/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'password1', }) self.assertRedirects(response, '/custom/', fetch_redirect_response=False) def 
test_password_change_redirect_custom_named(self): self.login() response = self.client.post('/password_change/custom/named/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'password1', }) self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False) class SessionAuthenticationTests(AuthViewsTestCase): def test_user_password_change_updates_session(self): """ #21649 - Ensure contrib.auth.views.password_change updates the user's session auth hash after a password change so the session isn't logged out. """ self.login() original_session_key = self.client.session.session_key response = self.client.post('/password_change/', { 'old_password': 'password', 'new_password1': 'password1', 'new_password2': 'password1', }) # if the hash isn't updated, retrieving the redirection page will fail. self.assertRedirects(response, '/password_change/done/') # The session key is rotated. self.assertNotEqual(original_session_key, self.client.session.session_key) class LoginTest(AuthViewsTestCase): def test_current_site_in_context_after_login(self): response = self.client.get(reverse('login')) self.assertEqual(response.status_code, 200) if apps.is_installed('django.contrib.sites'): Site = apps.get_model('sites.Site') site = Site.objects.get_current() self.assertEqual(response.context['site'], site) self.assertEqual(response.context['site_name'], site.name) else: self.assertIsInstance(response.context['site'], RequestSite) self.assertIsInstance(response.context['form'], AuthenticationForm) def test_security_check(self): login_url = reverse('login') # These URLs should not pass the security check. bad_urls = ( 'http://example.com', 'http:///example.com', 'https://example.com', 'ftp://example.com', '///example.com', '//example.com', 'javascript:alert("XSS")', ) for bad_url in bad_urls: with self.subTest(bad_url=bad_url): nasty_url = '%(url)s?%(next)s=%(bad_url)s' % { 'url': login_url, 'next': REDIRECT_FIELD_NAME, 'bad_url': quote(bad_url), } response = self.client.post(nasty_url, { 'username': 'testclient', 'password': 'password', }) self.assertEqual(response.status_code, 302) self.assertNotIn(bad_url, response.url, '%s should be blocked' % bad_url) # These URLs should pass the security check. 
good_urls = ( '/view/?param=http://example.com', '/view/?param=https://example.com', '/view?param=ftp://example.com', 'view/?param=//example.com', 'https://testserver/', 'HTTPS://testserver/', '//testserver/', '/url%20with%20spaces/', ) for good_url in good_urls: with self.subTest(good_url=good_url): safe_url = '%(url)s?%(next)s=%(good_url)s' % { 'url': login_url, 'next': REDIRECT_FIELD_NAME, 'good_url': quote(good_url), } response = self.client.post(safe_url, { 'username': 'testclient', 'password': 'password', }) self.assertEqual(response.status_code, 302) self.assertIn(good_url, response.url, '%s should be allowed' % good_url) def test_security_check_https(self): login_url = reverse('login') non_https_next_url = 'http://testserver/path' not_secured_url = '%(url)s?%(next)s=%(next_url)s' % { 'url': login_url, 'next': REDIRECT_FIELD_NAME, 'next_url': quote(non_https_next_url), } post_data = { 'username': 'testclient', 'password': 'password', } response = self.client.post(not_secured_url, post_data, secure=True) self.assertEqual(response.status_code, 302) self.assertNotEqual(response.url, non_https_next_url) self.assertEqual(response.url, settings.LOGIN_REDIRECT_URL) def test_login_form_contains_request(self): # The custom authentication form for this login requires a request to # initialize it. response = self.client.post('/custom_request_auth_login/', { 'username': 'testclient', 'password': 'password', }) # The login was successful. self.assertRedirects(response, settings.LOGIN_REDIRECT_URL, fetch_redirect_response=False) def test_login_csrf_rotate(self): """ Makes sure that a login rotates the currently-used CSRF token. """ # Do a GET to establish a CSRF token # The test client isn't used here as it's a test for middleware. req = HttpRequest() CsrfViewMiddleware().process_view(req, LoginView.as_view(), (), {}) # get_token() triggers CSRF token inclusion in the response get_token(req) resp = LoginView.as_view()(req) resp2 = CsrfViewMiddleware().process_response(req, resp) csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None) token1 = csrf_cookie.coded_value # Prepare the POST request req = HttpRequest() req.COOKIES[settings.CSRF_COOKIE_NAME] = token1 req.method = "POST" req.POST = {'username': 'testclient', 'password': 'password', 'csrfmiddlewaretoken': token1} # Use POST request to log in SessionMiddleware().process_request(req) CsrfViewMiddleware().process_view(req, LoginView.as_view(), (), {}) req.META["SERVER_NAME"] = "testserver" # Required to have redirect work in login view req.META["SERVER_PORT"] = 80 resp = LoginView.as_view()(req) resp2 = CsrfViewMiddleware().process_response(req, resp) csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None) token2 = csrf_cookie.coded_value # Check the CSRF token switched self.assertNotEqual(token1, token2) def test_session_key_flushed_on_login(self): """ To avoid reusing another user's session, ensure a new, empty session is created if the existing session corresponds to a different authenticated user. """ self.login() original_session_key = self.client.session.session_key self.login(username='staff') self.assertNotEqual(original_session_key, self.client.session.session_key) def test_session_key_flushed_on_login_after_password_change(self): """ As above, but same user logging in after a password change. """ self.login() original_session_key = self.client.session.session_key # If no password change, session key should not be flushed. 
self.login() self.assertEqual(original_session_key, self.client.session.session_key) user = User.objects.get(username='testclient') user.set_password('foobar') user.save() self.login(password='foobar') self.assertNotEqual(original_session_key, self.client.session.session_key) def test_login_session_without_hash_session_key(self): """ Session without django.contrib.auth.HASH_SESSION_KEY should login without an exception. """ user = User.objects.get(username='testclient') engine = import_module(settings.SESSION_ENGINE) session = engine.SessionStore() session[SESSION_KEY] = user.id session.save() original_session_key = session.session_key self.client.cookies[settings.SESSION_COOKIE_NAME] = original_session_key self.login() self.assertNotEqual(original_session_key, self.client.session.session_key) class LoginURLSettings(AuthViewsTestCase): """Tests for settings.LOGIN_URL.""" def assertLoginURLEquals(self, url): response = self.client.get('/login_required/') self.assertRedirects(response, url, fetch_redirect_response=False) @override_settings(LOGIN_URL='/login/') def test_standard_login_url(self): self.assertLoginURLEquals('/login/?next=/login_required/') @override_settings(LOGIN_URL='login') def test_named_login_url(self): self.assertLoginURLEquals('/login/?next=/login_required/') @override_settings(LOGIN_URL='http://remote.example.com/login') def test_remote_login_url(self): quoted_next = quote('http://testserver/login_required/') expected = 'http://remote.example.com/login?next=%s' % quoted_next self.assertLoginURLEquals(expected) @override_settings(LOGIN_URL='https:///login/') def test_https_login_url(self): quoted_next = quote('http://testserver/login_required/') expected = 'https:///login/?next=%s' % quoted_next self.assertLoginURLEquals(expected) @override_settings(LOGIN_URL='/login/?pretty=1') def test_login_url_with_querystring(self): self.assertLoginURLEquals('/login/?pretty=1&next=/login_required/') @override_settings(LOGIN_URL='http://remote.example.com/login/?next=/default/') def test_remote_login_url_with_next_querystring(self): quoted_next = quote('http://testserver/login_required/') expected = 'http://remote.example.com/login/?next=%s' % quoted_next self.assertLoginURLEquals(expected) @override_settings(LOGIN_URL=reverse_lazy('login')) def test_lazy_login_url(self): self.assertLoginURLEquals('/login/?next=/login_required/') class LoginRedirectUrlTest(AuthViewsTestCase): """Tests for settings.LOGIN_REDIRECT_URL.""" def assertLoginRedirectURLEqual(self, url): response = self.login() self.assertRedirects(response, url, fetch_redirect_response=False) def test_default(self): self.assertLoginRedirectURLEqual('/accounts/profile/') @override_settings(LOGIN_REDIRECT_URL='/custom/') def test_custom(self): self.assertLoginRedirectURLEqual('/custom/') @override_settings(LOGIN_REDIRECT_URL='password_reset') def test_named(self): self.assertLoginRedirectURLEqual('/password_reset/') @override_settings(LOGIN_REDIRECT_URL='http://remote.example.com/welcome/') def test_remote(self): self.assertLoginRedirectURLEqual('http://remote.example.com/welcome/') class RedirectToLoginTests(AuthViewsTestCase): """Tests for the redirect_to_login view""" @override_settings(LOGIN_URL=reverse_lazy('login')) def test_redirect_to_login_with_lazy(self): login_redirect_response = redirect_to_login(next='/else/where/') expected = '/login/?next=/else/where/' self.assertEqual(expected, login_redirect_response.url) @override_settings(LOGIN_URL=reverse_lazy('login')) def 
test_redirect_to_login_with_lazy_and_unicode(self): login_redirect_response = redirect_to_login(next='/else/where/झ/') expected = '/login/?next=/else/where/%E0%A4%9D/' self.assertEqual(expected, login_redirect_response.url) class LogoutThenLoginTests(AuthViewsTestCase): """Tests for the logout_then_login view""" def confirm_logged_out(self): self.assertNotIn(SESSION_KEY, self.client.session) @override_settings(LOGIN_URL='/login/') def test_default_logout_then_login(self): self.login() req = HttpRequest() req.method = 'GET' req.session = self.client.session response = logout_then_login(req) self.confirm_logged_out() self.assertRedirects(response, '/login/', fetch_redirect_response=False) def test_logout_then_login_with_custom_login(self): self.login() req = HttpRequest() req.method = 'GET' req.session = self.client.session response = logout_then_login(req, login_url='/custom/') self.confirm_logged_out() self.assertRedirects(response, '/custom/', fetch_redirect_response=False) class LoginRedirectAuthenticatedUser(AuthViewsTestCase): dont_redirect_url = '/login/redirect_authenticated_user_default/' do_redirect_url = '/login/redirect_authenticated_user/' def test_default(self): """Stay on the login page by default.""" self.login() response = self.client.get(self.dont_redirect_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['next'], '') def test_guest(self): """If not logged in, stay on the same page.""" response = self.client.get(self.do_redirect_url) self.assertEqual(response.status_code, 200) def test_redirect(self): """If logged in, go to default redirected URL.""" self.login() response = self.client.get(self.do_redirect_url) self.assertRedirects(response, '/accounts/profile/', fetch_redirect_response=False) @override_settings(LOGIN_REDIRECT_URL='/custom/') def test_redirect_url(self): """If logged in, go to custom redirected URL.""" self.login() response = self.client.get(self.do_redirect_url) self.assertRedirects(response, '/custom/', fetch_redirect_response=False) def test_redirect_param(self): """If next is specified as a GET parameter, go there.""" self.login() url = self.do_redirect_url + '?next=/custom_next/' response = self.client.get(url) self.assertRedirects(response, '/custom_next/', fetch_redirect_response=False) def test_redirect_loop(self): """ Detect a redirect loop if LOGIN_REDIRECT_URL is not correctly set, with and without custom parameters. """ self.login() msg = ( "Redirection loop for authenticated user detected. Check that " "your LOGIN_REDIRECT_URL doesn't point to a login page." ) with self.settings(LOGIN_REDIRECT_URL=self.do_redirect_url): with self.assertRaisesMessage(ValueError, msg): self.client.get(self.do_redirect_url) url = self.do_redirect_url + '?bla=2' with self.assertRaisesMessage(ValueError, msg): self.client.get(url) def test_permission_required_not_logged_in(self): # Not logged in ... with self.settings(LOGIN_URL=self.do_redirect_url): # redirected to login. response = self.client.get('/permission_required_redirect/', follow=True) self.assertEqual(response.status_code, 200) # exception raised. response = self.client.get('/permission_required_exception/', follow=True) self.assertEqual(response.status_code, 403) # redirected to login. response = self.client.get('/login_and_permission_required_exception/', follow=True) self.assertEqual(response.status_code, 200) def test_permission_required_logged_in(self): self.login() # Already logged in... with self.settings(LOGIN_URL=self.do_redirect_url): # redirect loop encountered. 
with self.assertRaisesMessage(RedirectCycleError, 'Redirect loop detected.'): self.client.get('/permission_required_redirect/', follow=True) # exception raised. response = self.client.get('/permission_required_exception/', follow=True) self.assertEqual(response.status_code, 403) # exception raised. response = self.client.get('/login_and_permission_required_exception/', follow=True) self.assertEqual(response.status_code, 403) class LoginSuccessURLAllowedHostsTest(AuthViewsTestCase): def test_success_url_allowed_hosts_same_host(self): response = self.client.post('/login/allowed_hosts/', { 'username': 'testclient', 'password': 'password', 'next': 'https://testserver/home', }) self.assertIn(SESSION_KEY, self.client.session) self.assertRedirects(response, 'https://testserver/home', fetch_redirect_response=False) def test_success_url_allowed_hosts_safe_host(self): response = self.client.post('/login/allowed_hosts/', { 'username': 'testclient', 'password': 'password', 'next': 'https://otherserver/home', }) self.assertIn(SESSION_KEY, self.client.session) self.assertRedirects(response, 'https://otherserver/home', fetch_redirect_response=False) def test_success_url_allowed_hosts_unsafe_host(self): response = self.client.post('/login/allowed_hosts/', { 'username': 'testclient', 'password': 'password', 'next': 'https://evil/home', }) self.assertIn(SESSION_KEY, self.client.session) self.assertRedirects(response, '/accounts/profile/', fetch_redirect_response=False) class LogoutTest(AuthViewsTestCase): def confirm_logged_out(self): self.assertNotIn(SESSION_KEY, self.client.session) def test_logout_default(self): "Logout without next_page option renders the default template" self.login() response = self.client.get('/logout/') self.assertContains(response, 'Logged out') self.confirm_logged_out() def test_logout_with_post(self): self.login() response = self.client.post('/logout/') self.assertContains(response, 'Logged out') self.confirm_logged_out() def test_14377(self): # Bug 14377 self.login() response = self.client.get('/logout/') self.assertIn('site', response.context) def test_logout_doesnt_cache(self): """ The logout() view should send "no-cache" headers for reasons described in #25490. 
""" response = self.client.get('/logout/') self.assertIn('no-store', response['Cache-Control']) def test_logout_with_overridden_redirect_url(self): # Bug 11223 self.login() response = self.client.get('/logout/next_page/') self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False) response = self.client.get('/logout/next_page/?next=/login/') self.assertRedirects(response, '/login/', fetch_redirect_response=False) self.confirm_logged_out() def test_logout_with_next_page_specified(self): "Logout with next_page option given redirects to specified resource" self.login() response = self.client.get('/logout/next_page/') self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False) self.confirm_logged_out() def test_logout_with_redirect_argument(self): "Logout with query string redirects to specified resource" self.login() response = self.client.get('/logout/?next=/login/') self.assertRedirects(response, '/login/', fetch_redirect_response=False) self.confirm_logged_out() def test_logout_with_custom_redirect_argument(self): "Logout with custom query string redirects to specified resource" self.login() response = self.client.get('/logout/custom_query/?follow=/somewhere/') self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False) self.confirm_logged_out() def test_logout_with_named_redirect(self): "Logout resolves names or URLs passed as next_page." self.login() response = self.client.get('/logout/next_page/named/') self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False) self.confirm_logged_out() def test_success_url_allowed_hosts_same_host(self): self.login() response = self.client.get('/logout/allowed_hosts/?next=https://testserver/') self.assertRedirects(response, 'https://testserver/', fetch_redirect_response=False) self.confirm_logged_out() def test_success_url_allowed_hosts_safe_host(self): self.login() response = self.client.get('/logout/allowed_hosts/?next=https://otherserver/') self.assertRedirects(response, 'https://otherserver/', fetch_redirect_response=False) self.confirm_logged_out() def test_success_url_allowed_hosts_unsafe_host(self): self.login() response = self.client.get('/logout/allowed_hosts/?next=https://evil/') self.assertRedirects(response, '/logout/allowed_hosts/', fetch_redirect_response=False) self.confirm_logged_out() def test_security_check(self): logout_url = reverse('logout') # These URLs should not pass the security check. bad_urls = ( 'http://example.com', 'http:///example.com', 'https://example.com', 'ftp://example.com', '///example.com', '//example.com', 'javascript:alert("XSS")', ) for bad_url in bad_urls: with self.subTest(bad_url=bad_url): nasty_url = '%(url)s?%(next)s=%(bad_url)s' % { 'url': logout_url, 'next': REDIRECT_FIELD_NAME, 'bad_url': quote(bad_url), } self.login() response = self.client.get(nasty_url) self.assertEqual(response.status_code, 302) self.assertNotIn(bad_url, response.url, '%s should be blocked' % bad_url) self.confirm_logged_out() # These URLs should pass the security check. 
good_urls = ( '/view/?param=http://example.com', '/view/?param=https://example.com', '/view?param=ftp://example.com', 'view/?param=//example.com', 'https://testserver/', 'HTTPS://testserver/', '//testserver/', '/url%20with%20spaces/', ) for good_url in good_urls: with self.subTest(good_url=good_url): safe_url = '%(url)s?%(next)s=%(good_url)s' % { 'url': logout_url, 'next': REDIRECT_FIELD_NAME, 'good_url': quote(good_url), } self.login() response = self.client.get(safe_url) self.assertEqual(response.status_code, 302) self.assertIn(good_url, response.url, '%s should be allowed' % good_url) self.confirm_logged_out() def test_security_check_https(self): logout_url = reverse('logout') non_https_next_url = 'http://testserver/' url = '%(url)s?%(next)s=%(next_url)s' % { 'url': logout_url, 'next': REDIRECT_FIELD_NAME, 'next_url': quote(non_https_next_url), } self.login() response = self.client.get(url, secure=True) self.assertRedirects(response, logout_url, fetch_redirect_response=False) self.confirm_logged_out() def test_logout_preserve_language(self): """Language is preserved after logout.""" self.login() self.client.post('/setlang/', {'language': 'pl'}) self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, 'pl') self.client.get('/logout/') self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, 'pl') @override_settings(LOGOUT_REDIRECT_URL='/custom/') def test_logout_redirect_url_setting(self): self.login() response = self.client.get('/logout/') self.assertRedirects(response, '/custom/', fetch_redirect_response=False) @override_settings(LOGOUT_REDIRECT_URL='logout') def test_logout_redirect_url_named_setting(self): self.login() response = self.client.get('/logout/') self.assertRedirects(response, '/logout/', fetch_redirect_response=False) def get_perm(Model, perm): ct = ContentType.objects.get_for_model(Model) return Permission.objects.get(content_type=ct, codename=perm) # Redirect in test_user_change_password will fail if session auth hash # isn't updated after password change (#21649) @override_settings(ROOT_URLCONF='auth_tests.urls_admin') class ChangelistTests(AuthViewsTestCase): @classmethod def setUpTestData(cls): super().setUpTestData() # Make me a superuser before logging in. User.objects.filter(username='testclient').update(is_staff=True, is_superuser=True) def setUp(self): self.login() # Get the latest last_login value. self.admin = User.objects.get(pk=self.u1.pk) def get_user_data(self, user): return { 'username': user.username, 'password': user.password, 'email': user.email, 'is_active': user.is_active, 'is_staff': user.is_staff, 'is_superuser': user.is_superuser, 'last_login_0': user.last_login.strftime('%Y-%m-%d'), 'last_login_1': user.last_login.strftime('%H:%M:%S'), 'initial-last_login_0': user.last_login.strftime('%Y-%m-%d'), 'initial-last_login_1': user.last_login.strftime('%H:%M:%S'), 'date_joined_0': user.date_joined.strftime('%Y-%m-%d'), 'date_joined_1': user.date_joined.strftime('%H:%M:%S'), 'initial-date_joined_0': user.date_joined.strftime('%Y-%m-%d'), 'initial-date_joined_1': user.date_joined.strftime('%H:%M:%S'), 'first_name': user.first_name, 'last_name': user.last_name, } # #20078 - users shouldn't be allowed to guess password hashes via # repeated password__startswith queries. 
def test_changelist_disallows_password_lookups(self): # A lookup that tries to filter on password isn't OK with self.assertLogs('django.security.DisallowedModelAdminLookup', 'ERROR'): response = self.client.get(reverse('auth_test_admin:auth_user_changelist') + '?password__startswith=sha1$') self.assertEqual(response.status_code, 400) def test_user_change_email(self): data = self.get_user_data(self.admin) data['email'] = 'new_' + data['email'] response = self.client.post( reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)), data ) self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist')) row = LogEntry.objects.latest('id') self.assertEqual(row.get_change_message(), 'Changed Email address.') def test_user_not_change(self): response = self.client.post( reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)), self.get_user_data(self.admin) ) self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist')) row = LogEntry.objects.latest('id') self.assertEqual(row.get_change_message(), 'No fields changed.') def test_user_change_password(self): user_change_url = reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)) password_change_url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,)) response = self.client.get(user_change_url) # Test the link inside password field help_text. rel_link = re.search( r'you can change the password using <a href="([^"]*)">this form</a>', response.content.decode() ).groups()[0] self.assertEqual( os.path.normpath(user_change_url + rel_link), os.path.normpath(password_change_url) ) response = self.client.post( password_change_url, { 'password1': 'password1', 'password2': 'password1', } ) self.assertRedirects(response, user_change_url) row = LogEntry.objects.latest('id') self.assertEqual(row.get_change_message(), 'Changed password.') self.logout() self.login(password='password1') def test_user_change_different_user_password(self): u = User.objects.get(email='[email protected]') response = self.client.post( reverse('auth_test_admin:auth_user_password_change', args=(u.pk,)), { 'password1': 'password1', 'password2': 'password1', } ) self.assertRedirects(response, reverse('auth_test_admin:auth_user_change', args=(u.pk,))) row = LogEntry.objects.latest('id') self.assertEqual(row.user_id, self.admin.pk) self.assertEqual(row.object_id, str(u.pk)) self.assertEqual(row.get_change_message(), 'Changed password.') def test_password_change_bad_url(self): response = self.client.get(reverse('auth_test_admin:auth_user_password_change', args=('foobar',))) self.assertEqual(response.status_code, 404) @mock.patch('django.contrib.auth.admin.UserAdmin.has_change_permission') def test_user_change_password_passes_user_to_has_change_permission(self, has_change_permission): url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,)) self.client.post(url, {'password1': 'password1', 'password2': 'password1'}) (_request, user), _kwargs = has_change_permission.call_args self.assertEqual(user.pk, self.admin.pk) def test_view_user_password_is_readonly(self): u = User.objects.get(username='testclient') u.is_superuser = False u.save() original_password = u.password u.user_permissions.add(get_perm(User, 'view_user')) response = self.client.get(reverse('auth_test_admin:auth_user_change', args=(u.pk,)),) algo, salt, hash_string = (u.password.split('$')) self.assertContains(response, '<div class="readonly">testclient</div>') # ReadOnlyPasswordHashWidget is used to render the field. 
self.assertContains( response, '<strong>algorithm</strong>: %s\n\n' '<strong>salt</strong>: %s**********\n\n' '<strong>hash</strong>: %s**************************\n\n' % ( algo, salt[:2], hash_string[:6], ), html=True, ) # Value in POST data is ignored. data = self.get_user_data(u) data['password'] = 'shouldnotchange' change_url = reverse('auth_test_admin:auth_user_change', args=(u.pk,)) response = self.client.post(change_url, data) self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist')) u.refresh_from_db() self.assertEqual(u.password, original_password) @override_settings( AUTH_USER_MODEL='auth_tests.UUIDUser', ROOT_URLCONF='auth_tests.urls_custom_user_admin', ) class UUIDUserTests(TestCase): def test_admin_password_change(self): u = UUIDUser.objects.create_superuser(username='uuid', email='[email protected]', password='test') self.assertTrue(self.client.login(username='uuid', password='test')) user_change_url = reverse('custom_user_admin:auth_tests_uuiduser_change', args=(u.pk,)) response = self.client.get(user_change_url) self.assertEqual(response.status_code, 200) password_change_url = reverse('custom_user_admin:auth_user_password_change', args=(u.pk,)) response = self.client.get(password_change_url) self.assertEqual(response.status_code, 200) # A LogEntry is created with pk=1 which breaks a FK constraint on MySQL with connection.constraint_checks_disabled(): response = self.client.post(password_change_url, { 'password1': 'password1', 'password2': 'password1', }) self.assertRedirects(response, user_change_url) row = LogEntry.objects.latest('id') self.assertEqual(row.user_id, 1) # hardcoded in CustomUserAdmin.log_change() self.assertEqual(row.object_id, str(u.pk)) self.assertEqual(row.get_change_message(), 'Changed password.') # The LogEntry.user column isn't altered to a UUID type so it's set to # an integer manually in CustomUserAdmin to avoid an error. To avoid a # constraint error, delete the entry before constraints are checked # after the test. row.delete()
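

# Illustrative sketch (not part of the test suite above): the
# test_security_check() tests exercise the open-redirect protection that
# LoginView and LogoutView apply to the ``next`` parameter. A custom view can
# reuse the same check via django.utils.http.url_has_allowed_host_and_scheme()
# (public API since Django 3.0). The view name ``safe_redirect`` is
# hypothetical.

from django.conf import settings
from django.shortcuts import redirect, resolve_url
from django.utils.http import url_has_allowed_host_and_scheme


def safe_redirect(request):
    # Reject absolute URLs on other hosts, non-HTTP(S) schemes, and (on
    # secure requests) plain-HTTP targets; fall back to the default URL.
    next_url = request.POST.get('next', request.GET.get('next', ''))
    if url_has_allowed_host_and_scheme(
        url=next_url,
        allowed_hosts={request.get_host()},
        require_https=request.is_secure(),
    ):
        return redirect(next_url)
    return redirect(resolve_url(settings.LOGIN_REDIRECT_URL))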
from datetime import datetime

from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.backends import RemoteUserBackend
from django.contrib.auth.middleware import RemoteUserMiddleware
from django.contrib.auth.models import User
from django.test import TestCase, modify_settings, override_settings
from django.utils import timezone


@override_settings(ROOT_URLCONF='auth_tests.urls')
class RemoteUserTest(TestCase):
    middleware = 'django.contrib.auth.middleware.RemoteUserMiddleware'
    backend = 'django.contrib.auth.backends.RemoteUserBackend'
    header = 'REMOTE_USER'
    email_header = 'REMOTE_EMAIL'

    # Usernames to be passed in REMOTE_USER for the test_known_user test case.
    known_user = 'knownuser'
    known_user2 = 'knownuser2'

    def setUp(self):
        self.patched_settings = modify_settings(
            AUTHENTICATION_BACKENDS={'append': self.backend},
            MIDDLEWARE={'append': self.middleware},
        )
        self.patched_settings.enable()

    def tearDown(self):
        self.patched_settings.disable()

    def test_no_remote_user(self):
        """
        Tests requests where no remote user is specified and ensures that no
        users get created.
        """
        num_users = User.objects.count()

        response = self.client.get('/remote_user/')
        self.assertTrue(response.context['user'].is_anonymous)
        self.assertEqual(User.objects.count(), num_users)

        response = self.client.get('/remote_user/', **{self.header: None})
        self.assertTrue(response.context['user'].is_anonymous)
        self.assertEqual(User.objects.count(), num_users)

        response = self.client.get('/remote_user/', **{self.header: ''})
        self.assertTrue(response.context['user'].is_anonymous)
        self.assertEqual(User.objects.count(), num_users)

    def test_unknown_user(self):
        """
        Tests the case where the username passed in the header does not exist
        as a User.
        """
        num_users = User.objects.count()
        response = self.client.get('/remote_user/', **{self.header: 'newuser'})
        self.assertEqual(response.context['user'].username, 'newuser')
        self.assertEqual(User.objects.count(), num_users + 1)
        User.objects.get(username='newuser')

        # Another request with same user should not create any new users.
        response = self.client.get('/remote_user/', **{self.header: 'newuser'})
        self.assertEqual(User.objects.count(), num_users + 1)

    def test_known_user(self):
        """
        Tests the case where the username passed in the header is a valid User.
        """
        User.objects.create(username='knownuser')
        User.objects.create(username='knownuser2')
        num_users = User.objects.count()
        response = self.client.get('/remote_user/', **{self.header: self.known_user})
        self.assertEqual(response.context['user'].username, 'knownuser')
        self.assertEqual(User.objects.count(), num_users)
        # A different user passed in the headers causes the new user
        # to be logged in.
        response = self.client.get('/remote_user/', **{self.header: self.known_user2})
        self.assertEqual(response.context['user'].username, 'knownuser2')
        self.assertEqual(User.objects.count(), num_users)

    def test_last_login(self):
        """
        A user's last_login is set the first time they make a request but not
        updated in subsequent requests with the same session.
        """
        user = User.objects.create(username='knownuser')
        # Set last_login to something so we can determine if it changes.
        default_login = datetime(2000, 1, 1)
        if settings.USE_TZ:
            default_login = default_login.replace(tzinfo=timezone.utc)
        user.last_login = default_login
        user.save()

        response = self.client.get('/remote_user/', **{self.header: self.known_user})
        self.assertNotEqual(default_login, response.context['user'].last_login)

        user = User.objects.get(username='knownuser')
        user.last_login = default_login
        user.save()
        response = self.client.get('/remote_user/', **{self.header: self.known_user})
        self.assertEqual(default_login, response.context['user'].last_login)

    def test_header_disappears(self):
        """
        A logged in user is logged out automatically when the REMOTE_USER
        header disappears during the same browser session.
        """
        User.objects.create(username='knownuser')
        # Known user authenticates
        response = self.client.get('/remote_user/', **{self.header: self.known_user})
        self.assertEqual(response.context['user'].username, 'knownuser')
        # During the session, the REMOTE_USER header disappears. Should trigger logout.
        response = self.client.get('/remote_user/')
        self.assertTrue(response.context['user'].is_anonymous)
        # RemoteUserMiddleware won't log out a user who was authenticated
        # via another backend.
        User.objects.create_user(username='modeluser', password='foo')
        self.client.login(username='modeluser', password='foo')
        authenticate(username='modeluser', password='foo')
        response = self.client.get('/remote_user/')
        self.assertEqual(response.context['user'].username, 'modeluser')

    def test_user_switch_forces_new_login(self):
        """
        If the username in the header changes between requests, the original
        user is logged out.
        """
        User.objects.create(username='knownuser')
        # Known user authenticates
        response = self.client.get('/remote_user/', **{self.header: self.known_user})
        self.assertEqual(response.context['user'].username, 'knownuser')
        # During the session, the REMOTE_USER changes to a different user.
        response = self.client.get('/remote_user/', **{self.header: "newnewuser"})
        # The current user is not the prior remote_user.
        # In backends that create a new user, username is "newnewuser".
        # In backends that do not create new users, it is '' (anonymous user).
        self.assertNotEqual(response.context['user'].username, 'knownuser')

    def test_inactive_user(self):
        User.objects.create(username='knownuser', is_active=False)
        response = self.client.get('/remote_user/', **{self.header: 'knownuser'})
        self.assertTrue(response.context['user'].is_anonymous)


class RemoteUserNoCreateBackend(RemoteUserBackend):
    """Backend that doesn't create unknown users."""
    create_unknown_user = False


class RemoteUserNoCreateTest(RemoteUserTest):
    """
    Contains the same tests as RemoteUserTest, but using a custom auth backend
    class that doesn't create unknown users.
""" backend = 'auth_tests.test_remote_user.RemoteUserNoCreateBackend' def test_unknown_user(self): num_users = User.objects.count() response = self.client.get('/remote_user/', **{self.header: 'newuser'}) self.assertTrue(response.context['user'].is_anonymous) self.assertEqual(User.objects.count(), num_users) class AllowAllUsersRemoteUserBackendTest(RemoteUserTest): """Backend that allows inactive users.""" backend = 'django.contrib.auth.backends.AllowAllUsersRemoteUserBackend' def test_inactive_user(self): user = User.objects.create(username='knownuser', is_active=False) response = self.client.get('/remote_user/', **{self.header: self.known_user}) self.assertEqual(response.context['user'].username, user.username) class CustomRemoteUserBackend(RemoteUserBackend): """ Backend that overrides RemoteUserBackend methods. """ def clean_username(self, username): """ Grabs username before the @ character. """ return username.split('@')[0] def configure_user(self, request, user): """ Sets user's email address using the email specified in an HTTP header. """ user.email = request.META.get(RemoteUserTest.email_header, '') user.save() return user class RemoteUserCustomTest(RemoteUserTest): """ Tests a custom RemoteUserBackend subclass that overrides the clean_username and configure_user methods. """ backend = 'auth_tests.test_remote_user.CustomRemoteUserBackend' # REMOTE_USER strings with email addresses for the custom backend to # clean. known_user = '[email protected]' known_user2 = '[email protected]' def test_known_user(self): """ The strings passed in REMOTE_USER should be cleaned and the known users should not have been configured with an email address. """ super().test_known_user() self.assertEqual(User.objects.get(username='knownuser').email, '') self.assertEqual(User.objects.get(username='knownuser2').email, '') def test_unknown_user(self): """ The unknown user created should be configured with an email address provided in the request header. """ num_users = User.objects.count() response = self.client.get('/remote_user/', **{ self.header: 'newuser', self.email_header: '[email protected]', }) self.assertEqual(response.context['user'].username, 'newuser') self.assertEqual(response.context['user'].email, '[email protected]') self.assertEqual(User.objects.count(), num_users + 1) newuser = User.objects.get(username='newuser') self.assertEqual(newuser.email, '[email protected]') class CustomHeaderMiddleware(RemoteUserMiddleware): """ Middleware that overrides custom HTTP auth user header. """ header = 'HTTP_AUTHUSER' class CustomHeaderRemoteUserTest(RemoteUserTest): """ Tests a custom RemoteUserMiddleware subclass with custom HTTP auth user header. """ middleware = ( 'auth_tests.test_remote_user.CustomHeaderMiddleware' ) header = 'HTTP_AUTHUSER' class PersistentRemoteUserTest(RemoteUserTest): """ PersistentRemoteUserMiddleware keeps the user logged in even if the subsequent calls do not contain the header value. """ middleware = 'django.contrib.auth.middleware.PersistentRemoteUserMiddleware' require_header = False def test_header_disappears(self): """ A logged in user is kept logged in even if the REMOTE_USER header disappears during the same browser session. """ User.objects.create(username='knownuser') # Known user authenticates response = self.client.get('/remote_user/', **{self.header: self.known_user}) self.assertEqual(response.context['user'].username, 'knownuser') # Should stay logged in if the REMOTE_USER header disappears. 
response = self.client.get('/remote_user/') self.assertFalse(response.context['user'].is_anonymous) self.assertEqual(response.context['user'].username, 'knownuser')
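

# Illustrative sketch (not part of the test suite): wiring REMOTE_USER
# authentication into a project's settings module, mirroring what the
# modify_settings() calls in RemoteUserTest.setUp() patch in per test.
# RemoteUserMiddleware must be listed after AuthenticationMiddleware. This is
# a fragment of a hypothetical settings.py, not a complete configuration.

MIDDLEWARE = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.RemoteUserMiddleware',
]
AUTHENTICATION_BACKENDS = [
    'django.contrib.auth.backends.RemoteUserBackend',
]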
from unittest import mock from django.conf.global_settings import PASSWORD_HASHERS from django.contrib.auth import get_user_model from django.contrib.auth.base_user import AbstractBaseUser from django.contrib.auth.hashers import get_hasher from django.contrib.auth.models import ( AbstractUser, AnonymousUser, Group, Permission, User, UserManager, ) from django.contrib.contenttypes.models import ContentType from django.core import mail from django.db.models.signals import post_save from django.test import SimpleTestCase, TestCase, override_settings from .models import IntegerUsernameUser from .models.with_custom_email_field import CustomEmailField class NaturalKeysTestCase(TestCase): def test_user_natural_key(self): staff_user = User.objects.create_user(username='staff') self.assertEqual(User.objects.get_by_natural_key('staff'), staff_user) self.assertEqual(staff_user.natural_key(), ('staff',)) def test_group_natural_key(self): users_group = Group.objects.create(name='users') self.assertEqual(Group.objects.get_by_natural_key('users'), users_group) class LoadDataWithoutNaturalKeysTestCase(TestCase): fixtures = ['regular.json'] def test_user_is_created_and_added_to_group(self): user = User.objects.get(username='my_username') group = Group.objects.get(name='my_group') self.assertEqual(group, user.groups.get()) class LoadDataWithNaturalKeysTestCase(TestCase): fixtures = ['natural.json'] def test_user_is_created_and_added_to_group(self): user = User.objects.get(username='my_username') group = Group.objects.get(name='my_group') self.assertEqual(group, user.groups.get()) class LoadDataWithNaturalKeysAndMultipleDatabasesTestCase(TestCase): databases = {'default', 'other'} def test_load_data_with_user_permissions(self): # Create test contenttypes for both databases default_objects = [ ContentType.objects.db_manager('default').create( model='examplemodela', app_label='app_a', ), ContentType.objects.db_manager('default').create( model='examplemodelb', app_label='app_b', ), ] other_objects = [ ContentType.objects.db_manager('other').create( model='examplemodelb', app_label='app_b', ), ContentType.objects.db_manager('other').create( model='examplemodela', app_label='app_a', ), ] # Now we create the test UserPermission Permission.objects.db_manager("default").create( name="Can delete example model b", codename="delete_examplemodelb", content_type=default_objects[1], ) Permission.objects.db_manager("other").create( name="Can delete example model b", codename="delete_examplemodelb", content_type=other_objects[0], ) perm_default = Permission.objects.get_by_natural_key( 'delete_examplemodelb', 'app_b', 'examplemodelb', ) perm_other = Permission.objects.db_manager('other').get_by_natural_key( 'delete_examplemodelb', 'app_b', 'examplemodelb', ) self.assertEqual(perm_default.content_type_id, default_objects[1].id) self.assertEqual(perm_other.content_type_id, other_objects[0].id) class UserManagerTestCase(TestCase): def test_create_user(self): email_lowercase = '[email protected]' user = User.objects.create_user('user', email_lowercase) self.assertEqual(user.email, email_lowercase) self.assertEqual(user.username, 'user') self.assertFalse(user.has_usable_password()) def test_create_user_email_domain_normalize_rfc3696(self): # According to https://tools.ietf.org/html/rfc3696#section-3 # the "@" symbol can be part of the local part of an email address returned = UserManager.normalize_email(r'Abc\@[email protected]') self.assertEqual(returned, r'Abc\@[email protected]') def 
test_create_user_email_domain_normalize(self): returned = UserManager.normalize_email('[email protected]') self.assertEqual(returned, '[email protected]') def test_create_user_email_domain_normalize_with_whitespace(self): returned = UserManager.normalize_email(r'email\ [email protected]') self.assertEqual(returned, r'email\ [email protected]') def test_empty_username(self): with self.assertRaisesMessage(ValueError, 'The given username must be set'): User.objects.create_user(username='') def test_create_user_is_staff(self): email = '[email protected]' user = User.objects.create_user('user', email, is_staff=True) self.assertEqual(user.email, email) self.assertEqual(user.username, 'user') self.assertTrue(user.is_staff) def test_create_super_user_raises_error_on_false_is_superuser(self): with self.assertRaisesMessage(ValueError, 'Superuser must have is_superuser=True.'): User.objects.create_superuser( username='test', email='[email protected]', password='test', is_superuser=False, ) def test_create_superuser_raises_error_on_false_is_staff(self): with self.assertRaisesMessage(ValueError, 'Superuser must have is_staff=True.'): User.objects.create_superuser( username='test', email='[email protected]', password='test', is_staff=False, ) def test_make_random_password(self): allowed_chars = 'abcdefg' password = UserManager().make_random_password(5, allowed_chars) self.assertEqual(len(password), 5) for char in password: self.assertIn(char, allowed_chars) class AbstractBaseUserTests(SimpleTestCase): def test_has_usable_password(self): """ Passwords are usable even if they don't correspond to a hasher in settings.PASSWORD_HASHERS. """ self.assertIs(User(password='some-gibbberish').has_usable_password(), True) def test_normalize_username(self): self.assertEqual(IntegerUsernameUser().normalize_username(123), 123) def test_clean_normalize_username(self): # The normalization happens in AbstractBaseUser.clean() ohm_username = 'iamtheΩ' # U+2126 OHM SIGN for model in ('auth.User', 'auth_tests.CustomUser'): with self.subTest(model=model), self.settings(AUTH_USER_MODEL=model): User = get_user_model() user = User(**{User.USERNAME_FIELD: ohm_username, 'password': 'foo'}) user.clean() username = user.get_username() self.assertNotEqual(username, ohm_username) self.assertEqual(username, 'iamtheΩ') # U+03A9 GREEK CAPITAL LETTER OMEGA def test_default_email(self): user = AbstractBaseUser() self.assertEqual(user.get_email_field_name(), 'email') def test_custom_email(self): user = CustomEmailField() self.assertEqual(user.get_email_field_name(), 'email_address') class AbstractUserTestCase(TestCase): def test_email_user(self): # valid send_mail parameters kwargs = { "fail_silently": False, "auth_user": None, "auth_password": None, "connection": None, "html_message": None, } abstract_user = AbstractUser(email='[email protected]') abstract_user.email_user( subject="Subject here", message="This is a message", from_email="[email protected]", **kwargs ) self.assertEqual(len(mail.outbox), 1) message = mail.outbox[0] self.assertEqual(message.subject, "Subject here") self.assertEqual(message.body, "This is a message") self.assertEqual(message.from_email, "[email protected]") self.assertEqual(message.to, [abstract_user.email]) def test_last_login_default(self): user1 = User.objects.create(username='user1') self.assertIsNone(user1.last_login) user2 = User.objects.create_user(username='user2') self.assertIsNone(user2.last_login) def test_user_clean_normalize_email(self): user = User(username='user', password='foo', email='[email 
protected]') user.clean() self.assertEqual(user.email, '[email protected]') def test_user_double_save(self): """ Calling user.save() twice should trigger password_changed() once. """ user = User.objects.create_user(username='user', password='foo') user.set_password('bar') with mock.patch('django.contrib.auth.password_validation.password_changed') as pw_changed: user.save() self.assertEqual(pw_changed.call_count, 1) user.save() self.assertEqual(pw_changed.call_count, 1) @override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS) def test_check_password_upgrade(self): """ password_changed() shouldn't be called if User.check_password() triggers a hash iteration upgrade. """ user = User.objects.create_user(username='user', password='foo') initial_password = user.password self.assertTrue(user.check_password('foo')) hasher = get_hasher('default') self.assertEqual('pbkdf2_sha256', hasher.algorithm) old_iterations = hasher.iterations try: # Upgrade the password iterations hasher.iterations = old_iterations + 1 with mock.patch('django.contrib.auth.password_validation.password_changed') as pw_changed: user.check_password('foo') self.assertEqual(pw_changed.call_count, 0) self.assertNotEqual(initial_password, user.password) finally: hasher.iterations = old_iterations class IsActiveTestCase(TestCase): """ Tests the behavior of the guaranteed is_active attribute """ def test_builtin_user_isactive(self): user = User.objects.create(username='foo', email='[email protected]') # is_active is true by default self.assertIs(user.is_active, True) user.is_active = False user.save() user_fetched = User.objects.get(pk=user.pk) # the is_active flag is saved self.assertFalse(user_fetched.is_active) @override_settings(AUTH_USER_MODEL='auth_tests.IsActiveTestUser1') def test_is_active_field_default(self): """ tests that the default value for is_active is provided """ UserModel = get_user_model() user = UserModel(username='foo') self.assertIs(user.is_active, True) # you can set the attribute - but it will not save user.is_active = False # there should be no problem saving - but the attribute is not saved user.save() user_fetched = UserModel._default_manager.get(pk=user.pk) # the attribute is always true for newly retrieved instance self.assertIs(user_fetched.is_active, True) class TestCreateSuperUserSignals(TestCase): """ Simple test case for ticket #20541 """ def post_save_listener(self, *args, **kwargs): self.signals_count += 1 def setUp(self): self.signals_count = 0 post_save.connect(self.post_save_listener, sender=User) def tearDown(self): post_save.disconnect(self.post_save_listener, sender=User) def test_create_user(self): User.objects.create_user("JohnDoe") self.assertEqual(self.signals_count, 1) def test_create_superuser(self): User.objects.create_superuser("JohnDoe", "[email protected]", "1") self.assertEqual(self.signals_count, 1) class AnonymousUserTests(SimpleTestCase): no_repr_msg = "Django doesn't provide a DB representation for AnonymousUser." 
def setUp(self): self.user = AnonymousUser() def test_properties(self): self.assertIsNone(self.user.pk) self.assertEqual(self.user.username, '') self.assertEqual(self.user.get_username(), '') self.assertIs(self.user.is_anonymous, True) self.assertIs(self.user.is_authenticated, False) self.assertIs(self.user.is_staff, False) self.assertIs(self.user.is_active, False) self.assertIs(self.user.is_superuser, False) self.assertEqual(self.user.groups.all().count(), 0) self.assertEqual(self.user.user_permissions.all().count(), 0) self.assertEqual(self.user.get_user_permissions(), set()) self.assertEqual(self.user.get_group_permissions(), set()) def test_str(self): self.assertEqual(str(self.user), 'AnonymousUser') def test_eq(self): self.assertEqual(self.user, AnonymousUser()) self.assertNotEqual(self.user, User('super', '[email protected]', 'super')) def test_hash(self): self.assertEqual(hash(self.user), 1) def test_int(self): msg = ( 'Cannot cast AnonymousUser to int. Are you trying to use it in ' 'place of User?' ) with self.assertRaisesMessage(TypeError, msg): int(self.user) def test_delete(self): with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg): self.user.delete() def test_save(self): with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg): self.user.save() def test_set_password(self): with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg): self.user.set_password('password') def test_check_password(self): with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg): self.user.check_password('password') class GroupTests(SimpleTestCase): def test_str(self): g = Group(name='Users') self.assertEqual(str(g), 'Users') class PermissionTests(TestCase): def test_str(self): p = Permission.objects.get(codename='view_customemailfield') self.assertEqual(str(p), 'auth_tests | custom email field | Can view custom email field')
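

# Illustrative sketch (not part of the test suite): the two manager behaviors
# covered above in one place. normalize_email() lowercases only the domain
# part of an address, and get_by_natural_key() looks a user up by
# USERNAME_FIELD, which is how fixtures serialized with natural keys are
# resolved. Wrapped in a helper so nothing touches the database at import.

def _manager_demo():
    from django.contrib.auth.models import User, UserManager

    assert UserManager.normalize_email('Alice@EXAMPLE.COM') == 'Alice@example.com'
    # Equivalent to User.objects.get(username='staff'):
    return User.objects.get_by_natural_key('staff')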
from unittest import mock from django.contrib.auth import models from django.contrib.auth.mixins import ( LoginRequiredMixin, PermissionRequiredMixin, UserPassesTestMixin, ) from django.contrib.auth.models import AnonymousUser from django.core.exceptions import PermissionDenied from django.http import HttpResponse from django.test import RequestFactory, SimpleTestCase, TestCase from django.views.generic import View class AlwaysTrueMixin(UserPassesTestMixin): def test_func(self): return True class AlwaysFalseMixin(UserPassesTestMixin): def test_func(self): return False class EmptyResponseView(View): def get(self, request, *args, **kwargs): return HttpResponse() class AlwaysTrueView(AlwaysTrueMixin, EmptyResponseView): pass class AlwaysFalseView(AlwaysFalseMixin, EmptyResponseView): pass class StackedMixinsView1(LoginRequiredMixin, PermissionRequiredMixin, EmptyResponseView): permission_required = ['auth_tests.add_customuser', 'auth_tests.change_customuser'] raise_exception = True class StackedMixinsView2(PermissionRequiredMixin, LoginRequiredMixin, EmptyResponseView): permission_required = ['auth_tests.add_customuser', 'auth_tests.change_customuser'] raise_exception = True class AccessMixinTests(TestCase): factory = RequestFactory() def test_stacked_mixins_success(self): user = models.User.objects.create(username='joe', password='qwerty') perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser')) user.user_permissions.add(*perms) request = self.factory.get('/rand') request.user = user view = StackedMixinsView1.as_view() response = view(request) self.assertEqual(response.status_code, 200) view = StackedMixinsView2.as_view() response = view(request) self.assertEqual(response.status_code, 200) def test_stacked_mixins_missing_permission(self): user = models.User.objects.create(username='joe', password='qwerty') perms = models.Permission.objects.filter(codename__in=('add_customuser',)) user.user_permissions.add(*perms) request = self.factory.get('/rand') request.user = user view = StackedMixinsView1.as_view() with self.assertRaises(PermissionDenied): view(request) view = StackedMixinsView2.as_view() with self.assertRaises(PermissionDenied): view(request) def test_access_mixin_permission_denied_response(self): user = models.User.objects.create(username='joe', password='qwerty') # Authenticated users receive PermissionDenied. request = self.factory.get('/rand') request.user = user view = AlwaysFalseView.as_view() with self.assertRaises(PermissionDenied): view(request) # Anonymous users are redirected to the login page. 
request.user = AnonymousUser() response = view(request) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, '/accounts/login/?next=/rand') @mock.patch.object(models.User, 'is_authenticated', False) def test_stacked_mixins_not_logged_in(self): user = models.User.objects.create(username='joe', password='qwerty') perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser')) user.user_permissions.add(*perms) request = self.factory.get('/rand') request.user = user view = StackedMixinsView1.as_view() with self.assertRaises(PermissionDenied): view(request) view = StackedMixinsView2.as_view() with self.assertRaises(PermissionDenied): view(request) class UserPassesTestTests(SimpleTestCase): factory = RequestFactory() def _test_redirect(self, view=None, url='/accounts/login/?next=/rand'): if not view: view = AlwaysFalseView.as_view() request = self.factory.get('/rand') request.user = AnonymousUser() response = view(request) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, url) def test_default(self): self._test_redirect() def test_custom_redirect_url(self): class AView(AlwaysFalseView): login_url = '/login/' self._test_redirect(AView.as_view(), '/login/?next=/rand') def test_custom_redirect_parameter(self): class AView(AlwaysFalseView): redirect_field_name = 'goto' self._test_redirect(AView.as_view(), '/accounts/login/?goto=/rand') def test_no_redirect_parameter(self): class AView(AlwaysFalseView): redirect_field_name = None self._test_redirect(AView.as_view(), '/accounts/login/') def test_raise_exception(self): class AView(AlwaysFalseView): raise_exception = True request = self.factory.get('/rand') request.user = AnonymousUser() with self.assertRaises(PermissionDenied): AView.as_view()(request) def test_raise_exception_custom_message(self): msg = "You don't have access here" class AView(AlwaysFalseView): raise_exception = True permission_denied_message = msg request = self.factory.get('/rand') request.user = AnonymousUser() view = AView.as_view() with self.assertRaises(PermissionDenied) as cm: view(request) self.assertEqual(cm.exception.args[0], msg) def test_raise_exception_custom_message_function(self): msg = "You don't have access here" class AView(AlwaysFalseView): raise_exception = True def get_permission_denied_message(self): return msg request = self.factory.get('/rand') request.user = AnonymousUser() view = AView.as_view() with self.assertRaises(PermissionDenied) as cm: view(request) self.assertEqual(cm.exception.args[0], msg) def test_user_passes(self): view = AlwaysTrueView.as_view() request = self.factory.get('/rand') request.user = AnonymousUser() response = view(request) self.assertEqual(response.status_code, 200) class LoginRequiredMixinTests(TestCase): factory = RequestFactory() @classmethod def setUpTestData(cls): cls.user = models.User.objects.create(username='joe', password='qwerty') def test_login_required(self): """ login_required works on a simple view wrapped in a login_required decorator. 
""" class AView(LoginRequiredMixin, EmptyResponseView): pass view = AView.as_view() request = self.factory.get('/rand') request.user = AnonymousUser() response = view(request) self.assertEqual(response.status_code, 302) self.assertEqual('/accounts/login/?next=/rand', response.url) request = self.factory.get('/rand') request.user = self.user response = view(request) self.assertEqual(response.status_code, 200) class PermissionsRequiredMixinTests(TestCase): factory = RequestFactory() @classmethod def setUpTestData(cls): cls.user = models.User.objects.create(username='joe', password='qwerty') perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser')) cls.user.user_permissions.add(*perms) def test_many_permissions_pass(self): class AView(PermissionRequiredMixin, EmptyResponseView): permission_required = ['auth_tests.add_customuser', 'auth_tests.change_customuser'] request = self.factory.get('/rand') request.user = self.user resp = AView.as_view()(request) self.assertEqual(resp.status_code, 200) def test_single_permission_pass(self): class AView(PermissionRequiredMixin, EmptyResponseView): permission_required = 'auth_tests.add_customuser' request = self.factory.get('/rand') request.user = self.user resp = AView.as_view()(request) self.assertEqual(resp.status_code, 200) def test_permissioned_denied_redirect(self): class AView(PermissionRequiredMixin, EmptyResponseView): permission_required = [ 'auth_tests.add_customuser', 'auth_tests.change_customuser', 'nonexistent-permission', ] # Authenticated users receive PermissionDenied. request = self.factory.get('/rand') request.user = self.user with self.assertRaises(PermissionDenied): AView.as_view()(request) # Anonymous users are redirected to the login page. request.user = AnonymousUser() resp = AView.as_view()(request) self.assertEqual(resp.status_code, 302) def test_permissioned_denied_exception_raised(self): class AView(PermissionRequiredMixin, EmptyResponseView): permission_required = [ 'auth_tests.add_customuser', 'auth_tests.change_customuser', 'nonexistent-permission', ] raise_exception = True request = self.factory.get('/rand') request.user = self.user with self.assertRaises(PermissionDenied): AView.as_view()(request)
from importlib import import_module from django.apps import apps from django.contrib.auth.models import Permission, User from django.contrib.contenttypes.models import ContentType from django.test import TestCase from django.test.utils import captured_stdout from .models import Proxy, UserProxy update_proxy_permissions = import_module('django.contrib.auth.migrations.0011_update_proxy_permissions') class ProxyModelWithDifferentAppLabelTests(TestCase): available_apps = [ 'auth_tests', 'django.contrib.auth', 'django.contrib.contenttypes', ] def setUp(self): """ Create proxy permissions with content_type to the concrete model rather than the proxy model (as they were before Django 2.2 and migration 11). """ Permission.objects.all().delete() self.concrete_content_type = ContentType.objects.get_for_model(UserProxy) self.default_permission = Permission.objects.create( content_type=self.concrete_content_type, codename='add_userproxy', name='Can add userproxy', ) self.custom_permission = Permission.objects.create( content_type=self.concrete_content_type, codename='use_different_app_label', name='May use a different app label', ) def test_proxy_model_permissions_contenttype(self): proxy_model_content_type = ContentType.objects.get_for_model(UserProxy, for_concrete_model=False) self.assertEqual(self.default_permission.content_type, self.concrete_content_type) self.assertEqual(self.custom_permission.content_type, self.concrete_content_type) update_proxy_permissions.update_proxy_model_permissions(apps, None) self.default_permission.refresh_from_db() self.assertEqual(self.default_permission.content_type, proxy_model_content_type) self.custom_permission.refresh_from_db() self.assertEqual(self.custom_permission.content_type, proxy_model_content_type) def test_user_has_now_proxy_model_permissions(self): user = User.objects.create() user.user_permissions.add(self.default_permission) user.user_permissions.add(self.custom_permission) for permission in [self.default_permission, self.custom_permission]: self.assertTrue(user.has_perm('auth.' + permission.codename)) self.assertFalse(user.has_perm('auth_tests.' + permission.codename)) update_proxy_permissions.update_proxy_model_permissions(apps, None) # Reload user to purge the _perm_cache. user = User._default_manager.get(pk=user.pk) for permission in [self.default_permission, self.custom_permission]: self.assertFalse(user.has_perm('auth.' + permission.codename)) self.assertTrue(user.has_perm('auth_tests.' + permission.codename)) def test_migrate_backwards(self): update_proxy_permissions.update_proxy_model_permissions(apps, None) update_proxy_permissions.revert_proxy_model_permissions(apps, None) self.default_permission.refresh_from_db() self.assertEqual(self.default_permission.content_type, self.concrete_content_type) self.custom_permission.refresh_from_db() self.assertEqual(self.custom_permission.content_type, self.concrete_content_type) def test_user_keeps_same_permissions_after_migrating_backward(self): user = User.objects.create() user.user_permissions.add(self.default_permission) user.user_permissions.add(self.custom_permission) for permission in [self.default_permission, self.custom_permission]: self.assertTrue(user.has_perm('auth.' + permission.codename)) self.assertFalse(user.has_perm('auth_tests.' + permission.codename)) update_proxy_permissions.update_proxy_model_permissions(apps, None) update_proxy_permissions.revert_proxy_model_permissions(apps, None) # Reload user to purge the _perm_cache. 
        user = User._default_manager.get(pk=user.pk)
        for permission in [self.default_permission, self.custom_permission]:
            self.assertTrue(user.has_perm('auth.' + permission.codename))
            self.assertFalse(user.has_perm('auth_tests.' + permission.codename))


class ProxyModelWithSameAppLabelTests(TestCase):
    available_apps = [
        'auth_tests',
        'django.contrib.auth',
        'django.contrib.contenttypes',
    ]

    def setUp(self):
        """
        Create proxy permissions with the content_type of the concrete model
        rather than the proxy model (as they were before Django 2.2 and
        migration 0011).
        """
        Permission.objects.all().delete()
        self.concrete_content_type = ContentType.objects.get_for_model(Proxy)
        self.default_permission = Permission.objects.create(
            content_type=self.concrete_content_type,
            codename='add_proxy',
            name='Can add proxy',
        )
        self.custom_permission = Permission.objects.create(
            content_type=self.concrete_content_type,
            codename='display_proxys',
            name='May display proxys information',
        )

    def test_proxy_model_permissions_contenttype(self):
        proxy_model_content_type = ContentType.objects.get_for_model(Proxy, for_concrete_model=False)
        self.assertEqual(self.default_permission.content_type, self.concrete_content_type)
        self.assertEqual(self.custom_permission.content_type, self.concrete_content_type)
        update_proxy_permissions.update_proxy_model_permissions(apps, None)
        self.default_permission.refresh_from_db()
        self.custom_permission.refresh_from_db()
        self.assertEqual(self.default_permission.content_type, proxy_model_content_type)
        self.assertEqual(self.custom_permission.content_type, proxy_model_content_type)

    def test_user_still_has_proxy_model_permissions(self):
        user = User.objects.create()
        user.user_permissions.add(self.default_permission)
        user.user_permissions.add(self.custom_permission)
        for permission in [self.default_permission, self.custom_permission]:
            self.assertTrue(user.has_perm('auth_tests.' + permission.codename))
        update_proxy_permissions.update_proxy_model_permissions(apps, None)
        # Reload user to purge the _perm_cache.
        user = User._default_manager.get(pk=user.pk)
        for permission in [self.default_permission, self.custom_permission]:
            self.assertTrue(user.has_perm('auth_tests.' + permission.codename))

    def test_migrate_backwards(self):
        update_proxy_permissions.update_proxy_model_permissions(apps, None)
        update_proxy_permissions.revert_proxy_model_permissions(apps, None)
        self.default_permission.refresh_from_db()
        self.assertEqual(self.default_permission.content_type, self.concrete_content_type)
        self.custom_permission.refresh_from_db()
        self.assertEqual(self.custom_permission.content_type, self.concrete_content_type)

    def test_user_keeps_same_permissions_after_migrating_backward(self):
        user = User.objects.create()
        user.user_permissions.add(self.default_permission)
        user.user_permissions.add(self.custom_permission)
        for permission in [self.default_permission, self.custom_permission]:
            self.assertTrue(user.has_perm('auth_tests.' + permission.codename))
        update_proxy_permissions.update_proxy_model_permissions(apps, None)
        update_proxy_permissions.revert_proxy_model_permissions(apps, None)
        # Reload user to purge the _perm_cache.
        user = User._default_manager.get(pk=user.pk)
        for permission in [self.default_permission, self.custom_permission]:
            self.assertTrue(user.has_perm('auth_tests.' + permission.codename))

    def test_migrate_with_existing_target_permission(self):
        """
        Permissions may already exist:

        - Old workaround was to manually create permissions for proxy models.
        - Model may have been concrete and then converted to proxy.
        Output a reminder to audit relevant permissions.
        """
        proxy_model_content_type = ContentType.objects.get_for_model(Proxy, for_concrete_model=False)
        Permission.objects.create(
            content_type=proxy_model_content_type,
            codename='add_proxy',
            name='Can add proxy',
        )
        Permission.objects.create(
            content_type=proxy_model_content_type,
            codename='display_proxys',
            name='May display proxys information',
        )
        with captured_stdout() as stdout:
            update_proxy_permissions.update_proxy_model_permissions(apps, None)
        self.assertIn('A problem arose migrating proxy model permissions', stdout.getvalue())
def563b04ae9cac67d34a247aa11f79fd198582143ff601223787fba1f89648f
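# Tests for the django.contrib.auth management commands (changepassword and
# createsuperuser) and for default permission creation.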
import builtins
import getpass
import sys
from datetime import date
from io import StringIO
from unittest import mock

from django.apps import apps
from django.contrib.auth import get_permission_codename, management
from django.contrib.auth.management import (
    create_permissions, get_default_username,
)
from django.contrib.auth.management.commands import (
    changepassword, createsuperuser,
)
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core.management import call_command
from django.core.management.base import CommandError
from django.db import migrations
from django.test import TestCase, override_settings
from django.utils.translation import gettext_lazy as _

from .models import (
    CustomUser, CustomUserNonUniqueUsername, CustomUserWithFK, Email,
    UserProxy,
)

MOCK_INPUT_KEY_TO_PROMPTS = {
    # @mock_inputs dict key: [expected prompt messages],
    'bypass': ['Bypass password validation and create user anyway? [y/N]: '],
    'email': ['Email address: '],
    'date_of_birth': ['Date of birth: '],
    'first_name': ['First name: '],
    'username': ['Username: ', lambda: "Username (leave blank to use '%s'): " % get_default_username()],
}


def mock_inputs(inputs):
    """
    Decorator to temporarily replace input/getpass to allow interactive
    createsuperuser.
    """
    def inner(test_func):
        def wrapped(*args):
            class mock_getpass:
                @staticmethod
                def getpass(prompt='Password: ', stream=None):
                    if callable(inputs['password']):
                        return inputs['password']()
                    return inputs['password']

            def mock_input(prompt):
                assert '__proxy__' not in prompt
                response = None
                for key, val in inputs.items():
                    if val == 'KeyboardInterrupt':
                        raise KeyboardInterrupt
                    # get() fallback because sometimes 'key' is the actual
                    # prompt rather than a shortcut name.
                    prompt_msgs = MOCK_INPUT_KEY_TO_PROMPTS.get(key, key)
                    if isinstance(prompt_msgs, list):
                        prompt_msgs = [msg() if callable(msg) else msg for msg in prompt_msgs]
                    if prompt in prompt_msgs:
                        if callable(val):
                            response = val()
                        else:
                            response = val
                        break
                if response is None:
                    raise ValueError('Mock input for %r not found.' % prompt)
                return response

            old_getpass = createsuperuser.getpass
            old_input = builtins.input
            createsuperuser.getpass = mock_getpass
            builtins.input = mock_input
            try:
                test_func(*args)
            finally:
                createsuperuser.getpass = old_getpass
                builtins.input = old_input

        return wrapped
    return inner


class MockTTY:
    """
    A fake stdin object that pretends to be a TTY to be used in conjunction
    with mock_inputs.
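    Pass an instance as the 'stdin' option of call_command().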
""" def isatty(self): return True class MockInputTests(TestCase): @mock_inputs({'username': 'alice'}) def test_input_not_found(self): with self.assertRaisesMessage(ValueError, "Mock input for 'Email address: ' not found."): call_command('createsuperuser', stdin=MockTTY()) class GetDefaultUsernameTestCase(TestCase): def setUp(self): self.old_get_system_username = management.get_system_username def tearDown(self): management.get_system_username = self.old_get_system_username def test_actual_implementation(self): self.assertIsInstance(management.get_system_username(), str) def test_simple(self): management.get_system_username = lambda: 'joe' self.assertEqual(management.get_default_username(), 'joe') def test_existing(self): User.objects.create(username='joe') management.get_system_username = lambda: 'joe' self.assertEqual(management.get_default_username(), '') self.assertEqual( management.get_default_username(check_db=False), 'joe') def test_i18n(self): # 'Julia' with accented 'u': management.get_system_username = lambda: 'J\xfalia' self.assertEqual(management.get_default_username(), 'julia') @override_settings(AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}, ]) class ChangepasswordManagementCommandTestCase(TestCase): @classmethod def setUpTestData(cls): cls.user = User.objects.create_user(username='joe', password='qwerty') def setUp(self): self.stdout = StringIO() self.stderr = StringIO() def tearDown(self): self.stdout.close() self.stderr.close() @mock.patch.object(getpass, 'getpass', return_value='password') def test_get_pass(self, mock_get_pass): call_command('changepassword', username='joe', stdout=self.stdout) self.assertIs(User.objects.get(username='joe').check_password('password'), True) @mock.patch.object(getpass, 'getpass', return_value='') def test_get_pass_no_input(self, mock_get_pass): with self.assertRaisesMessage(CommandError, 'aborted'): call_command('changepassword', username='joe', stdout=self.stdout) @mock.patch.object(changepassword.Command, '_get_pass', return_value='new_password') def test_system_username(self, mock_get_pass): """The system username is used if --username isn't provided.""" username = getpass.getuser() User.objects.create_user(username=username, password='qwerty') call_command('changepassword', stdout=self.stdout) self.assertIs(User.objects.get(username=username).check_password('new_password'), True) def test_nonexistent_username(self): with self.assertRaisesMessage(CommandError, "user 'test' does not exist"): call_command('changepassword', username='test', stdout=self.stdout) @mock.patch.object(changepassword.Command, '_get_pass', return_value='not qwerty') def test_that_changepassword_command_changes_joes_password(self, mock_get_pass): "Executing the changepassword management command should change joe's password" self.assertTrue(self.user.check_password('qwerty')) call_command('changepassword', username='joe', stdout=self.stdout) command_output = self.stdout.getvalue().strip() self.assertEqual( command_output, "Changing password for user 'joe'\nPassword changed successfully for user 'joe'" ) self.assertTrue(User.objects.get(username="joe").check_password("not qwerty")) @mock.patch.object(changepassword.Command, '_get_pass', side_effect=lambda *args: str(args)) def test_that_max_tries_exits_1(self, mock_get_pass): """ A CommandError should be thrown by handle() if the user enters in mismatched passwords three times. 
""" msg = "Aborting password change for user 'joe' after 3 attempts" with self.assertRaisesMessage(CommandError, msg): call_command('changepassword', username='joe', stdout=self.stdout, stderr=self.stderr) @mock.patch.object(changepassword.Command, '_get_pass', return_value='1234567890') def test_password_validation(self, mock_get_pass): """ A CommandError should be raised if the user enters in passwords which fail validation three times. """ abort_msg = "Aborting password change for user 'joe' after 3 attempts" with self.assertRaisesMessage(CommandError, abort_msg): call_command('changepassword', username='joe', stdout=self.stdout, stderr=self.stderr) self.assertIn('This password is entirely numeric.', self.stderr.getvalue()) @mock.patch.object(changepassword.Command, '_get_pass', return_value='not qwerty') def test_that_changepassword_command_works_with_nonascii_output(self, mock_get_pass): """ #21627 -- Executing the changepassword management command should allow non-ASCII characters from the User object representation. """ # 'Julia' with accented 'u': User.objects.create_user(username='J\xfalia', password='qwerty') call_command('changepassword', username='J\xfalia', stdout=self.stdout) class MultiDBChangepasswordManagementCommandTestCase(TestCase): databases = {'default', 'other'} @mock.patch.object(changepassword.Command, '_get_pass', return_value='not qwerty') def test_that_changepassword_command_with_database_option_uses_given_db(self, mock_get_pass): """ changepassword --database should operate on the specified DB. """ user = User.objects.db_manager('other').create_user(username='joe', password='qwerty') self.assertTrue(user.check_password('qwerty')) out = StringIO() call_command('changepassword', username='joe', database='other', stdout=out) command_output = out.getvalue().strip() self.assertEqual( command_output, "Changing password for user 'joe'\nPassword changed successfully for user 'joe'" ) self.assertTrue(User.objects.using('other').get(username="joe").check_password('not qwerty')) @override_settings( SILENCED_SYSTEM_CHECKS=['fields.W342'], # ForeignKey(unique=True) AUTH_PASSWORD_VALIDATORS=[{'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}], ) class CreatesuperuserManagementCommandTestCase(TestCase): def test_no_email_argument(self): new_io = StringIO() with self.assertRaisesMessage(CommandError, 'You must use --email with --noinput.'): call_command('createsuperuser', interactive=False, username='joe', stdout=new_io) def test_basic_usage(self): "Check the operation of the createsuperuser management command" # We can use the management command to create a superuser new_io = StringIO() call_command( "createsuperuser", interactive=False, username="joe", email="[email protected]", stdout=new_io ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') u = User.objects.get(username="joe") self.assertEqual(u.email, '[email protected]') # created password should be unusable self.assertFalse(u.has_usable_password()) def test_non_ascii_verbose_name(self): @mock_inputs({ 'password': "nopasswd", "Uživatel (leave blank to use '%s'): " % get_default_username(): 'foo', # username (cz) 'email': '[email protected]', }) def test(self): username_field = User._meta.get_field('username') old_verbose_name = username_field.verbose_name username_field.verbose_name = _('u\u017eivatel') new_io = StringIO() try: call_command( "createsuperuser", interactive=True, stdout=new_io, stdin=MockTTY(), ) finally: 
                username_field.verbose_name = old_verbose_name
            command_output = new_io.getvalue().strip()
            self.assertEqual(command_output, 'Superuser created successfully.')

        test(self)

    def test_verbosity_zero(self):
        # We can suppress output on the management command
        new_io = StringIO()
        call_command(
            "createsuperuser",
            interactive=False,
            username="joe2",
            email="[email protected]",
            verbosity=0,
            stdout=new_io
        )
        command_output = new_io.getvalue().strip()
        self.assertEqual(command_output, '')
        u = User.objects.get(username="joe2")
        self.assertEqual(u.email, '[email protected]')
        self.assertFalse(u.has_usable_password())

    def test_email_in_username(self):
        new_io = StringIO()
        call_command(
            "createsuperuser",
            interactive=False,
            username="[email protected]",
            email="[email protected]",
            stdout=new_io
        )
        u = User._default_manager.get(username="[email protected]")
        self.assertEqual(u.email, '[email protected]')
        self.assertFalse(u.has_usable_password())

    @override_settings(AUTH_USER_MODEL='auth_tests.CustomUser')
    def test_swappable_user(self):
        "A superuser can be created when a custom user model is in use"
        # We can use the management command to create a superuser
        # We skip validation because the temporary substitution of the
        # swappable User model messes with validation.
        new_io = StringIO()
        call_command(
            "createsuperuser",
            interactive=False,
            email="[email protected]",
            date_of_birth="1976-04-01",
            first_name='Joe',
            stdout=new_io,
        )
        command_output = new_io.getvalue().strip()
        self.assertEqual(command_output, 'Superuser created successfully.')
        u = CustomUser._default_manager.get(email="[email protected]")
        self.assertEqual(u.date_of_birth, date(1976, 4, 1))

        # created password should be unusable
        self.assertFalse(u.has_usable_password())

    @override_settings(AUTH_USER_MODEL='auth_tests.CustomUser')
    def test_swappable_user_missing_required_field(self):
        "A custom superuser won't be created when a required field isn't provided"
        # We skip validation because the temporary substitution of the
        # swappable User model messes with validation.
        new_io = StringIO()
        with self.assertRaisesMessage(CommandError, 'You must use --email with --noinput.'):
            call_command(
                "createsuperuser",
                interactive=False,
                stdout=new_io,
                stderr=new_io,
            )
        self.assertEqual(CustomUser._default_manager.count(), 0)

    @override_settings(
        AUTH_USER_MODEL='auth_tests.CustomUserNonUniqueUsername',
        AUTHENTICATION_BACKENDS=['my.custom.backend'],
    )
    def test_swappable_user_username_non_unique(self):
        @mock_inputs({
            'username': 'joe',
            'password': 'nopasswd',
        })
        def createsuperuser():
            new_io = StringIO()
            call_command(
                "createsuperuser",
                interactive=True,
                email="[email protected]",
                stdout=new_io,
                stdin=MockTTY(),
            )
            command_output = new_io.getvalue().strip()
            self.assertEqual(command_output, 'Superuser created successfully.')

        for i in range(2):
            createsuperuser()

        users = CustomUserNonUniqueUsername.objects.filter(username="joe")
        self.assertEqual(users.count(), 2)

    def test_skip_if_not_in_TTY(self):
        """
        If the command is not called from a TTY, it should be skipped and a
        message should be displayed (#7423).
        """
        class FakeStdin:
            """A fake stdin object that has isatty() return False."""
            def isatty(self):
                return False

        out = StringIO()
        call_command(
            "createsuperuser",
            stdin=FakeStdin(),
            stdout=out,
            interactive=True,
        )

        self.assertEqual(User._default_manager.count(), 0)
        self.assertIn("Superuser creation skipped", out.getvalue())

    def test_passing_stdin(self):
        """
        You can pass a stdin object as an option and it should be available
        on self.stdin.
        If no such option is passed, it defaults to sys.stdin.
        """
        sentinel = object()
        command = createsuperuser.Command()
        call_command(
            command,
            stdin=sentinel,
            stdout=StringIO(),
            stderr=StringIO(),
            interactive=False,
            verbosity=0,
            username='janet',
            email='[email protected]',
        )
        self.assertIs(command.stdin, sentinel)

        command = createsuperuser.Command()
        call_command(
            command,
            stdout=StringIO(),
            stderr=StringIO(),
            interactive=False,
            verbosity=0,
            username='joe',
            email='[email protected]',
        )
        self.assertIs(command.stdin, sys.stdin)

    @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithFK')
    def test_fields_with_fk(self):
        new_io = StringIO()
        group = Group.objects.create(name='mygroup')
        email = Email.objects.create(email='[email protected]')
        call_command(
            'createsuperuser',
            interactive=False,
            username=email.pk,
            email=email.email,
            group=group.pk,
            stdout=new_io,
        )
        command_output = new_io.getvalue().strip()
        self.assertEqual(command_output, 'Superuser created successfully.')
        u = CustomUserWithFK._default_manager.get(email=email)
        self.assertEqual(u.username, email)
        self.assertEqual(u.group, group)

        non_existent_email = '[email protected]'
        msg = 'email instance with email %r does not exist.' % non_existent_email
        with self.assertRaisesMessage(CommandError, msg):
            call_command(
                'createsuperuser',
                interactive=False,
                username=email.pk,
                email=non_existent_email,
                stdout=new_io,
            )

    @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithFK')
    def test_fields_with_fk_interactive(self):
        new_io = StringIO()
        group = Group.objects.create(name='mygroup')
        email = Email.objects.create(email='[email protected]')

        @mock_inputs({
            'password': 'nopasswd',
            'Username (Email.id): ': email.pk,
            'Email (Email.email): ': email.email,
            'Group (Group.id): ': group.pk,
        })
        def test(self):
            call_command(
                'createsuperuser',
                interactive=True,
                stdout=new_io,
                stdin=MockTTY(),
            )

            command_output = new_io.getvalue().strip()
            self.assertEqual(command_output, 'Superuser created successfully.')
            u = CustomUserWithFK._default_manager.get(email=email)
            self.assertEqual(u.username, email)
            self.assertEqual(u.group, group)

        test(self)

    def test_default_username(self):
        """createsuperuser uses a default username when one isn't provided."""
        # Get the default username before creating a user.
        default_username = get_default_username()
        new_io = StringIO()
        entered_passwords = ['password', 'password']

        def return_passwords():
            return entered_passwords.pop(0)

        @mock_inputs({'password': return_passwords, 'username': '', 'email': ''})
        def test(self):
            call_command(
                'createsuperuser',
                interactive=True,
                stdin=MockTTY(),
                stdout=new_io,
                stderr=new_io,
            )
            self.assertEqual(new_io.getvalue().strip(), 'Superuser created successfully.')
            self.assertTrue(User.objects.filter(username=default_username).exists())

        test(self)

    def test_password_validation(self):
        """
        Creation should fail if the password fails validation.
        """
        new_io = StringIO()
        entered_passwords = ['1234567890', '1234567890', 'password', 'password']

        def bad_then_good_password():
            return entered_passwords.pop(0)

        @mock_inputs({
            'password': bad_then_good_password,
            'username': 'joe1234567890',
            'email': '',
            'bypass': 'n',
        })
        def test(self):
            call_command(
                "createsuperuser",
                interactive=True,
                stdin=MockTTY(),
                stdout=new_io,
                stderr=new_io,
            )
            self.assertEqual(
                new_io.getvalue().strip(),
                "This password is entirely numeric.\n"
                "Superuser created successfully."
            )

        test(self)

    @override_settings(AUTH_PASSWORD_VALIDATORS=[
        {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
    ])
    def test_validate_password_against_username(self):
        new_io = StringIO()
        username = 'supremelycomplex'
        entered_passwords = [username, username, 'superduperunguessablepassword', 'superduperunguessablepassword']

        def bad_then_good_password():
            return entered_passwords.pop(0)

        @mock_inputs({
            'password': bad_then_good_password,
            'username': username,
            'email': '',
            'bypass': 'n',
        })
        def test(self):
            call_command(
                'createsuperuser',
                interactive=True,
                stdin=MockTTY(),
                stdout=new_io,
                stderr=new_io,
            )
            self.assertEqual(
                new_io.getvalue().strip(),
                'The password is too similar to the username.\n'
                'Superuser created successfully.'
            )

        test(self)

    @override_settings(
        AUTH_USER_MODEL='auth_tests.CustomUser',
        AUTH_PASSWORD_VALIDATORS=[
            {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
        ]
    )
    def test_validate_password_against_required_fields(self):
        new_io = StringIO()
        first_name = 'josephine'
        entered_passwords = [first_name, first_name, 'superduperunguessablepassword', 'superduperunguessablepassword']

        def bad_then_good_password():
            return entered_passwords.pop(0)

        @mock_inputs({
            'password': bad_then_good_password,
            'username': 'whatever',
            'first_name': first_name,
            'date_of_birth': '1970-01-01',
            'email': '[email protected]',
            'bypass': 'n',
        })
        def test(self):
            call_command(
                'createsuperuser',
                interactive=True,
                stdin=MockTTY(),
                stdout=new_io,
                stderr=new_io,
            )
            self.assertEqual(
                new_io.getvalue().strip(),
                "The password is too similar to the first name.\n"
                "Superuser created successfully."
            )

        test(self)

    def test_blank_username(self):
        """Creation fails if --username is blank."""
        new_io = StringIO()

        def test(self):
            with self.assertRaisesMessage(CommandError, 'Username cannot be blank.'):
                call_command(
                    'createsuperuser',
                    username='',
                    stdin=MockTTY(),
                    stdout=new_io,
                    stderr=new_io,
                )

        test(self)

    def test_blank_username_non_interactive(self):
        new_io = StringIO()

        def test(self):
            with self.assertRaisesMessage(CommandError, 'Username cannot be blank.'):
                call_command(
                    'createsuperuser',
                    username='',
                    interactive=False,
                    stdin=MockTTY(),
                    stdout=new_io,
                    stderr=new_io,
                )

        test(self)

    def test_password_validation_bypass(self):
        """
        Password validation can be bypassed by entering 'y' at the prompt.
        """
        new_io = StringIO()

        @mock_inputs({
            'password': '1234567890',
            'username': 'joe1234567890',
            'email': '',
            'bypass': 'y',
        })
        def test(self):
            call_command(
                'createsuperuser',
                interactive=True,
                stdin=MockTTY(),
                stdout=new_io,
                stderr=new_io,
            )
            self.assertEqual(
                new_io.getvalue().strip(),
                'This password is entirely numeric.\n'
                'Superuser created successfully.'
            )

        test(self)

    def test_invalid_username(self):
        """Creation fails if the username fails validation."""
        user_field = User._meta.get_field(User.USERNAME_FIELD)
        new_io = StringIO()
        entered_passwords = ['password', 'password']
        # Enter an invalid (too long) username first and then a valid one.
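        # (The default User.username field has max_length=150.)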
        invalid_username = ('x' * user_field.max_length) + 'y'
        entered_usernames = [invalid_username, 'janet']

        def return_passwords():
            return entered_passwords.pop(0)

        def return_usernames():
            return entered_usernames.pop(0)

        @mock_inputs({'password': return_passwords, 'username': return_usernames, 'email': ''})
        def test(self):
            call_command(
                'createsuperuser',
                interactive=True,
                stdin=MockTTY(),
                stdout=new_io,
                stderr=new_io,
            )
            self.assertEqual(
                new_io.getvalue().strip(),
                'Error: Ensure this value has at most %s characters (it has %s).\n'
                'Superuser created successfully.' % (user_field.max_length, len(invalid_username))
            )

        test(self)

    @mock_inputs({'username': 'KeyboardInterrupt'})
    def test_keyboard_interrupt(self):
        new_io = StringIO()
        with self.assertRaises(SystemExit):
            call_command(
                'createsuperuser',
                interactive=True,
                stdin=MockTTY(),
                stdout=new_io,
                stderr=new_io,
            )
        self.assertEqual(new_io.getvalue(), '\nOperation cancelled.\n')

    def test_existing_username(self):
        """Creation fails if the username already exists."""
        user = User.objects.create(username='janet')
        new_io = StringIO()
        entered_passwords = ['password', 'password']
        # Enter the existing username first and then a new one.
        entered_usernames = [user.username, 'joe']

        def return_passwords():
            return entered_passwords.pop(0)

        def return_usernames():
            return entered_usernames.pop(0)

        @mock_inputs({'password': return_passwords, 'username': return_usernames, 'email': ''})
        def test(self):
            call_command(
                'createsuperuser',
                interactive=True,
                stdin=MockTTY(),
                stdout=new_io,
                stderr=new_io,
            )
            self.assertEqual(
                new_io.getvalue().strip(),
                'Error: That username is already taken.\n'
                'Superuser created successfully.'
            )

        test(self)

    def test_existing_username_non_interactive(self):
        """Creation fails if the username already exists."""
        User.objects.create(username='janet')
        new_io = StringIO()
        with self.assertRaisesMessage(CommandError, "Error: That username is already taken."):
            call_command(
                'createsuperuser',
                username='janet',
                email='',
                interactive=False,
                stdout=new_io,
            )

    def test_existing_username_provided_via_option_and_interactive(self):
        """call_command() gets username='janet' and interactive=True."""
        new_io = StringIO()
        entered_passwords = ['password', 'password']
        User.objects.create(username='janet')

        def return_passwords():
            return entered_passwords.pop(0)

        @mock_inputs({
            'password': return_passwords,
            'username': 'janet1',
            'email': '[email protected]'
        })
        def test(self):
            call_command(
                'createsuperuser',
                username='janet',
                interactive=True,
                stdin=MockTTY(),
                stdout=new_io,
                stderr=new_io,
            )
            msg = 'Error: That username is already taken.\nSuperuser created successfully.'
            self.assertEqual(new_io.getvalue().strip(), msg)

        test(self)

    def test_validation_mismatched_passwords(self):
        """
        Creation should fail if the user enters mismatched passwords.
        """
        new_io = StringIO()

        # The first two passwords do not match, but the second two do match
        # and are valid.
        entered_passwords = ["password", "not password", "password2", "password2"]

        def mismatched_passwords_then_matched():
            return entered_passwords.pop(0)

        @mock_inputs({
            'password': mismatched_passwords_then_matched,
            'username': 'joe1234567890',
            'email': '',
        })
        def test(self):
            call_command(
                "createsuperuser",
                interactive=True,
                stdin=MockTTY(),
                stdout=new_io,
                stderr=new_io,
            )
            self.assertEqual(
                new_io.getvalue().strip(),
                "Error: Your passwords didn't match.\n"
                "Superuser created successfully."
            )

        test(self)

    def test_validation_blank_password_entered(self):
        """
        Creation should fail if the user enters blank passwords.
""" new_io = StringIO() # The first two passwords are empty strings, but the second two are # valid. entered_passwords = ["", "", "password2", "password2"] def blank_passwords_then_valid(): return entered_passwords.pop(0) @mock_inputs({ 'password': blank_passwords_then_valid, 'username': 'joe1234567890', 'email': '', }) def test(self): call_command( "createsuperuser", interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual( new_io.getvalue().strip(), "Error: Blank passwords aren't allowed.\n" "Superuser created successfully." ) test(self) @override_settings(AUTH_USER_MODEL='auth_tests.NoPasswordUser') def test_usermodel_without_password(self): new_io = StringIO() def test(self): call_command( 'createsuperuser', interactive=False, stdin=MockTTY(), stdout=new_io, stderr=new_io, username='username', ) self.assertEqual(new_io.getvalue().strip(), 'Superuser created successfully.') test(self) @override_settings(AUTH_USER_MODEL='auth_tests.NoPasswordUser') def test_usermodel_without_password_interactive(self): new_io = StringIO() @mock_inputs({'username': 'username'}) def test(self): call_command( 'createsuperuser', interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual(new_io.getvalue().strip(), 'Superuser created successfully.') test(self) class MultiDBCreatesuperuserTestCase(TestCase): databases = {'default', 'other'} def test_createsuperuser_command_with_database_option(self): """ changepassword --database should operate on the specified DB. """ new_io = StringIO() call_command( 'createsuperuser', interactive=False, username='joe', email='[email protected]', database='other', stdout=new_io, ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') user = User.objects.using('other').get(username='joe') self.assertEqual(user.email, '[email protected]') class CreatePermissionsTests(TestCase): def setUp(self): self._original_permissions = Permission._meta.permissions[:] self._original_default_permissions = Permission._meta.default_permissions self.app_config = apps.get_app_config('auth') def tearDown(self): Permission._meta.permissions = self._original_permissions Permission._meta.default_permissions = self._original_default_permissions ContentType.objects.clear_cache() def test_default_permissions(self): permission_content_type = ContentType.objects.get_by_natural_key('auth', 'permission') Permission._meta.permissions = [ ('my_custom_permission', 'Some permission'), ] create_permissions(self.app_config, verbosity=0) # view/add/change/delete permission by default + custom permission self.assertEqual(Permission.objects.filter( content_type=permission_content_type, ).count(), 5) Permission.objects.filter(content_type=permission_content_type).delete() Permission._meta.default_permissions = [] create_permissions(self.app_config, verbosity=0) # custom permission only since default permissions is empty self.assertEqual(Permission.objects.filter( content_type=permission_content_type, ).count(), 1) def test_unavailable_models(self): """ #24075 - Permissions shouldn't be created or deleted if the ContentType or Permission models aren't available. 
""" state = migrations.state.ProjectState() # Unavailable contenttypes.ContentType with self.assertNumQueries(0): create_permissions(self.app_config, verbosity=0, apps=state.apps) # Unavailable auth.Permission state = migrations.state.ProjectState(real_apps=['contenttypes']) with self.assertNumQueries(0): create_permissions(self.app_config, verbosity=0, apps=state.apps) def test_create_permissions_checks_contenttypes_created(self): """ `post_migrate` handler ordering isn't guaranteed. Simulate a case where create_permissions() is called before create_contenttypes(). """ # Warm the manager cache. ContentType.objects.get_for_model(Group) # Apply a deletion as if e.g. a database 'flush' had been executed. ContentType.objects.filter(app_label='auth', model='group').delete() # This fails with a foreign key constraint without the fix. create_permissions(apps.get_app_config('auth'), interactive=False, verbosity=0) def test_permission_with_proxy_content_type_created(self): """ A proxy model's permissions use its own content type rather than the content type of the concrete model. """ opts = UserProxy._meta codename = get_permission_codename('add', opts) self.assertTrue( Permission.objects.filter( content_type__model=opts.model_name, content_type__app_label=opts.app_label, codename=codename, ).exists() )