hash: string (length 64)
content: string (length 0 to 1.51M)
e4975896936bb9a53f5e2c01f86a83e0c69f4049babc342a54212eaf63541dde
from django.apps import apps from django.contrib import auth from django.contrib.auth.base_user import AbstractBaseUser, BaseUserManager from django.contrib.auth.hashers import make_password from django.contrib.contenttypes.models import ContentType from django.core.exceptions import PermissionDenied from django.core.mail import send_mail from django.db import models from django.db.models.manager import EmptyManager from django.utils import timezone from django.utils.itercompat import is_iterable from django.utils.translation import gettext_lazy as _ from .validators import UnicodeUsernameValidator def update_last_login(sender, user, **kwargs): """ A signal receiver which updates the last_login date for the user logging in. """ user.last_login = timezone.now() user.save(update_fields=['last_login']) class PermissionManager(models.Manager): use_in_migrations = True def get_by_natural_key(self, codename, app_label, model): return self.get( codename=codename, content_type=ContentType.objects.db_manager(self.db).get_by_natural_key(app_label, model), ) class Permission(models.Model): """ The permissions system provides a way to assign permissions to specific users and groups of users. The permission system is used by the Django admin site, but may also be useful in your own code. The Django admin site uses permissions as follows: - The "add" permission limits the user's ability to view the "add" form and add an object. - The "change" permission limits a user's ability to view the change list, view the "change" form and change an object. - The "delete" permission limits the ability to delete an object. - The "view" permission limits the ability to view an object. Permissions are set globally per type of object, not per specific object instance. It is possible to say "Mary may change news stories," but it's not currently possible to say "Mary may change news stories, but only the ones she created herself" or "Mary may only change news stories that have a certain status or publication date." The permissions listed above are automatically created for each model. """ name = models.CharField(_('name'), max_length=255) content_type = models.ForeignKey( ContentType, models.CASCADE, verbose_name=_('content type'), ) codename = models.CharField(_('codename'), max_length=100) objects = PermissionManager() class Meta: verbose_name = _('permission') verbose_name_plural = _('permissions') unique_together = [['content_type', 'codename']] ordering = ['content_type__app_label', 'content_type__model', 'codename'] def __str__(self): return '%s | %s' % (self.content_type, self.name) def natural_key(self): return (self.codename,) + self.content_type.natural_key() natural_key.dependencies = ['contenttypes.contenttype'] class GroupManager(models.Manager): """ The manager for the auth's Group model. """ use_in_migrations = True def get_by_natural_key(self, name): return self.get(name=name) class Group(models.Model): """ Groups are a generic way of categorizing users to apply permissions, or some other label, to those users. A user can belong to any number of groups. A user in a group automatically has all the permissions granted to that group. For example, if the group 'Site editors' has the permission can_edit_home_page, any user in that group will have that permission. Beyond permissions, groups are a convenient way to categorize users to apply some label, or extended functionality, to them. 
For example, you could create a group 'Special users', and you could write code that would do special things to those users -- such as giving them access to a members-only portion of your site, or sending them members-only email messages. """ name = models.CharField(_('name'), max_length=150, unique=True) permissions = models.ManyToManyField( Permission, verbose_name=_('permissions'), blank=True, ) objects = GroupManager() class Meta: verbose_name = _('group') verbose_name_plural = _('groups') def __str__(self): return self.name def natural_key(self): return (self.name,) class UserManager(BaseUserManager): use_in_migrations = True def _create_user(self, username, email, password, **extra_fields): """ Create and save a user with the given username, email, and password. """ if not username: raise ValueError('The given username must be set') email = self.normalize_email(email) # Lookup the real model class from the global app registry so this # manager method can be used in migrations. This is fine because # managers are by definition working on the real model. GlobalUserModel = apps.get_model(self.model._meta.app_label, self.model._meta.object_name) username = GlobalUserModel.normalize_username(username) user = self.model(username=username, email=email, **extra_fields) user.password = make_password(password) user.save(using=self._db) return user def create_user(self, username, email=None, password=None, **extra_fields): extra_fields.setdefault('is_staff', False) extra_fields.setdefault('is_superuser', False) return self._create_user(username, email, password, **extra_fields) def create_superuser(self, username, email=None, password=None, **extra_fields): extra_fields.setdefault('is_staff', True) extra_fields.setdefault('is_superuser', True) if extra_fields.get('is_staff') is not True: raise ValueError('Superuser must have is_staff=True.') if extra_fields.get('is_superuser') is not True: raise ValueError('Superuser must have is_superuser=True.') return self._create_user(username, email, password, **extra_fields) def with_perm(self, perm, is_active=True, include_superusers=True, backend=None, obj=None): if backend is None: backends = auth._get_backends(return_tuples=True) if len(backends) == 1: backend, _ = backends[0] else: raise ValueError( 'You have multiple authentication backends configured and ' 'therefore must provide the `backend` argument.' ) elif not isinstance(backend, str): raise TypeError( 'backend must be a dotted import path string (got %r).' % backend ) else: backend = auth.load_backend(backend) if hasattr(backend, 'with_perm'): return backend.with_perm( perm, is_active=is_active, include_superusers=include_superusers, obj=obj, ) return self.none() # A few helper functions for common logic between User and AnonymousUser. def _user_get_permissions(user, obj, from_name): permissions = set() name = 'get_%s_permissions' % from_name for backend in auth.get_backends(): if hasattr(backend, name): permissions.update(getattr(backend, name)(user, obj)) return permissions def _user_has_perm(user, perm, obj): """ A backend can raise `PermissionDenied` to short-circuit permission checking. """ for backend in auth.get_backends(): if not hasattr(backend, 'has_perm'): continue try: if backend.has_perm(user, perm, obj): return True except PermissionDenied: return False return False def _user_has_module_perms(user, app_label): """ A backend can raise `PermissionDenied` to short-circuit permission checking. 
""" for backend in auth.get_backends(): if not hasattr(backend, 'has_module_perms'): continue try: if backend.has_module_perms(user, app_label): return True except PermissionDenied: return False return False class PermissionsMixin(models.Model): """ Add the fields and methods necessary to support the Group and Permission models using the ModelBackend. """ is_superuser = models.BooleanField( _('superuser status'), default=False, help_text=_( 'Designates that this user has all permissions without ' 'explicitly assigning them.' ), ) groups = models.ManyToManyField( Group, verbose_name=_('groups'), blank=True, help_text=_( 'The groups this user belongs to. A user will get all permissions ' 'granted to each of their groups.' ), related_name="user_set", related_query_name="user", ) user_permissions = models.ManyToManyField( Permission, verbose_name=_('user permissions'), blank=True, help_text=_('Specific permissions for this user.'), related_name="user_set", related_query_name="user", ) class Meta: abstract = True def get_user_permissions(self, obj=None): """ Return a list of permission strings that this user has directly. Query all available auth backends. If an object is passed in, return only permissions matching this object. """ return _user_get_permissions(self, obj, 'user') def get_group_permissions(self, obj=None): """ Return a list of permission strings that this user has through their groups. Query all available auth backends. If an object is passed in, return only permissions matching this object. """ return _user_get_permissions(self, obj, 'group') def get_all_permissions(self, obj=None): return _user_get_permissions(self, obj, 'all') def has_perm(self, perm, obj=None): """ Return True if the user has the specified permission. Query all available auth backends, but return immediately if any backend returns True. Thus, a user who has permission from a single auth backend is assumed to have permission in general. If an object is provided, check permissions for that object. """ # Active superusers have all permissions. if self.is_active and self.is_superuser: return True # Otherwise we need to check the backends. return _user_has_perm(self, perm, obj) def has_perms(self, perm_list, obj=None): """ Return True if the user has each of the specified permissions. If object is passed, check if the user has all required perms for it. """ if not is_iterable(perm_list) or isinstance(perm_list, str): raise ValueError('perm_list must be an iterable of permissions.') return all(self.has_perm(perm, obj) for perm in perm_list) def has_module_perms(self, app_label): """ Return True if the user has any permissions in the given app label. Use similar logic as has_perm(), above. """ # Active superusers have all permissions. if self.is_active and self.is_superuser: return True return _user_has_module_perms(self, app_label) class AbstractUser(AbstractBaseUser, PermissionsMixin): """ An abstract base class implementing a fully featured User model with admin-compliant permissions. Username and password are required. Other fields are optional. """ username_validator = UnicodeUsernameValidator() username = models.CharField( _('username'), max_length=150, unique=True, help_text=_('Required. 150 characters or fewer. 
Letters, digits and @/./+/-/_ only.'), validators=[username_validator], error_messages={ 'unique': _("A user with that username already exists."), }, ) first_name = models.CharField(_('first name'), max_length=150, blank=True) last_name = models.CharField(_('last name'), max_length=150, blank=True) email = models.EmailField(_('email address'), blank=True) is_staff = models.BooleanField( _('staff status'), default=False, help_text=_('Designates whether the user can log into this admin site.'), ) is_active = models.BooleanField( _('active'), default=True, help_text=_( 'Designates whether this user should be treated as active. ' 'Unselect this instead of deleting accounts.' ), ) date_joined = models.DateTimeField(_('date joined'), default=timezone.now) objects = UserManager() EMAIL_FIELD = 'email' USERNAME_FIELD = 'username' REQUIRED_FIELDS = ['email'] class Meta: verbose_name = _('user') verbose_name_plural = _('users') abstract = True def clean(self): super().clean() self.email = self.__class__.objects.normalize_email(self.email) def get_full_name(self): """ Return the first_name plus the last_name, with a space in between. """ full_name = '%s %s' % (self.first_name, self.last_name) return full_name.strip() def get_short_name(self): """Return the short name for the user.""" return self.first_name def email_user(self, subject, message, from_email=None, **kwargs): """Send an email to this user.""" send_mail(subject, message, from_email, [self.email], **kwargs) class User(AbstractUser): """ Users within the Django authentication system are represented by this model. Username and password are required. Other fields are optional. """ class Meta(AbstractUser.Meta): swappable = 'AUTH_USER_MODEL' class AnonymousUser: id = None pk = None username = '' is_staff = False is_active = False is_superuser = False _groups = EmptyManager(Group) _user_permissions = EmptyManager(Permission) def __str__(self): return 'AnonymousUser' def __eq__(self, other): return isinstance(other, self.__class__) def __hash__(self): return 1 # instances always return the same hash value def __int__(self): raise TypeError('Cannot cast AnonymousUser to int. Are you trying to use it in place of User?') def save(self): raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.") def delete(self): raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.") def set_password(self, raw_password): raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.") def check_password(self, raw_password): raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.") @property def groups(self): return self._groups @property def user_permissions(self): return self._user_permissions def get_user_permissions(self, obj=None): return _user_get_permissions(self, obj, 'user') def get_group_permissions(self, obj=None): return set() def get_all_permissions(self, obj=None): return _user_get_permissions(self, obj, 'all') def has_perm(self, perm, obj=None): return _user_has_perm(self, perm, obj=obj) def has_perms(self, perm_list, obj=None): if not is_iterable(perm_list) or isinstance(perm_list, str): raise ValueError('perm_list must be an iterable of permissions.') return all(self.has_perm(perm, obj) for perm in perm_list) def has_module_perms(self, module): return _user_has_module_perms(self, module) @property def is_anonymous(self): return True @property def is_authenticated(self): return False def get_username(self): return self.username
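A minimal usage sketch of the models above, assuming a configured Django project using the default ModelBackend (for example, run inside "python manage.py shell"). The blog app, its article model, and the credentials are hypothetical; create_user() hashes the password via make_password(), group membership flows through PermissionsMixin, and has_perm() consults every authentication backend.

# Assumes a configured Django project with the default ModelBackend.
# The blog.article content type and its permissions are hypothetical examples.
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType

# create_user() defers to UserManager._create_user(), which normalizes the
# username/email and stores a hashed password via make_password().
user = User.objects.create_user('mary', email='mary@example.com', password='s3cret')

# Permissions are defined per model, not per object instance (see the Permission docstring).
content_type = ContentType.objects.get(app_label='blog', model='article')
can_change = Permission.objects.get(codename='change_article', content_type=content_type)

editors = Group.objects.create(name='Site editors')
editors.permissions.add(can_change)
user.groups.add(editors)

# has_perm() queries every backend; ModelBackend resolves group and user permissions
# and caches them on the instance, so refetch the user to see newly granted permissions.
user = User.objects.get(pk=user.pk)
print(user.has_perm('blog.change_article'))  # True
print(user.get_group_permissions())          # {'blog.change_article'}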
9c02681f312f1d6326775291f95c98f3367e0f6ec3f92d8460d2943a63fa5116
import base64 import binascii import functools import hashlib import importlib import math import warnings from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.signals import setting_changed from django.dispatch import receiver from django.utils.crypto import ( RANDOM_STRING_CHARS, constant_time_compare, get_random_string, md5, pbkdf2, ) from django.utils.module_loading import import_string from django.utils.translation import gettext_noop as _ UNUSABLE_PASSWORD_PREFIX = '!' # This will never be a valid encoded hash UNUSABLE_PASSWORD_SUFFIX_LENGTH = 40 # number of random chars to add after UNUSABLE_PASSWORD_PREFIX def is_password_usable(encoded): """ Return True if this password wasn't generated by User.set_unusable_password(), i.e. make_password(None). """ return encoded is None or not encoded.startswith(UNUSABLE_PASSWORD_PREFIX) def check_password(password, encoded, setter=None, preferred='default'): """ Return a boolean of whether the raw password matches the three part encoded digest. If setter is specified, it'll be called when you need to regenerate the password. """ if password is None or not is_password_usable(encoded): return False preferred = get_hasher(preferred) try: hasher = identify_hasher(encoded) except ValueError: # encoded is gibberish or uses a hasher that's no longer installed. return False hasher_changed = hasher.algorithm != preferred.algorithm must_update = hasher_changed or preferred.must_update(encoded) is_correct = hasher.verify(password, encoded) # If the hasher didn't change (we don't protect against enumeration if it # does) and the password should get updated, try to close the timing gap # between the work factor of the current encoded password and the default # work factor. if not is_correct and not hasher_changed and must_update: hasher.harden_runtime(password, encoded) if setter and is_correct and must_update: setter(password) return is_correct def make_password(password, salt=None, hasher='default'): """ Turn a plain-text password into a hash for database storage Same as encode() but generate a new random salt. If password is None then return a concatenation of UNUSABLE_PASSWORD_PREFIX and a random string, which disallows logins. Additional random string reduces chances of gaining access to staff or superuser accounts. See ticket #20079 for more info. """ if password is None: return UNUSABLE_PASSWORD_PREFIX + get_random_string(UNUSABLE_PASSWORD_SUFFIX_LENGTH) if not isinstance(password, (bytes, str)): raise TypeError( 'Password must be a string or bytes, got %s.' % type(password).__qualname__ ) hasher = get_hasher(hasher) salt = salt or hasher.salt() return hasher.encode(password, salt) @functools.lru_cache def get_hashers(): hashers = [] for hasher_path in settings.PASSWORD_HASHERS: hasher_cls = import_string(hasher_path) hasher = hasher_cls() if not getattr(hasher, 'algorithm'): raise ImproperlyConfigured("hasher doesn't specify an " "algorithm name: %s" % hasher_path) hashers.append(hasher) return hashers @functools.lru_cache def get_hashers_by_algorithm(): return {hasher.algorithm: hasher for hasher in get_hashers()} @receiver(setting_changed) def reset_hashers(*, setting, **kwargs): if setting == 'PASSWORD_HASHERS': get_hashers.cache_clear() get_hashers_by_algorithm.cache_clear() def get_hasher(algorithm='default'): """ Return an instance of a loaded password hasher. If algorithm is 'default', return the default hasher. Lazily import hashers specified in the project's settings file if needed. 
""" if hasattr(algorithm, 'algorithm'): return algorithm elif algorithm == 'default': return get_hashers()[0] else: hashers = get_hashers_by_algorithm() try: return hashers[algorithm] except KeyError: raise ValueError("Unknown password hashing algorithm '%s'. " "Did you specify it in the PASSWORD_HASHERS " "setting?" % algorithm) def identify_hasher(encoded): """ Return an instance of a loaded password hasher. Identify hasher algorithm by examining encoded hash, and call get_hasher() to return hasher. Raise ValueError if algorithm cannot be identified, or if hasher is not loaded. """ # Ancient versions of Django created plain MD5 passwords and accepted # MD5 passwords with an empty salt. if ((len(encoded) == 32 and '$' not in encoded) or (len(encoded) == 37 and encoded.startswith('md5$$'))): algorithm = 'unsalted_md5' # Ancient versions of Django accepted SHA1 passwords with an empty salt. elif len(encoded) == 46 and encoded.startswith('sha1$$'): algorithm = 'unsalted_sha1' else: algorithm = encoded.split('$', 1)[0] return get_hasher(algorithm) def mask_hash(hash, show=6, char="*"): """ Return the given hash, with only the first ``show`` number shown. The rest are masked with ``char`` for security reasons. """ masked = hash[:show] masked += char * len(hash[show:]) return masked def must_update_salt(salt, expected_entropy): # Each character in the salt provides log_2(len(alphabet)) bits of entropy. return len(salt) * math.log2(len(RANDOM_STRING_CHARS)) < expected_entropy class BasePasswordHasher: """ Abstract base class for password hashers When creating your own hasher, you need to override algorithm, verify(), encode() and safe_summary(). PasswordHasher objects are immutable. """ algorithm = None library = None salt_entropy = 128 def _load_library(self): if self.library is not None: if isinstance(self.library, (tuple, list)): name, mod_path = self.library else: mod_path = self.library try: module = importlib.import_module(mod_path) except ImportError as e: raise ValueError("Couldn't load %r algorithm library: %s" % (self.__class__.__name__, e)) return module raise ValueError("Hasher %r doesn't specify a library attribute" % self.__class__.__name__) def salt(self): """ Generate a cryptographically secure nonce salt in ASCII with an entropy of at least `salt_entropy` bits. """ # Each character in the salt provides # log_2(len(alphabet)) bits of entropy. char_count = math.ceil(self.salt_entropy / math.log2(len(RANDOM_STRING_CHARS))) return get_random_string(char_count, allowed_chars=RANDOM_STRING_CHARS) def verify(self, password, encoded): """Check if the given password is correct.""" raise NotImplementedError('subclasses of BasePasswordHasher must provide a verify() method') def _check_encode_args(self, password, salt): if password is None: raise TypeError('password must be provided.') if not salt or '$' in salt: raise ValueError('salt must be provided and cannot contain $.') def encode(self, password, salt): """ Create an encoded database value. The result is normally formatted as "algorithm$salt$hash" and must be fewer than 128 characters. """ raise NotImplementedError('subclasses of BasePasswordHasher must provide an encode() method') def decode(self, encoded): """ Return a decoded database value. The result is a dictionary and should contain `algorithm`, `hash`, and `salt`. Extra keys can be algorithm specific like `iterations` or `work_factor`. """ raise NotImplementedError( 'subclasses of BasePasswordHasher must provide a decode() method.' 
) def safe_summary(self, encoded): """ Return a summary of safe values. The result is a dictionary and will be used where the password field must be displayed to construct a safe representation of the password. """ raise NotImplementedError('subclasses of BasePasswordHasher must provide a safe_summary() method') def must_update(self, encoded): return False def harden_runtime(self, password, encoded): """ Bridge the runtime gap between the work factor supplied in `encoded` and the work factor suggested by this hasher. Taking PBKDF2 as an example, if `encoded` contains 20000 iterations and `self.iterations` is 30000, this method should run password through another 10000 iterations of PBKDF2. Similar approaches should exist for any hasher that has a work factor. If not, this method should be defined as a no-op to silence the warning. """ warnings.warn('subclasses of BasePasswordHasher should provide a harden_runtime() method') class PBKDF2PasswordHasher(BasePasswordHasher): """ Secure password hashing using the PBKDF2 algorithm (recommended) Configured to use PBKDF2 + HMAC + SHA256. The result is a 64 byte binary string. Iterations may be changed safely but you must rename the algorithm if you change SHA256. """ algorithm = "pbkdf2_sha256" iterations = 390000 digest = hashlib.sha256 def encode(self, password, salt, iterations=None): self._check_encode_args(password, salt) iterations = iterations or self.iterations hash = pbkdf2(password, salt, iterations, digest=self.digest) hash = base64.b64encode(hash).decode('ascii').strip() return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, hash) def decode(self, encoded): algorithm, iterations, salt, hash = encoded.split('$', 3) assert algorithm == self.algorithm return { 'algorithm': algorithm, 'hash': hash, 'iterations': int(iterations), 'salt': salt, } def verify(self, password, encoded): decoded = self.decode(encoded) encoded_2 = self.encode(password, decoded['salt'], decoded['iterations']) return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('iterations'): decoded['iterations'], _('salt'): mask_hash(decoded['salt']), _('hash'): mask_hash(decoded['hash']), } def must_update(self, encoded): decoded = self.decode(encoded) update_salt = must_update_salt(decoded['salt'], self.salt_entropy) return (decoded['iterations'] != self.iterations) or update_salt def harden_runtime(self, password, encoded): decoded = self.decode(encoded) extra_iterations = self.iterations - decoded['iterations'] if extra_iterations > 0: self.encode(password, decoded['salt'], extra_iterations) class PBKDF2SHA1PasswordHasher(PBKDF2PasswordHasher): """ Alternate PBKDF2 hasher which uses SHA1, the default PRF recommended by PKCS #5. This is compatible with other implementations of PBKDF2, such as openssl's PKCS5_PBKDF2_HMAC_SHA1(). """ algorithm = "pbkdf2_sha1" digest = hashlib.sha1 class Argon2PasswordHasher(BasePasswordHasher): """ Secure password hashing using the argon2 algorithm. This is the winner of the Password Hashing Competition 2013-2015 (https://password-hashing.net). It requires the argon2-cffi library which depends on native C code and might cause portability issues. 
""" algorithm = 'argon2' library = 'argon2' time_cost = 2 memory_cost = 102400 parallelism = 8 def encode(self, password, salt): argon2 = self._load_library() params = self.params() data = argon2.low_level.hash_secret( password.encode(), salt.encode(), time_cost=params.time_cost, memory_cost=params.memory_cost, parallelism=params.parallelism, hash_len=params.hash_len, type=params.type, ) return self.algorithm + data.decode('ascii') def decode(self, encoded): argon2 = self._load_library() algorithm, rest = encoded.split('$', 1) assert algorithm == self.algorithm params = argon2.extract_parameters('$' + rest) variety, *_, b64salt, hash = rest.split('$') # Add padding. b64salt += '=' * (-len(b64salt) % 4) salt = base64.b64decode(b64salt).decode('latin1') return { 'algorithm': algorithm, 'hash': hash, 'memory_cost': params.memory_cost, 'parallelism': params.parallelism, 'salt': salt, 'time_cost': params.time_cost, 'variety': variety, 'version': params.version, 'params': params, } def verify(self, password, encoded): argon2 = self._load_library() algorithm, rest = encoded.split('$', 1) assert algorithm == self.algorithm try: return argon2.PasswordHasher().verify('$' + rest, password) except argon2.exceptions.VerificationError: return False def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('variety'): decoded['variety'], _('version'): decoded['version'], _('memory cost'): decoded['memory_cost'], _('time cost'): decoded['time_cost'], _('parallelism'): decoded['parallelism'], _('salt'): mask_hash(decoded['salt']), _('hash'): mask_hash(decoded['hash']), } def must_update(self, encoded): decoded = self.decode(encoded) current_params = decoded['params'] new_params = self.params() # Set salt_len to the salt_len of the current parameters because salt # is explicitly passed to argon2. new_params.salt_len = current_params.salt_len update_salt = must_update_salt(decoded['salt'], self.salt_entropy) return (current_params != new_params) or update_salt def harden_runtime(self, password, encoded): # The runtime for Argon2 is too complicated to implement a sensible # hardening algorithm. pass def params(self): argon2 = self._load_library() # salt_len is a noop, because we provide our own salt. return argon2.Parameters( type=argon2.low_level.Type.ID, version=argon2.low_level.ARGON2_VERSION, salt_len=argon2.DEFAULT_RANDOM_SALT_LENGTH, hash_len=argon2.DEFAULT_HASH_LENGTH, time_cost=self.time_cost, memory_cost=self.memory_cost, parallelism=self.parallelism, ) class BCryptSHA256PasswordHasher(BasePasswordHasher): """ Secure password hashing using the bcrypt algorithm (recommended) This is considered by many to be the most secure algorithm but you must first install the bcrypt library. Please be warned that this library depends on native C code and might cause portability issues. """ algorithm = "bcrypt_sha256" digest = hashlib.sha256 library = ("bcrypt", "bcrypt") rounds = 12 def salt(self): bcrypt = self._load_library() return bcrypt.gensalt(self.rounds) def encode(self, password, salt): bcrypt = self._load_library() password = password.encode() # Hash the password prior to using bcrypt to prevent password # truncation as described in #20138. if self.digest is not None: # Use binascii.hexlify() because a hex encoded bytestring is str. 
password = binascii.hexlify(self.digest(password).digest()) data = bcrypt.hashpw(password, salt) return "%s$%s" % (self.algorithm, data.decode('ascii')) def decode(self, encoded): algorithm, empty, algostr, work_factor, data = encoded.split('$', 4) assert algorithm == self.algorithm return { 'algorithm': algorithm, 'algostr': algostr, 'checksum': data[22:], 'salt': data[:22], 'work_factor': int(work_factor), } def verify(self, password, encoded): algorithm, data = encoded.split('$', 1) assert algorithm == self.algorithm encoded_2 = self.encode(password, data.encode('ascii')) return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('work factor'): decoded['work_factor'], _('salt'): mask_hash(decoded['salt']), _('checksum'): mask_hash(decoded['checksum']), } def must_update(self, encoded): decoded = self.decode(encoded) return decoded['work_factor'] != self.rounds def harden_runtime(self, password, encoded): _, data = encoded.split('$', 1) salt = data[:29] # Length of the salt in bcrypt. rounds = data.split('$')[2] # work factor is logarithmic, adding one doubles the load. diff = 2**(self.rounds - int(rounds)) - 1 while diff > 0: self.encode(password, salt.encode('ascii')) diff -= 1 class BCryptPasswordHasher(BCryptSHA256PasswordHasher): """ Secure password hashing using the bcrypt algorithm This is considered by many to be the most secure algorithm but you must first install the bcrypt library. Please be warned that this library depends on native C code and might cause portability issues. This hasher does not first hash the password which means it is subject to bcrypt's 72 bytes password truncation. Most use cases should prefer the BCryptSHA256PasswordHasher. """ algorithm = "bcrypt" digest = None class ScryptPasswordHasher(BasePasswordHasher): """ Secure password hashing using the Scrypt algorithm. 
""" algorithm = 'scrypt' block_size = 8 maxmem = 0 parallelism = 1 work_factor = 2 ** 14 def encode(self, password, salt, n=None, r=None, p=None): self._check_encode_args(password, salt) n = n or self.work_factor r = r or self.block_size p = p or self.parallelism hash_ = hashlib.scrypt( password.encode(), salt=salt.encode(), n=n, r=r, p=p, maxmem=self.maxmem, dklen=64, ) hash_ = base64.b64encode(hash_).decode('ascii').strip() return '%s$%d$%s$%d$%d$%s' % (self.algorithm, n, salt, r, p, hash_) def decode(self, encoded): algorithm, work_factor, salt, block_size, parallelism, hash_ = encoded.split('$', 6) assert algorithm == self.algorithm return { 'algorithm': algorithm, 'work_factor': int(work_factor), 'salt': salt, 'block_size': int(block_size), 'parallelism': int(parallelism), 'hash': hash_, } def verify(self, password, encoded): decoded = self.decode(encoded) encoded_2 = self.encode( password, decoded['salt'], decoded['work_factor'], decoded['block_size'], decoded['parallelism'], ) return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('work factor'): decoded['work_factor'], _('block size'): decoded['block_size'], _('parallelism'): decoded['parallelism'], _('salt'): mask_hash(decoded['salt']), _('hash'): mask_hash(decoded['hash']), } def must_update(self, encoded): decoded = self.decode(encoded) return ( decoded['work_factor'] != self.work_factor or decoded['block_size'] != self.block_size or decoded['parallelism'] != self.parallelism ) def harden_runtime(self, password, encoded): # The runtime for Scrypt is too complicated to implement a sensible # hardening algorithm. pass class SHA1PasswordHasher(BasePasswordHasher): """ The SHA1 password hashing algorithm (not recommended) """ algorithm = "sha1" def encode(self, password, salt): self._check_encode_args(password, salt) hash = hashlib.sha1((salt + password).encode()).hexdigest() return "%s$%s$%s" % (self.algorithm, salt, hash) def decode(self, encoded): algorithm, salt, hash = encoded.split('$', 2) assert algorithm == self.algorithm return { 'algorithm': algorithm, 'hash': hash, 'salt': salt, } def verify(self, password, encoded): decoded = self.decode(encoded) encoded_2 = self.encode(password, decoded['salt']) return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('salt'): mask_hash(decoded['salt'], show=2), _('hash'): mask_hash(decoded['hash']), } def must_update(self, encoded): decoded = self.decode(encoded) return must_update_salt(decoded['salt'], self.salt_entropy) def harden_runtime(self, password, encoded): pass class MD5PasswordHasher(BasePasswordHasher): """ The Salted MD5 password hashing algorithm (not recommended) """ algorithm = "md5" def encode(self, password, salt): self._check_encode_args(password, salt) hash = md5((salt + password).encode()).hexdigest() return "%s$%s$%s" % (self.algorithm, salt, hash) def decode(self, encoded): algorithm, salt, hash = encoded.split('$', 2) assert algorithm == self.algorithm return { 'algorithm': algorithm, 'hash': hash, 'salt': salt, } def verify(self, password, encoded): decoded = self.decode(encoded) encoded_2 = self.encode(password, decoded['salt']) return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('salt'): mask_hash(decoded['salt'], show=2), _('hash'): 
mask_hash(decoded['hash']), } def must_update(self, encoded): decoded = self.decode(encoded) return must_update_salt(decoded['salt'], self.salt_entropy) def harden_runtime(self, password, encoded): pass class UnsaltedSHA1PasswordHasher(BasePasswordHasher): """ Very insecure algorithm that you should *never* use; store SHA1 hashes with an empty salt. This class is implemented because Django used to accept such password hashes. Some older Django installs still have these values lingering around so we need to handle and upgrade them properly. """ algorithm = "unsalted_sha1" def salt(self): return '' def encode(self, password, salt): if salt != '': raise ValueError('salt must be empty.') hash = hashlib.sha1(password.encode()).hexdigest() return 'sha1$$%s' % hash def decode(self, encoded): assert encoded.startswith('sha1$$') return { 'algorithm': self.algorithm, 'hash': encoded[6:], 'salt': None, } def verify(self, password, encoded): encoded_2 = self.encode(password, '') return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('hash'): mask_hash(decoded['hash']), } def harden_runtime(self, password, encoded): pass class UnsaltedMD5PasswordHasher(BasePasswordHasher): """ Incredibly insecure algorithm that you should *never* use; stores unsalted MD5 hashes without the algorithm prefix, also accepts MD5 hashes with an empty salt. This class is implemented because Django used to store passwords this way and to accept such password hashes. Some older Django installs still have these values lingering around so we need to handle and upgrade them properly. """ algorithm = "unsalted_md5" def salt(self): return '' def encode(self, password, salt): if salt != '': raise ValueError('salt must be empty.') return md5(password.encode()).hexdigest() def decode(self, encoded): return { 'algorithm': self.algorithm, 'hash': encoded, 'salt': None, } def verify(self, password, encoded): if len(encoded) == 37 and encoded.startswith('md5$$'): encoded = encoded[5:] encoded_2 = self.encode(password, '') return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('hash'): mask_hash(decoded['hash'], show=3), } def harden_runtime(self, password, encoded): pass class CryptPasswordHasher(BasePasswordHasher): """ Password hashing using UNIX crypt (not recommended) The crypt module is not supported on all platforms. """ algorithm = "crypt" library = "crypt" def salt(self): return get_random_string(2) def encode(self, password, salt): crypt = self._load_library() if len(salt) != 2: raise ValueError('salt must be of length 2.') hash = crypt.crypt(password, salt) if hash is None: # A platform like OpenBSD with a dummy crypt module. 
raise TypeError('hash must be provided.') # we don't need to store the salt, but Django used to do this return '%s$%s$%s' % (self.algorithm, '', hash) def decode(self, encoded): algorithm, salt, hash = encoded.split('$', 2) assert algorithm == self.algorithm return { 'algorithm': algorithm, 'hash': hash, 'salt': salt, } def verify(self, password, encoded): crypt = self._load_library() decoded = self.decode(encoded) data = crypt.crypt(password, decoded['hash']) return constant_time_compare(decoded['hash'], data) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('salt'): decoded['salt'], _('hash'): mask_hash(decoded['hash'], show=3), } def harden_runtime(self, password, encoded): pass
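A short sketch of the module-level hasher API above (make_password(), check_password(), identify_hasher(), is_password_usable()), assuming Django settings with the default PASSWORD_HASHERS; the setter callback is a hypothetical stand-in for persisting an upgraded hash.

# Assumes configured Django settings with the default PASSWORD_HASHERS (PBKDF2 first).
from django.contrib.auth.hashers import (
    check_password, identify_hasher, is_password_usable, make_password,
)

encoded = make_password('correct horse battery staple')
print(encoded.split('$')[0])               # 'pbkdf2_sha256' with the default hasher
print(identify_hasher(encoded).algorithm)  # same algorithm, resolved from the encoded hash

def setter(raw_password):
    # Hypothetical callback: check_password() calls it only when the password is
    # correct but the stored hash needs re-encoding (e.g. the work factor changed).
    print('re-hash and persist the password here')

print(check_password('correct horse battery staple', encoded, setter=setter))  # True

# make_password(None) returns UNUSABLE_PASSWORD_PREFIX plus a random suffix,
# which is what set_unusable_password() stores to block logins.
print(is_password_usable(make_password(None)))  # False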
fc803745449844a39c9d5888dd2c356809a8db8e393ffb01e9c878656a863462
from urllib.parse import urlparse, urlunparse from django.conf import settings # Avoid shadowing the login() and logout() views below. from django.contrib.auth import ( REDIRECT_FIELD_NAME, get_user_model, login as auth_login, logout as auth_logout, update_session_auth_hash, ) from django.contrib.auth.decorators import login_required from django.contrib.auth.forms import ( AuthenticationForm, PasswordChangeForm, PasswordResetForm, SetPasswordForm, ) from django.contrib.auth.tokens import default_token_generator from django.contrib.sites.shortcuts import get_current_site from django.core.exceptions import ImproperlyConfigured, ValidationError from django.http import HttpResponseRedirect, QueryDict from django.shortcuts import resolve_url from django.urls import reverse_lazy from django.utils.decorators import method_decorator from django.utils.http import ( url_has_allowed_host_and_scheme, urlsafe_base64_decode, ) from django.utils.translation import gettext_lazy as _ from django.views.decorators.cache import never_cache from django.views.decorators.csrf import csrf_protect from django.views.decorators.debug import sensitive_post_parameters from django.views.generic.base import TemplateView from django.views.generic.edit import FormView UserModel = get_user_model() class SuccessURLAllowedHostsMixin: success_url_allowed_hosts = set() def get_success_url_allowed_hosts(self): return {self.request.get_host(), *self.success_url_allowed_hosts} class LoginView(SuccessURLAllowedHostsMixin, FormView): """ Display the login form and handle the login action. """ form_class = AuthenticationForm authentication_form = None next_page = None redirect_field_name = REDIRECT_FIELD_NAME template_name = 'registration/login.html' redirect_authenticated_user = False extra_context = None @method_decorator(sensitive_post_parameters()) @method_decorator(csrf_protect) @method_decorator(never_cache) def dispatch(self, request, *args, **kwargs): if self.redirect_authenticated_user and self.request.user.is_authenticated: redirect_to = self.get_success_url() if redirect_to == self.request.path: raise ValueError( "Redirection loop for authenticated user detected. Check that " "your LOGIN_REDIRECT_URL doesn't point to a login page." ) return HttpResponseRedirect(redirect_to) return super().dispatch(request, *args, **kwargs) def get_success_url(self): return self.get_redirect_url() or self.get_default_redirect_url() def get_redirect_url(self): """Return the user-originating redirect URL if it's safe.""" redirect_to = self.request.POST.get( self.redirect_field_name, self.request.GET.get(self.redirect_field_name, '') ) url_is_safe = url_has_allowed_host_and_scheme( url=redirect_to, allowed_hosts=self.get_success_url_allowed_hosts(), require_https=self.request.is_secure(), ) return redirect_to if url_is_safe else '' def get_default_redirect_url(self): """Return the default redirect URL.""" return resolve_url(self.next_page or settings.LOGIN_REDIRECT_URL) def get_form_class(self): return self.authentication_form or self.form_class def get_form_kwargs(self): kwargs = super().get_form_kwargs() kwargs['request'] = self.request return kwargs def form_valid(self, form): """Security check complete. 
Log the user in.""" auth_login(self.request, form.get_user()) return HttpResponseRedirect(self.get_success_url()) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) current_site = get_current_site(self.request) context.update({ self.redirect_field_name: self.get_redirect_url(), 'site': current_site, 'site_name': current_site.name, **(self.extra_context or {}) }) return context class LogoutView(SuccessURLAllowedHostsMixin, TemplateView): """ Log out the user and display the 'You are logged out' message. """ next_page = None redirect_field_name = REDIRECT_FIELD_NAME template_name = 'registration/logged_out.html' extra_context = None @method_decorator(never_cache) def dispatch(self, request, *args, **kwargs): auth_logout(request) next_page = self.get_next_page() if next_page: # Redirect to this page until the session has been cleared. return HttpResponseRedirect(next_page) return super().dispatch(request, *args, **kwargs) def post(self, request, *args, **kwargs): """Logout may be done via POST.""" return self.get(request, *args, **kwargs) def get_next_page(self): if self.next_page is not None: next_page = resolve_url(self.next_page) elif settings.LOGOUT_REDIRECT_URL: next_page = resolve_url(settings.LOGOUT_REDIRECT_URL) else: next_page = self.next_page if (self.redirect_field_name in self.request.POST or self.redirect_field_name in self.request.GET): next_page = self.request.POST.get( self.redirect_field_name, self.request.GET.get(self.redirect_field_name) ) url_is_safe = url_has_allowed_host_and_scheme( url=next_page, allowed_hosts=self.get_success_url_allowed_hosts(), require_https=self.request.is_secure(), ) # Security check -- Ensure the user-originating redirection URL is # safe. if not url_is_safe: next_page = self.request.path return next_page def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) current_site = get_current_site(self.request) context.update({ 'site': current_site, 'site_name': current_site.name, 'title': _('Logged out'), 'subtitle': None, **(self.extra_context or {}) }) return context def logout_then_login(request, login_url=None): """ Log out the user if they are logged in. Then redirect to the login page. """ login_url = resolve_url(login_url or settings.LOGIN_URL) return LogoutView.as_view(next_page=login_url)(request) def redirect_to_login(next, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME): """ Redirect the user to the login page, passing the given 'next' page. 
""" resolved_url = resolve_url(login_url or settings.LOGIN_URL) login_url_parts = list(urlparse(resolved_url)) if redirect_field_name: querystring = QueryDict(login_url_parts[4], mutable=True) querystring[redirect_field_name] = next login_url_parts[4] = querystring.urlencode(safe='/') return HttpResponseRedirect(urlunparse(login_url_parts)) # Class-based password reset views # - PasswordResetView sends the mail # - PasswordResetDoneView shows a success message for the above # - PasswordResetConfirmView checks the link the user clicked and # prompts for a new password # - PasswordResetCompleteView shows a success message for the above class PasswordContextMixin: extra_context = None def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context.update({ 'title': self.title, 'subtitle': None, **(self.extra_context or {}) }) return context class PasswordResetView(PasswordContextMixin, FormView): email_template_name = 'registration/password_reset_email.html' extra_email_context = None form_class = PasswordResetForm from_email = None html_email_template_name = None subject_template_name = 'registration/password_reset_subject.txt' success_url = reverse_lazy('password_reset_done') template_name = 'registration/password_reset_form.html' title = _('Password reset') token_generator = default_token_generator @method_decorator(csrf_protect) def dispatch(self, *args, **kwargs): return super().dispatch(*args, **kwargs) def form_valid(self, form): opts = { 'use_https': self.request.is_secure(), 'token_generator': self.token_generator, 'from_email': self.from_email, 'email_template_name': self.email_template_name, 'subject_template_name': self.subject_template_name, 'request': self.request, 'html_email_template_name': self.html_email_template_name, 'extra_email_context': self.extra_email_context, } form.save(**opts) return super().form_valid(form) INTERNAL_RESET_SESSION_TOKEN = '_password_reset_token' class PasswordResetDoneView(PasswordContextMixin, TemplateView): template_name = 'registration/password_reset_done.html' title = _('Password reset sent') class PasswordResetConfirmView(PasswordContextMixin, FormView): form_class = SetPasswordForm post_reset_login = False post_reset_login_backend = None reset_url_token = 'set-password' success_url = reverse_lazy('password_reset_complete') template_name = 'registration/password_reset_confirm.html' title = _('Enter new password') token_generator = default_token_generator @method_decorator(sensitive_post_parameters()) @method_decorator(never_cache) def dispatch(self, *args, **kwargs): if 'uidb64' not in kwargs or 'token' not in kwargs: raise ImproperlyConfigured( "The URL path must contain 'uidb64' and 'token' parameters." ) self.validlink = False self.user = self.get_user(kwargs['uidb64']) if self.user is not None: token = kwargs['token'] if token == self.reset_url_token: session_token = self.request.session.get(INTERNAL_RESET_SESSION_TOKEN) if self.token_generator.check_token(self.user, session_token): # If the token is valid, display the password reset form. self.validlink = True return super().dispatch(*args, **kwargs) else: if self.token_generator.check_token(self.user, token): # Store the token in the session and redirect to the # password reset form at a URL without the token. That # avoids the possibility of leaking the token in the # HTTP Referer header. 
self.request.session[INTERNAL_RESET_SESSION_TOKEN] = token redirect_url = self.request.path.replace(token, self.reset_url_token) return HttpResponseRedirect(redirect_url) # Display the "Password reset unsuccessful" page. return self.render_to_response(self.get_context_data()) def get_user(self, uidb64): try: # urlsafe_base64_decode() decodes to bytestring uid = urlsafe_base64_decode(uidb64).decode() user = UserModel._default_manager.get(pk=uid) except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist, ValidationError): user = None return user def get_form_kwargs(self): kwargs = super().get_form_kwargs() kwargs['user'] = self.user return kwargs def form_valid(self, form): user = form.save() del self.request.session[INTERNAL_RESET_SESSION_TOKEN] if self.post_reset_login: auth_login(self.request, user, self.post_reset_login_backend) return super().form_valid(form) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) if self.validlink: context['validlink'] = True else: context.update({ 'form': None, 'title': _('Password reset unsuccessful'), 'validlink': False, }) return context class PasswordResetCompleteView(PasswordContextMixin, TemplateView): template_name = 'registration/password_reset_complete.html' title = _('Password reset complete') def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context['login_url'] = resolve_url(settings.LOGIN_URL) return context class PasswordChangeView(PasswordContextMixin, FormView): form_class = PasswordChangeForm success_url = reverse_lazy('password_change_done') template_name = 'registration/password_change_form.html' title = _('Password change') @method_decorator(sensitive_post_parameters()) @method_decorator(csrf_protect) @method_decorator(login_required) def dispatch(self, *args, **kwargs): return super().dispatch(*args, **kwargs) def get_form_kwargs(self): kwargs = super().get_form_kwargs() kwargs['user'] = self.request.user return kwargs def form_valid(self, form): form.save() # Updating the password logs out all other sessions for the user # except the current one. update_session_auth_hash(self.request, form.user) return super().form_valid(form) class PasswordChangeDoneView(PasswordContextMixin, TemplateView): template_name = 'registration/password_change_done.html' title = _('Password change successful') @method_decorator(login_required) def dispatch(self, *args, **kwargs): return super().dispatch(*args, **kwargs)
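A sketch of a URLconf wiring up the class-based views above. The route names follow the defaults these views reverse (PasswordResetView's success_url is reverse_lazy('password_reset_done'), PasswordChangeView's is reverse_lazy('password_change_done'), and so on); the redirect_authenticated_user flag on LoginView is an optional customization shown for illustration.

# urls.py sketch; names match what the views' default success_url values reverse.
from django.contrib.auth import views as auth_views
from django.urls import path

urlpatterns = [
    path('login/', auth_views.LoginView.as_view(redirect_authenticated_user=True), name='login'),
    path('logout/', auth_views.LogoutView.as_view(), name='logout'),
    path('password_change/', auth_views.PasswordChangeView.as_view(), name='password_change'),
    path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'),
    path('password_reset/', auth_views.PasswordResetView.as_view(), name='password_reset'),
    path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'),
    path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'),
    path('reset/done/', auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'),
]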
0a80259fb5dade610a7796a473561892d1661ed94a7830b2b582d7493b0a84cd
import functools import gzip import re from difflib import SequenceMatcher from pathlib import Path from django.conf import settings from django.core.exceptions import ( FieldDoesNotExist, ImproperlyConfigured, ValidationError, ) from django.utils.functional import cached_property, lazy from django.utils.html import format_html, format_html_join from django.utils.module_loading import import_string from django.utils.translation import gettext as _, ngettext @functools.lru_cache(maxsize=None) def get_default_password_validators(): return get_password_validators(settings.AUTH_PASSWORD_VALIDATORS) def get_password_validators(validator_config): validators = [] for validator in validator_config: try: klass = import_string(validator['NAME']) except ImportError: msg = "The module in NAME could not be imported: %s. Check your AUTH_PASSWORD_VALIDATORS setting." raise ImproperlyConfigured(msg % validator['NAME']) validators.append(klass(**validator.get('OPTIONS', {}))) return validators def validate_password(password, user=None, password_validators=None): """ Validate that the password meets all validator requirements. If the password is valid, return ``None``. If the password is invalid, raise ValidationError with all error messages. """ errors = [] if password_validators is None: password_validators = get_default_password_validators() for validator in password_validators: try: validator.validate(password, user) except ValidationError as error: errors.append(error) if errors: raise ValidationError(errors) def password_changed(password, user=None, password_validators=None): """ Inform all validators that have implemented a password_changed() method that the password has been changed. """ if password_validators is None: password_validators = get_default_password_validators() for validator in password_validators: password_changed = getattr(validator, 'password_changed', lambda *a: None) password_changed(password, user) def password_validators_help_texts(password_validators=None): """ Return a list of all help texts of all configured validators. """ help_texts = [] if password_validators is None: password_validators = get_default_password_validators() for validator in password_validators: help_texts.append(validator.get_help_text()) return help_texts def _password_validators_help_text_html(password_validators=None): """ Return an HTML string with all help texts of all configured validators in an <ul>. """ help_texts = password_validators_help_texts(password_validators) help_items = format_html_join('', '<li>{}</li>', ((help_text,) for help_text in help_texts)) return format_html('<ul>{}</ul>', help_items) if help_items else '' password_validators_help_text_html = lazy(_password_validators_help_text_html, str) class MinimumLengthValidator: """ Validate that the password is of a minimum length. """ def __init__(self, min_length=8): self.min_length = min_length def validate(self, password, user=None): if len(password) < self.min_length: raise ValidationError( ngettext( "This password is too short. It must contain at least %(min_length)d character.", "This password is too short. 
It must contain at least %(min_length)d characters.", self.min_length ), code='password_too_short', params={'min_length': self.min_length}, ) def get_help_text(self): return ngettext( "Your password must contain at least %(min_length)d character.", "Your password must contain at least %(min_length)d characters.", self.min_length ) % {'min_length': self.min_length} def exceeds_maximum_length_ratio(password, max_similarity, value): """ Test that value is within a reasonable range of password. The following ratio calculations are based on testing SequenceMatcher like this: for i in range(0,6): print(10**i, SequenceMatcher(a='A', b='A'*(10**i)).quick_ratio()) which yields: 1 1.0 10 0.18181818181818182 100 0.019801980198019802 1000 0.001998001998001998 10000 0.00019998000199980003 100000 1.999980000199998e-05 This means a length_ratio of 10 should never yield a similarity higher than 0.2, for 100 this is down to 0.02 and for 1000 it is 0.002. This can be calculated via 2 / length_ratio. As a result we avoid the potentially expensive sequence matching. """ pwd_len = len(password) length_bound_similarity = max_similarity / 2 * pwd_len value_len = len(value) return pwd_len >= 10 * value_len and value_len < length_bound_similarity class UserAttributeSimilarityValidator: """ Validate that the password is sufficiently different from the user's attributes. If no specific attributes are provided, look at a sensible list of defaults. Attributes that don't exist are ignored. Comparison is made to not only the full attribute value, but also its components, so that, for example, a password is validated against either part of an email address, as well as the full address. """ DEFAULT_USER_ATTRIBUTES = ('username', 'first_name', 'last_name', 'email') def __init__(self, user_attributes=DEFAULT_USER_ATTRIBUTES, max_similarity=0.7): self.user_attributes = user_attributes if max_similarity < 0.1: raise ValueError('max_similarity must be at least 0.1') self.max_similarity = max_similarity def validate(self, password, user=None): if not user: return password = password.lower() for attribute_name in self.user_attributes: value = getattr(user, attribute_name, None) if not value or not isinstance(value, str): continue value_lower = value.lower() value_parts = re.split(r'\W+', value_lower) + [value_lower] for value_part in value_parts: if exceeds_maximum_length_ratio(password, self.max_similarity, value_part): continue if SequenceMatcher(a=password, b=value_part).quick_ratio() >= self.max_similarity: try: verbose_name = str(user._meta.get_field(attribute_name).verbose_name) except FieldDoesNotExist: verbose_name = attribute_name raise ValidationError( _("The password is too similar to the %(verbose_name)s."), code='password_too_similar', params={'verbose_name': verbose_name}, ) def get_help_text(self): return _('Your password can’t be too similar to your other personal information.') class CommonPasswordValidator: """ Validate that the password is not a common password. The password is rejected if it occurs in a provided list of passwords, which may be gzipped. The list Django ships with contains 20000 common passwords (lowercased and deduplicated), created by Royce Williams: https://gist.github.com/roycewilliams/281ce539915a947a23db17137d91aeb7 The password list must be lowercased to match the comparison in validate(). 
""" @cached_property def DEFAULT_PASSWORD_LIST_PATH(self): return Path(__file__).resolve().parent / 'common-passwords.txt.gz' def __init__(self, password_list_path=DEFAULT_PASSWORD_LIST_PATH): if password_list_path is CommonPasswordValidator.DEFAULT_PASSWORD_LIST_PATH: password_list_path = self.DEFAULT_PASSWORD_LIST_PATH try: with gzip.open(password_list_path, 'rt', encoding='utf-8') as f: self.passwords = {x.strip() for x in f} except OSError: with open(password_list_path) as f: self.passwords = {x.strip() for x in f} def validate(self, password, user=None): if password.lower().strip() in self.passwords: raise ValidationError( _("This password is too common."), code='password_too_common', ) def get_help_text(self): return _('Your password can’t be a commonly used password.') class NumericPasswordValidator: """ Validate that the password is not entirely numeric. """ def validate(self, password, user=None): if password.isdigit(): raise ValidationError( _("This password is entirely numeric."), code='password_entirely_numeric', ) def get_help_text(self): return _('Your password can’t be entirely numeric.')
ea6da910e0ff3eaea0ac0e599ca0f21e48e23da3dd7e4049210ac41ac49ad9a2
import json from django import forms from django.contrib.admin.utils import ( display_for_field, flatten_fieldsets, help_text_for_field, label_for_field, lookup_field, quote, ) from django.core.exceptions import ObjectDoesNotExist from django.db.models.fields.related import ( ForeignObjectRel, ManyToManyRel, OneToOneField, ) from django.forms.utils import flatatt from django.template.defaultfilters import capfirst, linebreaksbr from django.urls import NoReverseMatch, reverse from django.utils.html import conditional_escape, format_html from django.utils.safestring import mark_safe from django.utils.translation import gettext, gettext_lazy as _ ACTION_CHECKBOX_NAME = '_selected_action' class ActionForm(forms.Form): action = forms.ChoiceField(label=_('Action:')) select_across = forms.BooleanField( label='', required=False, initial=0, widget=forms.HiddenInput({'class': 'select-across'}), ) checkbox = forms.CheckboxInput({'class': 'action-select'}, lambda value: False) class AdminForm: def __init__(self, form, fieldsets, prepopulated_fields, readonly_fields=None, model_admin=None): self.form, self.fieldsets = form, fieldsets self.prepopulated_fields = [{ 'field': form[field_name], 'dependencies': [form[f] for f in dependencies] } for field_name, dependencies in prepopulated_fields.items()] self.model_admin = model_admin if readonly_fields is None: readonly_fields = () self.readonly_fields = readonly_fields def __repr__(self): return ( f'<{self.__class__.__qualname__}: ' f'form={self.form.__class__.__qualname__} ' f'fieldsets={self.fieldsets!r}>' ) def __iter__(self): for name, options in self.fieldsets: yield Fieldset( self.form, name, readonly_fields=self.readonly_fields, model_admin=self.model_admin, **options ) @property def errors(self): return self.form.errors @property def non_field_errors(self): return self.form.non_field_errors @property def media(self): media = self.form.media for fs in self: media = media + fs.media return media class Fieldset: def __init__(self, form, name=None, readonly_fields=(), fields=(), classes=(), description=None, model_admin=None): self.form = form self.name, self.fields = name, fields self.classes = ' '.join(classes) self.description = description self.model_admin = model_admin self.readonly_fields = readonly_fields @property def media(self): if 'collapse' in self.classes: return forms.Media(js=['admin/js/collapse.js']) return forms.Media() def __iter__(self): for field in self.fields: yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin) class Fieldline: def __init__(self, form, field, readonly_fields=None, model_admin=None): self.form = form # A django.forms.Form instance if not hasattr(field, "__iter__") or isinstance(field, str): self.fields = [field] else: self.fields = field self.has_visible_field = not all( field in self.form.fields and self.form.fields[field].widget.is_hidden for field in self.fields ) self.model_admin = model_admin if readonly_fields is None: readonly_fields = () self.readonly_fields = readonly_fields def __iter__(self): for i, field in enumerate(self.fields): if field in self.readonly_fields: yield AdminReadonlyField(self.form, field, is_first=(i == 0), model_admin=self.model_admin) else: yield AdminField(self.form, field, is_first=(i == 0)) def errors(self): return mark_safe( '\n'.join( self.form[f].errors.as_ul() for f in self.fields if f not in self.readonly_fields ).strip('\n') ) class AdminField: def __init__(self, form, field, is_first): self.field = form[field] # A django.forms.BoundField 
instance self.is_first = is_first # Whether this field is first on the line self.is_checkbox = isinstance(self.field.field.widget, forms.CheckboxInput) self.is_readonly = False def label_tag(self): classes = [] contents = conditional_escape(self.field.label) if self.is_checkbox: classes.append('vCheckboxLabel') if self.field.field.required: classes.append('required') if not self.is_first: classes.append('inline') attrs = {'class': ' '.join(classes)} if classes else {} # checkboxes should not have a label suffix as the checkbox appears # to the left of the label. return self.field.label_tag( contents=mark_safe(contents), attrs=attrs, label_suffix='' if self.is_checkbox else None, ) def errors(self): return mark_safe(self.field.errors.as_ul()) class AdminReadonlyField: def __init__(self, form, field, is_first, model_admin=None): # Make self.field look a little bit like a field. This means that # {{ field.name }} must be a useful class name to identify the field. # For convenience, store other field-related data here too. if callable(field): class_name = field.__name__ if field.__name__ != '<lambda>' else '' else: class_name = field if form._meta.labels and class_name in form._meta.labels: label = form._meta.labels[class_name] else: label = label_for_field(field, form._meta.model, model_admin, form=form) if form._meta.help_texts and class_name in form._meta.help_texts: help_text = form._meta.help_texts[class_name] else: help_text = help_text_for_field(class_name, form._meta.model) if field in form.fields: is_hidden = form.fields[field].widget.is_hidden else: is_hidden = False self.field = { 'name': class_name, 'label': label, 'help_text': help_text, 'field': field, 'is_hidden': is_hidden, } self.form = form self.model_admin = model_admin self.is_first = is_first self.is_checkbox = False self.is_readonly = True self.empty_value_display = model_admin.get_empty_value_display() def label_tag(self): attrs = {} if not self.is_first: attrs["class"] = "inline" label = self.field['label'] return format_html('<label{}>{}{}</label>', flatatt(attrs), capfirst(label), self.form.label_suffix) def get_admin_url(self, remote_field, remote_obj): url_name = 'admin:%s_%s_change' % ( remote_field.model._meta.app_label, remote_field.model._meta.model_name, ) try: url = reverse( url_name, args=[quote(remote_obj.pk)], current_app=self.model_admin.admin_site.name, ) return format_html('<a href="{}">{}</a>', url, remote_obj) except NoReverseMatch: return str(remote_obj) def contents(self): from django.contrib.admin.templatetags.admin_list import _boolean_icon field, obj, model_admin = self.field['field'], self.form.instance, self.model_admin try: f, attr, value = lookup_field(field, obj, model_admin) except (AttributeError, ValueError, ObjectDoesNotExist): result_repr = self.empty_value_display else: if field in self.form.fields: widget = self.form[field].field.widget # This isn't elegant but suffices for contrib.auth's # ReadOnlyPasswordHashWidget. 
if getattr(widget, 'read_only', False): return widget.render(field, value) if f is None: if getattr(attr, 'boolean', False): result_repr = _boolean_icon(value) else: if hasattr(value, "__html__"): result_repr = value else: result_repr = linebreaksbr(value) else: if isinstance(f.remote_field, ManyToManyRel) and value is not None: result_repr = ", ".join(map(str, value.all())) elif ( isinstance(f.remote_field, (ForeignObjectRel, OneToOneField)) and value is not None ): result_repr = self.get_admin_url(f.remote_field, value) else: result_repr = display_for_field(value, f, self.empty_value_display) result_repr = linebreaksbr(result_repr) return conditional_escape(result_repr) class InlineAdminFormSet: """ A wrapper around an inline formset for use in the admin system. """ def __init__(self, inline, formset, fieldsets, prepopulated_fields=None, readonly_fields=None, model_admin=None, has_add_permission=True, has_change_permission=True, has_delete_permission=True, has_view_permission=True): self.opts = inline self.formset = formset self.fieldsets = fieldsets self.model_admin = model_admin if readonly_fields is None: readonly_fields = () self.readonly_fields = readonly_fields if prepopulated_fields is None: prepopulated_fields = {} self.prepopulated_fields = prepopulated_fields self.classes = ' '.join(inline.classes) if inline.classes else '' self.has_add_permission = has_add_permission self.has_change_permission = has_change_permission self.has_delete_permission = has_delete_permission self.has_view_permission = has_view_permission def __iter__(self): if self.has_change_permission: readonly_fields_for_editing = self.readonly_fields else: readonly_fields_for_editing = self.readonly_fields + flatten_fieldsets(self.fieldsets) for form, original in zip(self.formset.initial_forms, self.formset.get_queryset()): view_on_site_url = self.opts.get_view_on_site_url(original) yield InlineAdminForm( self.formset, form, self.fieldsets, self.prepopulated_fields, original, readonly_fields_for_editing, model_admin=self.opts, view_on_site_url=view_on_site_url, ) for form in self.formset.extra_forms: yield InlineAdminForm( self.formset, form, self.fieldsets, self.prepopulated_fields, None, self.readonly_fields, model_admin=self.opts, ) if self.has_add_permission: yield InlineAdminForm( self.formset, self.formset.empty_form, self.fieldsets, self.prepopulated_fields, None, self.readonly_fields, model_admin=self.opts, ) def fields(self): fk = getattr(self.formset, "fk", None) empty_form = self.formset.empty_form meta_labels = empty_form._meta.labels or {} meta_help_texts = empty_form._meta.help_texts or {} for i, field_name in enumerate(flatten_fieldsets(self.fieldsets)): if fk and fk.name == field_name: continue if not self.has_change_permission or field_name in self.readonly_fields: form_field = empty_form.fields.get(field_name) widget_is_hidden = False if form_field is not None: widget_is_hidden = form_field.widget.is_hidden yield { 'name': field_name, 'label': meta_labels.get(field_name) or label_for_field( field_name, self.opts.model, self.opts, form=empty_form, ), 'widget': {'is_hidden': widget_is_hidden}, 'required': False, 'help_text': meta_help_texts.get(field_name) or help_text_for_field(field_name, self.opts.model), } else: form_field = empty_form.fields[field_name] label = form_field.label if label is None: label = label_for_field(field_name, self.opts.model, self.opts, form=empty_form) yield { 'name': field_name, 'label': label, 'widget': form_field.widget, 'required': form_field.required, 'help_text': 
form_field.help_text, } def inline_formset_data(self): verbose_name = self.opts.verbose_name return json.dumps({ 'name': '#%s' % self.formset.prefix, 'options': { 'prefix': self.formset.prefix, 'addText': gettext('Add another %(verbose_name)s') % { 'verbose_name': capfirst(verbose_name), }, 'deleteText': gettext('Remove'), } }) @property def forms(self): return self.formset.forms @property def non_form_errors(self): return self.formset.non_form_errors @property def media(self): media = self.opts.media + self.formset.media for fs in self: media = media + fs.media return media class InlineAdminForm(AdminForm): """ A wrapper around an inline form for use in the admin system. """ def __init__(self, formset, form, fieldsets, prepopulated_fields, original, readonly_fields=None, model_admin=None, view_on_site_url=None): self.formset = formset self.model_admin = model_admin self.original = original self.show_url = original and view_on_site_url is not None self.absolute_url = view_on_site_url super().__init__(form, fieldsets, prepopulated_fields, readonly_fields, model_admin) def __iter__(self): for name, options in self.fieldsets: yield InlineFieldset( self.formset, self.form, name, self.readonly_fields, model_admin=self.model_admin, **options ) def needs_explicit_pk_field(self): return ( # Auto fields are editable, so check for auto or non-editable pk. self.form._meta.model._meta.auto_field or not self.form._meta.model._meta.pk.editable or # Also search any parents for an auto field. (The pk info is # propagated to child models so that does not need to be checked # in parents.) any(parent._meta.auto_field or not parent._meta.model._meta.pk.editable for parent in self.form._meta.model._meta.get_parent_list()) ) def pk_field(self): return AdminField(self.form, self.formset._pk_field.name, False) def fk_field(self): fk = getattr(self.formset, "fk", None) if fk: return AdminField(self.form, fk.name, False) else: return "" def deletion_field(self): from django.forms.formsets import DELETION_FIELD_NAME return AdminField(self.form, DELETION_FIELD_NAME, False) def ordering_field(self): from django.forms.formsets import ORDERING_FIELD_NAME return AdminField(self.form, ORDERING_FIELD_NAME, False) class InlineFieldset(Fieldset): def __init__(self, formset, *args, **kwargs): self.formset = formset super().__init__(*args, **kwargs) def __iter__(self): fk = getattr(self.formset, "fk", None) for field in self.fields: if not fk or fk.name != field: yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin) class AdminErrorList(forms.utils.ErrorList): """Store errors for the form/formsets in an add/change view.""" def __init__(self, form, inline_formsets): super().__init__() if form.is_bound: self.extend(form.errors.values()) for inline_formset in inline_formsets: self.extend(inline_formset.non_form_errors()) for errors_in_inline_form in inline_formset.errors: self.extend(errors_in_inline_form.values())
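
# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module), assuming a
# plain ModelForm instance and a registered ModelAdmin. It shows how the
# wrappers above compose: iterating an AdminForm yields Fieldset objects,
# iterating a Fieldset yields Fieldline objects, and iterating a Fieldline
# yields AdminField / AdminReadonlyField objects -- the structure the admin
# change-form template walks. The names ``article_form`` and
# ``article_admin`` are hypothetical placeholders.
def _example_iterate_admin_form(article_form, article_admin):
    """Return the label tags the change-form template would render."""
    fieldsets = [(None, {'fields': list(article_form.fields)})]
    admin_form = AdminForm(
        article_form,
        fieldsets,
        prepopulated_fields={},
        readonly_fields=(),
        model_admin=article_admin,
    )
    labels = []
    for fieldset in admin_form:           # Fieldset
        for line in fieldset:             # Fieldline
            for admin_field in line:      # AdminField or AdminReadonlyField
                labels.append(admin_field.label_tag())
    return labels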
c0d8c23a67728f339acce6489471246110221fd8ced337b770b6202376a798fa
from contextlib import contextmanager from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.test import modify_settings from django.test.selenium import SeleniumTestCase from django.utils.deprecation import MiddlewareMixin from django.utils.translation import gettext as _ class CSPMiddleware(MiddlewareMixin): """The admin's JavaScript should be compatible with CSP.""" def process_response(self, request, response): response.headers['Content-Security-Policy'] = "default-src 'self'" return response @modify_settings(MIDDLEWARE={'append': 'django.contrib.admin.tests.CSPMiddleware'}) class AdminSeleniumTestCase(SeleniumTestCase, StaticLiveServerTestCase): available_apps = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', ] def wait_until(self, callback, timeout=10): """ Block the execution of the tests until the specified callback returns a value that is not falsy. This method can be called, for example, after clicking a link or submitting a form. See the other public methods that call this function for more details. """ from selenium.webdriver.support.wait import WebDriverWait WebDriverWait(self.selenium, timeout).until(callback) def wait_for_and_switch_to_popup(self, num_windows=2, timeout=10): """ Block until `num_windows` are present and are ready (usually 2, but can be overridden in the case of pop-ups opening other pop-ups). Switch the current window to the new pop-up. """ self.wait_until(lambda d: len(d.window_handles) == num_windows, timeout) self.selenium.switch_to.window(self.selenium.window_handles[-1]) self.wait_page_ready() def wait_for(self, css_selector, timeout=10): """ Block until a CSS selector is found on the page. """ from selenium.webdriver.common.by import By from selenium.webdriver.support import expected_conditions as ec self.wait_until( ec.presence_of_element_located((By.CSS_SELECTOR, css_selector)), timeout ) def wait_for_text(self, css_selector, text, timeout=10): """ Block until the text is found in the CSS selector. """ from selenium.webdriver.common.by import By from selenium.webdriver.support import expected_conditions as ec self.wait_until( ec.text_to_be_present_in_element( (By.CSS_SELECTOR, css_selector), text), timeout ) def wait_for_value(self, css_selector, text, timeout=10): """ Block until the value is found in the CSS selector. """ from selenium.webdriver.common.by import By from selenium.webdriver.support import expected_conditions as ec self.wait_until( ec.text_to_be_present_in_element_value( (By.CSS_SELECTOR, css_selector), text), timeout ) def wait_until_visible(self, css_selector, timeout=10): """ Block until the element described by the CSS selector is visible. """ from selenium.webdriver.common.by import By from selenium.webdriver.support import expected_conditions as ec self.wait_until( ec.visibility_of_element_located((By.CSS_SELECTOR, css_selector)), timeout ) def wait_until_invisible(self, css_selector, timeout=10): """ Block until the element described by the CSS selector is invisible. """ from selenium.webdriver.common.by import By from selenium.webdriver.support import expected_conditions as ec self.wait_until( ec.invisibility_of_element_located((By.CSS_SELECTOR, css_selector)), timeout ) def wait_page_ready(self, timeout=10): """ Block until the page is ready. 
""" self.wait_until( lambda driver: driver.execute_script('return document.readyState;') == 'complete', timeout, ) @contextmanager def wait_page_loaded(self, timeout=10): """ Block until a new page has loaded and is ready. """ from selenium.webdriver.common.by import By from selenium.webdriver.support import expected_conditions as ec old_page = self.selenium.find_element(By.TAG_NAME, 'html') yield # Wait for the next page to be loaded self.wait_until(ec.staleness_of(old_page), timeout=timeout) self.wait_page_ready(timeout=timeout) def admin_login(self, username, password, login_url='/admin/'): """ Log in to the admin. """ from selenium.webdriver.common.by import By self.selenium.get('%s%s' % (self.live_server_url, login_url)) username_input = self.selenium.find_element(By.NAME, 'username') username_input.send_keys(username) password_input = self.selenium.find_element(By.NAME, 'password') password_input.send_keys(password) login_text = _('Log in') with self.wait_page_loaded(): self.selenium.find_element(By.XPATH, '//input[@value="%s"]' % login_text).click() def select_option(self, selector, value): """ Select the <OPTION> with the value `value` inside the <SELECT> widget identified by the CSS selector `selector`. """ from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select select = Select(self.selenium.find_element(By.CSS_SELECTOR, selector)) select.select_by_value(value) def deselect_option(self, selector, value): """ Deselect the <OPTION> with the value `value` inside the <SELECT> widget identified by the CSS selector `selector`. """ from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select select = Select(self.selenium.find_element(By.CSS_SELECTOR, selector)) select.deselect_by_value(value) def assertCountSeleniumElements(self, selector, count, root_element=None): """ Assert number of matches for a CSS selector. `root_element` allow restriction to a pre-selected node. """ from selenium.webdriver.common.by import By root_element = root_element or self.selenium self.assertEqual(len(root_element.find_elements(By.CSS_SELECTOR, selector)), count) def _assertOptionsValues(self, options_selector, values): from selenium.webdriver.common.by import By if values: options = self.selenium.find_elements(By.CSS_SELECTOR, options_selector) actual_values = [] for option in options: actual_values.append(option.get_attribute('value')) self.assertEqual(values, actual_values) else: # Prevent the `find_elements(By.CSS_SELECTOR, …)` call from blocking # if the selector doesn't match any options as we expect it # to be the case. with self.disable_implicit_wait(): self.wait_until( lambda driver: not driver.find_elements(By.CSS_SELECTOR, options_selector) ) def assertSelectOptions(self, selector, values): """ Assert that the <SELECT> widget identified by `selector` has the options with the given `values`. """ self._assertOptionsValues("%s > option" % selector, values) def assertSelectedOptions(self, selector, values): """ Assert that the <SELECT> widget identified by `selector` has the selected options with the given `values`. """ self._assertOptionsValues("%s > option:checked" % selector, values) def has_css_class(self, selector, klass): """ Return True if the element identified by `selector` has the CSS class `klass`. """ from selenium.webdriver.common.by import By return self.selenium.find_element( By.CSS_SELECTOR, selector, ).get_attribute('class').find(klass) != -1
e5f6a9a6e5d344d38f5c179a4543cb93faac106b92db6fe0266b2f6d59028b75
import copy import json import re from functools import partial, update_wrapper from urllib.parse import quote as urlquote from django import forms from django.conf import settings from django.contrib import messages from django.contrib.admin import helpers, widgets from django.contrib.admin.checks import ( BaseModelAdminChecks, InlineModelAdminChecks, ModelAdminChecks, ) from django.contrib.admin.decorators import display from django.contrib.admin.exceptions import DisallowedModelAdminToField from django.contrib.admin.templatetags.admin_urls import add_preserved_filters from django.contrib.admin.utils import ( NestedObjects, construct_change_message, flatten_fieldsets, get_deleted_objects, lookup_spawns_duplicates, model_format_dict, model_ngettext, quote, unquote, ) from django.contrib.admin.widgets import ( AutocompleteSelect, AutocompleteSelectMultiple, ) from django.contrib.auth import get_permission_codename from django.core.exceptions import ( FieldDoesNotExist, FieldError, PermissionDenied, ValidationError, ) from django.core.paginator import Paginator from django.db import models, router, transaction from django.db.models.constants import LOOKUP_SEP from django.forms.formsets import DELETION_FIELD_NAME, all_valid from django.forms.models import ( BaseInlineFormSet, inlineformset_factory, modelform_defines_fields, modelform_factory, modelformset_factory, ) from django.forms.widgets import CheckboxSelectMultiple, SelectMultiple from django.http import HttpResponseRedirect from django.http.response import HttpResponseBase from django.template.response import SimpleTemplateResponse, TemplateResponse from django.urls import reverse from django.utils.decorators import method_decorator from django.utils.html import format_html from django.utils.http import urlencode from django.utils.safestring import mark_safe from django.utils.text import ( capfirst, format_lazy, get_text_list, smart_split, unescape_string_literal, ) from django.utils.translation import gettext as _, ngettext from django.views.decorators.csrf import csrf_protect from django.views.generic import RedirectView IS_POPUP_VAR = '_popup' TO_FIELD_VAR = '_to_field' HORIZONTAL, VERTICAL = 1, 2 def get_content_type_for_model(obj): # Since this module gets imported in the application's root package, # it cannot import models from other applications at the module level. from django.contrib.contenttypes.models import ContentType return ContentType.objects.get_for_model(obj, for_concrete_model=False) def get_ul_class(radio_style): return 'radiolist' if radio_style == VERTICAL else 'radiolist inline' class IncorrectLookupParameters(Exception): pass # Defaults for formfield_overrides. ModelAdmin subclasses can change this # by adding to ModelAdmin.formfield_overrides. 
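# Illustrative sketch (not part of the original module): a ModelAdmin
# subclass normally customizes these defaults per field class via its own
# formfield_overrides rather than editing this dict, e.g. (hypothetical
# names):
#
#     class ArticleAdmin(ModelAdmin):
#         formfield_overrides = {
#             models.TextField: {'widget': forms.Textarea(attrs={'rows': 4})},
#         }
#
# BaseModelAdmin.__init__() deep-copies the defaults below and merges the
# subclass's formfield_overrides on top, so per-admin settings win for the
# same field class.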
FORMFIELD_FOR_DBFIELD_DEFAULTS = { models.DateTimeField: { 'form_class': forms.SplitDateTimeField, 'widget': widgets.AdminSplitDateTime }, models.DateField: {'widget': widgets.AdminDateWidget}, models.TimeField: {'widget': widgets.AdminTimeWidget}, models.TextField: {'widget': widgets.AdminTextareaWidget}, models.URLField: {'widget': widgets.AdminURLFieldWidget}, models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget}, models.BigIntegerField: {'widget': widgets.AdminBigIntegerFieldWidget}, models.CharField: {'widget': widgets.AdminTextInputWidget}, models.ImageField: {'widget': widgets.AdminFileWidget}, models.FileField: {'widget': widgets.AdminFileWidget}, models.EmailField: {'widget': widgets.AdminEmailInputWidget}, models.UUIDField: {'widget': widgets.AdminUUIDInputWidget}, } csrf_protect_m = method_decorator(csrf_protect) class BaseModelAdmin(metaclass=forms.MediaDefiningClass): """Functionality common to both ModelAdmin and InlineAdmin.""" autocomplete_fields = () raw_id_fields = () fields = None exclude = None fieldsets = None form = forms.ModelForm filter_vertical = () filter_horizontal = () radio_fields = {} prepopulated_fields = {} formfield_overrides = {} readonly_fields = () ordering = None sortable_by = None view_on_site = True show_full_result_count = True checks_class = BaseModelAdminChecks def check(self, **kwargs): return self.checks_class().check(self, **kwargs) def __init__(self): # Merge FORMFIELD_FOR_DBFIELD_DEFAULTS with the formfield_overrides # rather than simply overwriting. overrides = copy.deepcopy(FORMFIELD_FOR_DBFIELD_DEFAULTS) for k, v in self.formfield_overrides.items(): overrides.setdefault(k, {}).update(v) self.formfield_overrides = overrides def formfield_for_dbfield(self, db_field, request, **kwargs): """ Hook for specifying the form Field instance for a given database Field instance. If kwargs are given, they're passed to the form Field's constructor. """ # If the field specifies choices, we don't need to look for special # admin widgets - we just need to use a select widget of some kind. if db_field.choices: return self.formfield_for_choice_field(db_field, request, **kwargs) # ForeignKey or ManyToManyFields if isinstance(db_field, (models.ForeignKey, models.ManyToManyField)): # Combine the field kwargs with any options for formfield_overrides. # Make sure the passed in **kwargs override anything in # formfield_overrides because **kwargs is more specific, and should # always win. if db_field.__class__ in self.formfield_overrides: kwargs = {**self.formfield_overrides[db_field.__class__], **kwargs} # Get the correct formfield. if isinstance(db_field, models.ForeignKey): formfield = self.formfield_for_foreignkey(db_field, request, **kwargs) elif isinstance(db_field, models.ManyToManyField): formfield = self.formfield_for_manytomany(db_field, request, **kwargs) # For non-raw_id fields, wrap the widget with a wrapper that adds # extra HTML -- the "add other" interface -- to the end of the # rendered output. formfield can be None if it came from a # OneToOneField with parent_link=True or a M2M intermediary. 
if formfield and db_field.name not in self.raw_id_fields: related_modeladmin = self.admin_site._registry.get(db_field.remote_field.model) wrapper_kwargs = {} if related_modeladmin: wrapper_kwargs.update( can_add_related=related_modeladmin.has_add_permission(request), can_change_related=related_modeladmin.has_change_permission(request), can_delete_related=related_modeladmin.has_delete_permission(request), can_view_related=related_modeladmin.has_view_permission(request), ) formfield.widget = widgets.RelatedFieldWidgetWrapper( formfield.widget, db_field.remote_field, self.admin_site, **wrapper_kwargs ) return formfield # If we've got overrides for the formfield defined, use 'em. **kwargs # passed to formfield_for_dbfield override the defaults. for klass in db_field.__class__.mro(): if klass in self.formfield_overrides: kwargs = {**copy.deepcopy(self.formfield_overrides[klass]), **kwargs} return db_field.formfield(**kwargs) # For any other type of field, just call its formfield() method. return db_field.formfield(**kwargs) def formfield_for_choice_field(self, db_field, request, **kwargs): """ Get a form Field for a database Field that has declared choices. """ # If the field is named as a radio_field, use a RadioSelect if db_field.name in self.radio_fields: # Avoid stomping on custom widget/choices arguments. if 'widget' not in kwargs: kwargs['widget'] = widgets.AdminRadioSelect(attrs={ 'class': get_ul_class(self.radio_fields[db_field.name]), }) if 'choices' not in kwargs: kwargs['choices'] = db_field.get_choices( include_blank=db_field.blank, blank_choice=[('', _('None'))] ) return db_field.formfield(**kwargs) def get_field_queryset(self, db, db_field, request): """ If the ModelAdmin specifies ordering, the queryset should respect that ordering. Otherwise don't specify the queryset, let the field decide (return None in that case). """ related_admin = self.admin_site._registry.get(db_field.remote_field.model) if related_admin is not None: ordering = related_admin.get_ordering(request) if ordering is not None and ordering != (): return db_field.remote_field.model._default_manager.using(db).order_by(*ordering) return None def formfield_for_foreignkey(self, db_field, request, **kwargs): """ Get a form Field for a ForeignKey. """ db = kwargs.get('using') if 'widget' not in kwargs: if db_field.name in self.get_autocomplete_fields(request): kwargs['widget'] = AutocompleteSelect(db_field, self.admin_site, using=db) elif db_field.name in self.raw_id_fields: kwargs['widget'] = widgets.ForeignKeyRawIdWidget(db_field.remote_field, self.admin_site, using=db) elif db_field.name in self.radio_fields: kwargs['widget'] = widgets.AdminRadioSelect(attrs={ 'class': get_ul_class(self.radio_fields[db_field.name]), }) kwargs['empty_label'] = _('None') if db_field.blank else None if 'queryset' not in kwargs: queryset = self.get_field_queryset(db, db_field, request) if queryset is not None: kwargs['queryset'] = queryset return db_field.formfield(**kwargs) def formfield_for_manytomany(self, db_field, request, **kwargs): """ Get a form Field for a ManyToManyField. """ # If it uses an intermediary model that isn't auto created, don't show # a field in admin. 
if not db_field.remote_field.through._meta.auto_created: return None db = kwargs.get('using') if 'widget' not in kwargs: autocomplete_fields = self.get_autocomplete_fields(request) if db_field.name in autocomplete_fields: kwargs['widget'] = AutocompleteSelectMultiple( db_field, self.admin_site, using=db, ) elif db_field.name in self.raw_id_fields: kwargs['widget'] = widgets.ManyToManyRawIdWidget( db_field.remote_field, self.admin_site, using=db, ) elif db_field.name in [*self.filter_vertical, *self.filter_horizontal]: kwargs['widget'] = widgets.FilteredSelectMultiple( db_field.verbose_name, db_field.name in self.filter_vertical ) if 'queryset' not in kwargs: queryset = self.get_field_queryset(db, db_field, request) if queryset is not None: kwargs['queryset'] = queryset form_field = db_field.formfield(**kwargs) if (isinstance(form_field.widget, SelectMultiple) and not isinstance(form_field.widget, (CheckboxSelectMultiple, AutocompleteSelectMultiple))): msg = _('Hold down “Control”, or “Command” on a Mac, to select more than one.') help_text = form_field.help_text form_field.help_text = format_lazy('{} {}', help_text, msg) if help_text else msg return form_field def get_autocomplete_fields(self, request): """ Return a list of ForeignKey and/or ManyToMany fields which should use an autocomplete widget. """ return self.autocomplete_fields def get_view_on_site_url(self, obj=None): if obj is None or not self.view_on_site: return None if callable(self.view_on_site): return self.view_on_site(obj) elif hasattr(obj, 'get_absolute_url'): # use the ContentType lookup if view_on_site is True return reverse('admin:view_on_site', kwargs={ 'content_type_id': get_content_type_for_model(obj).pk, 'object_id': obj.pk }) def get_empty_value_display(self): """ Return the empty_value_display set on ModelAdmin or AdminSite. """ try: return mark_safe(self.empty_value_display) except AttributeError: return mark_safe(self.admin_site.empty_value_display) def get_exclude(self, request, obj=None): """ Hook for specifying exclude. """ return self.exclude def get_fields(self, request, obj=None): """ Hook for specifying fields. """ if self.fields: return self.fields # _get_form_for_get_fields() is implemented in subclasses. form = self._get_form_for_get_fields(request, obj) return [*form.base_fields, *self.get_readonly_fields(request, obj)] def get_fieldsets(self, request, obj=None): """ Hook for specifying fieldsets. """ if self.fieldsets: return self.fieldsets return [(None, {'fields': self.get_fields(request, obj)})] def get_inlines(self, request, obj): """Hook for specifying custom inlines.""" return self.inlines def get_ordering(self, request): """ Hook for specifying field ordering. """ return self.ordering or () # otherwise we might try to *None, which is bad ;) def get_readonly_fields(self, request, obj=None): """ Hook for specifying custom readonly fields. """ return self.readonly_fields def get_prepopulated_fields(self, request, obj=None): """ Hook for specifying custom prepopulated fields. """ return self.prepopulated_fields def get_queryset(self, request): """ Return a QuerySet of all model instances that can be edited by the admin site. This is used by changelist_view. """ qs = self.model._default_manager.get_queryset() # TODO: this should be handled by some parameter to the ChangeList. 
ordering = self.get_ordering(request) if ordering: qs = qs.order_by(*ordering) return qs def get_sortable_by(self, request): """Hook for specifying which fields can be sorted in the changelist.""" return self.sortable_by if self.sortable_by is not None else self.get_list_display(request) def lookup_allowed(self, lookup, value): from django.contrib.admin.filters import SimpleListFilter model = self.model # Check FKey lookups that are allowed, so that popups produced by # ForeignKeyRawIdWidget, on the basis of ForeignKey.limit_choices_to, # are allowed to work. for fk_lookup in model._meta.related_fkey_lookups: # As ``limit_choices_to`` can be a callable, invoke it here. if callable(fk_lookup): fk_lookup = fk_lookup() if (lookup, value) in widgets.url_params_from_lookup_dict(fk_lookup).items(): return True relation_parts = [] prev_field = None for part in lookup.split(LOOKUP_SEP): try: field = model._meta.get_field(part) except FieldDoesNotExist: # Lookups on nonexistent fields are ok, since they're ignored # later. break # It is allowed to filter on values that would be found from local # model anyways. For example, if you filter on employee__department__id, # then the id value would be found already from employee__department_id. if not prev_field or (prev_field.is_relation and field not in prev_field.path_infos[-1].target_fields): relation_parts.append(part) if not getattr(field, 'path_infos', None): # This is not a relational field, so further parts # must be transforms. break prev_field = field model = field.path_infos[-1].to_opts.model if len(relation_parts) <= 1: # Either a local field filter, or no fields at all. return True valid_lookups = {self.date_hierarchy} for filter_item in self.list_filter: if isinstance(filter_item, type) and issubclass(filter_item, SimpleListFilter): valid_lookups.add(filter_item.parameter_name) elif isinstance(filter_item, (list, tuple)): valid_lookups.add(filter_item[0]) else: valid_lookups.add(filter_item) # Is it a valid relational lookup? return not { LOOKUP_SEP.join(relation_parts), LOOKUP_SEP.join(relation_parts + [part]) }.isdisjoint(valid_lookups) def to_field_allowed(self, request, to_field): """ Return True if the model associated with this admin should be allowed to be referenced by the specified field. """ opts = self.model._meta try: field = opts.get_field(to_field) except FieldDoesNotExist: return False # Always allow referencing the primary key since it's already possible # to get this information from the change view URL. if field.primary_key: return True # Allow reverse relationships to models defining m2m fields if they # target the specified field. for many_to_many in opts.many_to_many: if many_to_many.m2m_target_field_name() == to_field: return True # Make sure at least one of the models registered for this site # references this field through a FK or a M2M relationship. 
registered_models = set() for model, admin in self.admin_site._registry.items(): registered_models.add(model) for inline in admin.inlines: registered_models.add(inline.model) related_objects = ( f for f in opts.get_fields(include_hidden=True) if (f.auto_created and not f.concrete) ) for related_object in related_objects: related_model = related_object.related_model remote_field = related_object.field.remote_field if (any(issubclass(model, related_model) for model in registered_models) and hasattr(remote_field, 'get_related_field') and remote_field.get_related_field() == field): return True return False def has_add_permission(self, request): """ Return True if the given request has permission to add an object. Can be overridden by the user in subclasses. """ opts = self.opts codename = get_permission_codename('add', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_change_permission(self, request, obj=None): """ Return True if the given request has permission to change the given Django model instance, the default implementation doesn't examine the `obj` parameter. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to change the `obj` model instance. If `obj` is None, this should return True if the given request has permission to change *any* object of the given type. """ opts = self.opts codename = get_permission_codename('change', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_delete_permission(self, request, obj=None): """ Return True if the given request has permission to change the given Django model instance, the default implementation doesn't examine the `obj` parameter. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to delete the `obj` model instance. If `obj` is None, this should return True if the given request has permission to delete *any* object of the given type. """ opts = self.opts codename = get_permission_codename('delete', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_view_permission(self, request, obj=None): """ Return True if the given request has permission to view the given Django model instance. The default implementation doesn't examine the `obj` parameter. If overridden by the user in subclasses, it should return True if the given request has permission to view the `obj` model instance. If `obj` is None, it should return True if the request has permission to view any object of the given type. """ opts = self.opts codename_view = get_permission_codename('view', opts) codename_change = get_permission_codename('change', opts) return ( request.user.has_perm('%s.%s' % (opts.app_label, codename_view)) or request.user.has_perm('%s.%s' % (opts.app_label, codename_change)) ) def has_view_or_change_permission(self, request, obj=None): return self.has_view_permission(request, obj) or self.has_change_permission(request, obj) def has_module_permission(self, request): """ Return True if the given request has any permission in the given app label. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to view the module on the admin index page and access the module's index page. Overriding it does not restrict access to the add, change or delete views. Use `ModelAdmin.has_(add|change|delete)_permission` for that. 
""" return request.user.has_module_perms(self.opts.app_label) class ModelAdmin(BaseModelAdmin): """Encapsulate all admin options and functionality for a given model.""" list_display = ('__str__',) list_display_links = () list_filter = () list_select_related = False list_per_page = 100 list_max_show_all = 200 list_editable = () search_fields = () search_help_text = None date_hierarchy = None save_as = False save_as_continue = True save_on_top = False paginator = Paginator preserve_filters = True inlines = () # Custom templates (designed to be over-ridden in subclasses) add_form_template = None change_form_template = None change_list_template = None delete_confirmation_template = None delete_selected_confirmation_template = None object_history_template = None popup_response_template = None # Actions actions = () action_form = helpers.ActionForm actions_on_top = True actions_on_bottom = False actions_selection_counter = True checks_class = ModelAdminChecks def __init__(self, model, admin_site): self.model = model self.opts = model._meta self.admin_site = admin_site super().__init__() def __str__(self): return "%s.%s" % (self.model._meta.app_label, self.__class__.__name__) def __repr__(self): return ( f'<{self.__class__.__qualname__}: model={self.model.__qualname__} ' f'site={self.admin_site!r}>' ) def get_inline_instances(self, request, obj=None): inline_instances = [] for inline_class in self.get_inlines(request, obj): inline = inline_class(self.model, self.admin_site) if request: if not (inline.has_view_or_change_permission(request, obj) or inline.has_add_permission(request, obj) or inline.has_delete_permission(request, obj)): continue if not inline.has_add_permission(request, obj): inline.max_num = 0 inline_instances.append(inline) return inline_instances def get_urls(self): from django.urls import path def wrap(view): def wrapper(*args, **kwargs): return self.admin_site.admin_view(view)(*args, **kwargs) wrapper.model_admin = self return update_wrapper(wrapper, view) info = self.model._meta.app_label, self.model._meta.model_name return [ path('', wrap(self.changelist_view), name='%s_%s_changelist' % info), path('add/', wrap(self.add_view), name='%s_%s_add' % info), path('<path:object_id>/history/', wrap(self.history_view), name='%s_%s_history' % info), path('<path:object_id>/delete/', wrap(self.delete_view), name='%s_%s_delete' % info), path('<path:object_id>/change/', wrap(self.change_view), name='%s_%s_change' % info), # For backwards compatibility (was the change url before 1.9) path('<path:object_id>/', wrap(RedirectView.as_view( pattern_name='%s:%s_%s_change' % ((self.admin_site.name,) + info) ))), ] @property def urls(self): return self.get_urls() @property def media(self): extra = '' if settings.DEBUG else '.min' js = [ 'vendor/jquery/jquery%s.js' % extra, 'jquery.init.js', 'core.js', 'admin/RelatedObjectLookups.js', 'actions.js', 'urlify.js', 'prepopulate.js', 'vendor/xregexp/xregexp%s.js' % extra, ] return forms.Media(js=['admin/js/%s' % url for url in js]) def get_model_perms(self, request): """ Return a dict of all perms for this model. This dict has the keys ``add``, ``change``, ``delete``, and ``view`` mapping to the True/False for each of those actions. 
""" return { 'add': self.has_add_permission(request), 'change': self.has_change_permission(request), 'delete': self.has_delete_permission(request), 'view': self.has_view_permission(request), } def _get_form_for_get_fields(self, request, obj): return self.get_form(request, obj, fields=None) def get_form(self, request, obj=None, change=False, **kwargs): """ Return a Form class for use in the admin add view. This is used by add_view and change_view. """ if 'fields' in kwargs: fields = kwargs.pop('fields') else: fields = flatten_fieldsets(self.get_fieldsets(request, obj)) excluded = self.get_exclude(request, obj) exclude = [] if excluded is None else list(excluded) readonly_fields = self.get_readonly_fields(request, obj) exclude.extend(readonly_fields) # Exclude all fields if it's a change form and the user doesn't have # the change permission. if change and hasattr(request, 'user') and not self.has_change_permission(request, obj): exclude.extend(fields) if excluded is None and hasattr(self.form, '_meta') and self.form._meta.exclude: # Take the custom ModelForm's Meta.exclude into account only if the # ModelAdmin doesn't define its own. exclude.extend(self.form._meta.exclude) # if exclude is an empty list we pass None to be consistent with the # default on modelform_factory exclude = exclude or None # Remove declared form fields which are in readonly_fields. new_attrs = dict.fromkeys(f for f in readonly_fields if f in self.form.declared_fields) form = type(self.form.__name__, (self.form,), new_attrs) defaults = { 'form': form, 'fields': fields, 'exclude': exclude, 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } if defaults['fields'] is None and not modelform_defines_fields(defaults['form']): defaults['fields'] = forms.ALL_FIELDS try: return modelform_factory(self.model, **defaults) except FieldError as e: raise FieldError( '%s. Check fields/fieldsets/exclude attributes of class %s.' % (e, self.__class__.__name__) ) def get_changelist(self, request, **kwargs): """ Return the ChangeList class for use on the changelist page. """ from django.contrib.admin.views.main import ChangeList return ChangeList def get_changelist_instance(self, request): """ Return a `ChangeList` instance based on `request`. May raise `IncorrectLookupParameters`. """ list_display = self.get_list_display(request) list_display_links = self.get_list_display_links(request, list_display) # Add the action checkboxes if any actions are available. if self.get_actions(request): list_display = ['action_checkbox', *list_display] sortable_by = self.get_sortable_by(request) ChangeList = self.get_changelist(request) return ChangeList( request, self.model, list_display, list_display_links, self.get_list_filter(request), self.date_hierarchy, self.get_search_fields(request), self.get_list_select_related(request), self.list_per_page, self.list_max_show_all, self.list_editable, self, sortable_by, self.search_help_text, ) def get_object(self, request, object_id, from_field=None): """ Return an instance matching the field and value provided, the primary key is used if no field is provided. Return ``None`` if no match is found or the object_id fails validation. 
""" queryset = self.get_queryset(request) model = queryset.model field = model._meta.pk if from_field is None else model._meta.get_field(from_field) try: object_id = field.to_python(object_id) return queryset.get(**{field.name: object_id}) except (model.DoesNotExist, ValidationError, ValueError): return None def get_changelist_form(self, request, **kwargs): """ Return a Form class for use in the Formset on the changelist page. """ defaults = { 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } if defaults.get('fields') is None and not modelform_defines_fields(defaults.get('form')): defaults['fields'] = forms.ALL_FIELDS return modelform_factory(self.model, **defaults) def get_changelist_formset(self, request, **kwargs): """ Return a FormSet class for use on the changelist page if list_editable is used. """ defaults = { 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } return modelformset_factory( self.model, self.get_changelist_form(request), extra=0, fields=self.list_editable, **defaults ) def get_formsets_with_inlines(self, request, obj=None): """ Yield formsets and the corresponding inlines. """ for inline in self.get_inline_instances(request, obj): yield inline.get_formset(request, obj), inline def get_paginator(self, request, queryset, per_page, orphans=0, allow_empty_first_page=True): return self.paginator(queryset, per_page, orphans, allow_empty_first_page) def log_addition(self, request, obj, message): """ Log that an object has been successfully added. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import ADDITION, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(obj).pk, object_id=obj.pk, object_repr=str(obj), action_flag=ADDITION, change_message=message, ) def log_change(self, request, obj, message): """ Log that an object has been successfully changed. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import CHANGE, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(obj).pk, object_id=obj.pk, object_repr=str(obj), action_flag=CHANGE, change_message=message, ) def log_deletion(self, request, obj, object_repr): """ Log that an object will be deleted. Note that this method must be called before the deletion. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import DELETION, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(obj).pk, object_id=obj.pk, object_repr=object_repr, action_flag=DELETION, ) @display(description=mark_safe('<input type="checkbox" id="action-toggle">')) def action_checkbox(self, obj): """ A list_display column containing a checkbox widget. """ return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME, str(obj.pk)) @staticmethod def _get_action_description(func, name): return getattr(func, 'short_description', capfirst(name.replace('_', ' '))) def _get_base_actions(self): """Return the list of actions, prior to any request-based filtering.""" actions = [] base_actions = (self.get_action(action) for action in self.actions or []) # get_action might have returned None, so filter any of those out. 
base_actions = [action for action in base_actions if action] base_action_names = {name for _, name, _ in base_actions} # Gather actions from the admin site first for (name, func) in self.admin_site.actions: if name in base_action_names: continue description = self._get_action_description(func, name) actions.append((func, name, description)) # Add actions from this ModelAdmin. actions.extend(base_actions) return actions def _filter_actions_by_permissions(self, request, actions): """Filter out any actions that the user doesn't have access to.""" filtered_actions = [] for action in actions: callable = action[0] if not hasattr(callable, 'allowed_permissions'): filtered_actions.append(action) continue permission_checks = ( getattr(self, 'has_%s_permission' % permission) for permission in callable.allowed_permissions ) if any(has_permission(request) for has_permission in permission_checks): filtered_actions.append(action) return filtered_actions def get_actions(self, request): """ Return a dictionary mapping the names of all actions for this ModelAdmin to a tuple of (callable, name, description) for each action. """ # If self.actions is set to None that means actions are disabled on # this page. if self.actions is None or IS_POPUP_VAR in request.GET: return {} actions = self._filter_actions_by_permissions(request, self._get_base_actions()) return {name: (func, name, desc) for func, name, desc in actions} def get_action_choices(self, request, default_choices=models.BLANK_CHOICE_DASH): """ Return a list of choices for use in a form object. Each choice is a tuple (name, description). """ choices = [] + default_choices for func, name, description in self.get_actions(request).values(): choice = (name, description % model_format_dict(self.opts)) choices.append(choice) return choices def get_action(self, action): """ Return a given action from a parameter, which can either be a callable, or the name of a method on the ModelAdmin. Return is a tuple of (callable, name, description). """ # If the action is a callable, just use it. if callable(action): func = action action = action.__name__ # Next, look for a method. Grab it off self.__class__ to get an unbound # method instead of a bound one; this ensures that the calling # conventions are the same for functions and methods. elif hasattr(self.__class__, action): func = getattr(self.__class__, action) # Finally, look for a named method on the admin site else: try: func = self.admin_site.get_action(action) except KeyError: return None description = self._get_action_description(func, action) return func, action, description def get_list_display(self, request): """ Return a sequence containing the fields to be displayed on the changelist. """ return self.list_display def get_list_display_links(self, request, list_display): """ Return a sequence containing the fields to be displayed as links on the changelist. The list_display parameter is the list of fields returned by get_list_display(). """ if self.list_display_links or self.list_display_links is None or not list_display: return self.list_display_links else: # Use only the first item in list_display as link return list(list_display)[:1] def get_list_filter(self, request): """ Return a sequence containing the fields to be displayed as filters in the right sidebar of the changelist page. """ return self.list_filter def get_list_select_related(self, request): """ Return a list of fields to add to the select_related() part of the changelist items query. 
""" return self.list_select_related def get_search_fields(self, request): """ Return a sequence containing the fields to be searched whenever somebody submits a search query. """ return self.search_fields def get_search_results(self, request, queryset, search_term): """ Return a tuple containing a queryset to implement the search and a boolean indicating if the results may contain duplicates. """ # Apply keyword searches. def construct_search(field_name): if field_name.startswith('^'): return "%s__istartswith" % field_name[1:] elif field_name.startswith('='): return "%s__iexact" % field_name[1:] elif field_name.startswith('@'): return "%s__search" % field_name[1:] # Use field_name if it includes a lookup. opts = queryset.model._meta lookup_fields = field_name.split(LOOKUP_SEP) # Go through the fields, following all relations. prev_field = None for path_part in lookup_fields: if path_part == 'pk': path_part = opts.pk.name try: field = opts.get_field(path_part) except FieldDoesNotExist: # Use valid query lookups. if prev_field and prev_field.get_lookup(path_part): return field_name else: prev_field = field if hasattr(field, 'path_infos'): # Update opts to follow the relation. opts = field.path_infos[-1].to_opts # Otherwise, use the field with icontains. return "%s__icontains" % field_name may_have_duplicates = False search_fields = self.get_search_fields(request) if search_fields and search_term: orm_lookups = [construct_search(str(search_field)) for search_field in search_fields] term_queries = [] for bit in smart_split(search_term): if bit.startswith(('"', "'")) and bit[0] == bit[-1]: bit = unescape_string_literal(bit) or_queries = models.Q( *((orm_lookup, bit) for orm_lookup in orm_lookups), _connector=models.Q.OR, ) term_queries.append(or_queries) queryset = queryset.filter(models.Q(*term_queries)) may_have_duplicates |= any( lookup_spawns_duplicates(self.opts, search_spec) for search_spec in orm_lookups ) return queryset, may_have_duplicates def get_preserved_filters(self, request): """ Return the preserved filters querystring. """ match = request.resolver_match if self.preserve_filters and match: opts = self.model._meta current_url = '%s:%s' % (match.app_name, match.url_name) changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name) if current_url == changelist_url: preserved_filters = request.GET.urlencode() else: preserved_filters = request.GET.get('_changelist_filters') if preserved_filters: return urlencode({'_changelist_filters': preserved_filters}) return '' def construct_change_message(self, request, form, formsets, add=False): """ Construct a JSON structure describing changes from a changed object. """ return construct_change_message(form, formsets, add) def message_user(self, request, message, level=messages.INFO, extra_tags='', fail_silently=False): """ Send a message to the user. The default implementation posts a message using the django.contrib.messages backend. Exposes almost the same API as messages.add_message(), but accepts the positional arguments in a different order to maintain backwards compatibility. For convenience, it accepts the `level` argument as a string rather than the usual level number. """ if not isinstance(level, int): # attempt to get the level if passed a string try: level = getattr(messages.constants, level.upper()) except AttributeError: levels = messages.constants.DEFAULT_TAGS.values() levels_repr = ', '.join('`%s`' % level for level in levels) raise ValueError( 'Bad message level string: `%s`. 
Possible values are: %s' % (level, levels_repr) ) messages.add_message(request, level, message, extra_tags=extra_tags, fail_silently=fail_silently) def save_form(self, request, form, change): """ Given a ModelForm return an unsaved instance. ``change`` is True if the object is being changed, and False if it's being added. """ return form.save(commit=False) def save_model(self, request, obj, form, change): """ Given a model instance save it to the database. """ obj.save() def delete_model(self, request, obj): """ Given a model instance delete it from the database. """ obj.delete() def delete_queryset(self, request, queryset): """Given a queryset, delete it from the database.""" queryset.delete() def save_formset(self, request, form, formset, change): """ Given an inline formset save it to the database. """ formset.save() def save_related(self, request, form, formsets, change): """ Given the ``HttpRequest``, the parent ``ModelForm`` instance, the list of inline formsets and a boolean value based on whether the parent is being added or changed, save the related objects to the database. Note that at this point save_form() and save_model() have already been called. """ form.save_m2m() for formset in formsets: self.save_formset(request, form, formset, change=change) def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None): opts = self.model._meta app_label = opts.app_label preserved_filters = self.get_preserved_filters(request) form_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, form_url) view_on_site_url = self.get_view_on_site_url(obj) has_editable_inline_admin_formsets = False for inline in context['inline_admin_formsets']: if inline.has_add_permission or inline.has_change_permission or inline.has_delete_permission: has_editable_inline_admin_formsets = True break context.update({ 'add': add, 'change': change, 'has_view_permission': self.has_view_permission(request, obj), 'has_add_permission': self.has_add_permission(request), 'has_change_permission': self.has_change_permission(request, obj), 'has_delete_permission': self.has_delete_permission(request, obj), 'has_editable_inline_admin_formsets': has_editable_inline_admin_formsets, 'has_file_field': context['adminform'].form.is_multipart() or any( admin_formset.formset.is_multipart() for admin_formset in context['inline_admin_formsets'] ), 'has_absolute_url': view_on_site_url is not None, 'absolute_url': view_on_site_url, 'form_url': form_url, 'opts': opts, 'content_type_id': get_content_type_for_model(self.model).pk, 'save_as': self.save_as, 'save_on_top': self.save_on_top, 'to_field_var': TO_FIELD_VAR, 'is_popup_var': IS_POPUP_VAR, 'app_label': app_label, }) if add and self.add_form_template is not None: form_template = self.add_form_template else: form_template = self.change_form_template request.current_app = self.admin_site.name return TemplateResponse(request, form_template or [ "admin/%s/%s/change_form.html" % (app_label, opts.model_name), "admin/%s/change_form.html" % app_label, "admin/change_form.html" ], context) def response_add(self, request, obj, post_url_continue=None): """ Determine the HttpResponse for the add_view stage. """ opts = obj._meta preserved_filters = self.get_preserved_filters(request) obj_url = reverse( 'admin:%s_%s_change' % (opts.app_label, opts.model_name), args=(quote(obj.pk),), current_app=self.admin_site.name, ) # Add a link to the object's change form if the user can edit the obj. 
if self.has_change_permission(request, obj): obj_repr = format_html('<a href="{}">{}</a>', urlquote(obj_url), obj) else: obj_repr = str(obj) msg_dict = { 'name': opts.verbose_name, 'obj': obj_repr, } # Here, we distinguish between different save types by checking for # the presence of keys in request.POST. if IS_POPUP_VAR in request.POST: to_field = request.POST.get(TO_FIELD_VAR) if to_field: attr = str(to_field) else: attr = obj._meta.pk.attname value = obj.serializable_value(attr) popup_response_data = json.dumps({ 'value': str(value), 'obj': str(obj), }) return TemplateResponse(request, self.popup_response_template or [ 'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name), 'admin/%s/popup_response.html' % opts.app_label, 'admin/popup_response.html', ], { 'popup_response_data': popup_response_data, }) elif "_continue" in request.POST or ( # Redirecting after "Save as new". "_saveasnew" in request.POST and self.save_as_continue and self.has_change_permission(request, obj) ): msg = _('The {name} “{obj}” was added successfully.') if self.has_change_permission(request, obj): msg += ' ' + _('You may edit it again below.') self.message_user(request, format_html(msg, **msg_dict), messages.SUCCESS) if post_url_continue is None: post_url_continue = obj_url post_url_continue = add_preserved_filters( {'preserved_filters': preserved_filters, 'opts': opts}, post_url_continue ) return HttpResponseRedirect(post_url_continue) elif "_addanother" in request.POST: msg = format_html( _('The {name} “{obj}” was added successfully. You may add another {name} below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = request.path redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) else: msg = format_html( _('The {name} “{obj}” was added successfully.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) return self.response_post_save_add(request, obj) def response_change(self, request, obj): """ Determine the HttpResponse for the change_view stage. """ if IS_POPUP_VAR in request.POST: opts = obj._meta to_field = request.POST.get(TO_FIELD_VAR) attr = str(to_field) if to_field else opts.pk.attname value = request.resolver_match.kwargs['object_id'] new_value = obj.serializable_value(attr) popup_response_data = json.dumps({ 'action': 'change', 'value': str(value), 'obj': str(obj), 'new_value': str(new_value), }) return TemplateResponse(request, self.popup_response_template or [ 'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name), 'admin/%s/popup_response.html' % opts.app_label, 'admin/popup_response.html', ], { 'popup_response_data': popup_response_data, }) opts = self.model._meta preserved_filters = self.get_preserved_filters(request) msg_dict = { 'name': opts.verbose_name, 'obj': format_html('<a href="{}">{}</a>', urlquote(request.path), obj), } if "_continue" in request.POST: msg = format_html( _('The {name} “{obj}” was changed successfully. You may edit it again below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = request.path redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) elif "_saveasnew" in request.POST: msg = format_html( _('The {name} “{obj}” was added successfully. 
You may edit it again below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = reverse('admin:%s_%s_change' % (opts.app_label, opts.model_name), args=(obj.pk,), current_app=self.admin_site.name) redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) elif "_addanother" in request.POST: msg = format_html( _('The {name} “{obj}” was changed successfully. You may add another {name} below.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) redirect_url = reverse('admin:%s_%s_add' % (opts.app_label, opts.model_name), current_app=self.admin_site.name) redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) else: msg = format_html( _('The {name} “{obj}” was changed successfully.'), **msg_dict ) self.message_user(request, msg, messages.SUCCESS) return self.response_post_save_change(request, obj) def _response_post_save(self, request, obj): opts = self.model._meta if self.has_view_or_change_permission(request): post_url = reverse('admin:%s_%s_changelist' % (opts.app_label, opts.model_name), current_app=self.admin_site.name) preserved_filters = self.get_preserved_filters(request) post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url) else: post_url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(post_url) def response_post_save_add(self, request, obj): """ Figure out where to redirect after the 'Save' button has been pressed when adding a new object. """ return self._response_post_save(request, obj) def response_post_save_change(self, request, obj): """ Figure out where to redirect after the 'Save' button has been pressed when editing an existing object. """ return self._response_post_save(request, obj) def response_action(self, request, queryset): """ Handle an admin action. This is called if a request is POSTed to the changelist; it returns an HttpResponse if the action was handled, and None otherwise. """ # There can be multiple action forms on the page (at the top # and bottom of the change list, for example). Get the action # whose button was pushed. try: action_index = int(request.POST.get('index', 0)) except ValueError: action_index = 0 # Construct the action form. data = request.POST.copy() data.pop(helpers.ACTION_CHECKBOX_NAME, None) data.pop("index", None) # Use the action whose button was pushed try: data.update({'action': data.getlist('action')[action_index]}) except IndexError: # If we didn't get an action from the chosen form that's invalid # POST data, so by deleting action it'll fail the validation check # below. So no need to do anything here pass action_form = self.action_form(data, auto_id=None) action_form.fields['action'].choices = self.get_action_choices(request) # If the form's valid we can handle the action. if action_form.is_valid(): action = action_form.cleaned_data['action'] select_across = action_form.cleaned_data['select_across'] func = self.get_actions(request)[action][0] # Get the list of selected PKs. If nothing's selected, we can't # perform an action on it, so bail. Except we want to perform # the action explicitly on all objects. 
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME) if not selected and not select_across: # Reminder that something needs to be selected or nothing will happen msg = _("Items must be selected in order to perform " "actions on them. No items have been changed.") self.message_user(request, msg, messages.WARNING) return None if not select_across: # Perform the action only on the selected objects queryset = queryset.filter(pk__in=selected) response = func(self, request, queryset) # Actions may return an HttpResponse-like object, which will be # used as the response from the POST. If not, we'll be a good # little HTTP citizen and redirect back to the changelist page. if isinstance(response, HttpResponseBase): return response else: return HttpResponseRedirect(request.get_full_path()) else: msg = _("No action selected.") self.message_user(request, msg, messages.WARNING) return None def response_delete(self, request, obj_display, obj_id): """ Determine the HttpResponse for the delete_view stage. """ opts = self.model._meta if IS_POPUP_VAR in request.POST: popup_response_data = json.dumps({ 'action': 'delete', 'value': str(obj_id), }) return TemplateResponse(request, self.popup_response_template or [ 'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name), 'admin/%s/popup_response.html' % opts.app_label, 'admin/popup_response.html', ], { 'popup_response_data': popup_response_data, }) self.message_user( request, _('The %(name)s “%(obj)s” was deleted successfully.') % { 'name': opts.verbose_name, 'obj': obj_display, }, messages.SUCCESS, ) if self.has_change_permission(request, None): post_url = reverse( 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name), current_app=self.admin_site.name, ) preserved_filters = self.get_preserved_filters(request) post_url = add_preserved_filters( {'preserved_filters': preserved_filters, 'opts': opts}, post_url ) else: post_url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(post_url) def render_delete_form(self, request, context): opts = self.model._meta app_label = opts.app_label request.current_app = self.admin_site.name context.update( to_field_var=TO_FIELD_VAR, is_popup_var=IS_POPUP_VAR, media=self.media, ) return TemplateResponse( request, self.delete_confirmation_template or [ "admin/{}/{}/delete_confirmation.html".format(app_label, opts.model_name), "admin/{}/delete_confirmation.html".format(app_label), "admin/delete_confirmation.html", ], context, ) def get_inline_formsets(self, request, formsets, inline_instances, obj=None): # Edit permissions on parent model are required for editable inlines. can_edit_parent = self.has_change_permission(request, obj) if obj else self.has_add_permission(request) inline_admin_formsets = [] for inline, formset in zip(inline_instances, formsets): fieldsets = list(inline.get_fieldsets(request, obj)) readonly = list(inline.get_readonly_fields(request, obj)) if can_edit_parent: has_add_permission = inline.has_add_permission(request, obj) has_change_permission = inline.has_change_permission(request, obj) has_delete_permission = inline.has_delete_permission(request, obj) else: # Disable all edit-permissions, and overide formset settings. 
has_add_permission = has_change_permission = has_delete_permission = False formset.extra = formset.max_num = 0 has_view_permission = inline.has_view_permission(request, obj) prepopulated = dict(inline.get_prepopulated_fields(request, obj)) inline_admin_formset = helpers.InlineAdminFormSet( inline, formset, fieldsets, prepopulated, readonly, model_admin=self, has_add_permission=has_add_permission, has_change_permission=has_change_permission, has_delete_permission=has_delete_permission, has_view_permission=has_view_permission, ) inline_admin_formsets.append(inline_admin_formset) return inline_admin_formsets def get_changeform_initial_data(self, request): """ Get the initial form data from the request's GET params. """ initial = dict(request.GET.items()) for k in initial: try: f = self.model._meta.get_field(k) except FieldDoesNotExist: continue # We have to special-case M2Ms as a list of comma-separated PKs. if isinstance(f, models.ManyToManyField): initial[k] = initial[k].split(",") return initial def _get_obj_does_not_exist_redirect(self, request, opts, object_id): """ Create a message informing the user that the object doesn't exist and return a redirect to the admin index page. """ msg = _('%(name)s with ID “%(key)s” doesn’t exist. Perhaps it was deleted?') % { 'name': opts.verbose_name, 'key': unquote(object_id), } self.message_user(request, msg, messages.WARNING) url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(url) @csrf_protect_m def changeform_view(self, request, object_id=None, form_url='', extra_context=None): with transaction.atomic(using=router.db_for_write(self.model)): return self._changeform_view(request, object_id, form_url, extra_context) def _changeform_view(self, request, object_id, form_url, extra_context): to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR)) if to_field and not self.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." 
% to_field) model = self.model opts = model._meta if request.method == 'POST' and '_saveasnew' in request.POST: object_id = None add = object_id is None if add: if not self.has_add_permission(request): raise PermissionDenied obj = None else: obj = self.get_object(request, unquote(object_id), to_field) if request.method == 'POST': if not self.has_change_permission(request, obj): raise PermissionDenied else: if not self.has_view_or_change_permission(request, obj): raise PermissionDenied if obj is None: return self._get_obj_does_not_exist_redirect(request, opts, object_id) fieldsets = self.get_fieldsets(request, obj) ModelForm = self.get_form( request, obj, change=not add, fields=flatten_fieldsets(fieldsets) ) if request.method == 'POST': form = ModelForm(request.POST, request.FILES, instance=obj) formsets, inline_instances = self._create_formsets( request, form.instance if add else obj, change=not add, ) form_validated = form.is_valid() if form_validated: new_object = self.save_form(request, form, change=not add) else: new_object = form.instance if all_valid(formsets) and form_validated: self.save_model(request, new_object, form, not add) self.save_related(request, form, formsets, not add) change_message = self.construct_change_message(request, form, formsets, add) if add: self.log_addition(request, new_object, change_message) return self.response_add(request, new_object) else: self.log_change(request, new_object, change_message) return self.response_change(request, new_object) else: form_validated = False else: if add: initial = self.get_changeform_initial_data(request) form = ModelForm(initial=initial) formsets, inline_instances = self._create_formsets(request, form.instance, change=False) else: form = ModelForm(instance=obj) formsets, inline_instances = self._create_formsets(request, obj, change=True) if not add and not self.has_change_permission(request, obj): readonly_fields = flatten_fieldsets(fieldsets) else: readonly_fields = self.get_readonly_fields(request, obj) adminForm = helpers.AdminForm( form, list(fieldsets), # Clear prepopulated fields on a view-only form to avoid a crash. self.get_prepopulated_fields(request, obj) if add or self.has_change_permission(request, obj) else {}, readonly_fields, model_admin=self) media = self.media + adminForm.media inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj) for inline_formset in inline_formsets: media = media + inline_formset.media if add: title = _('Add %s') elif self.has_change_permission(request, obj): title = _('Change %s') else: title = _('View %s') context = { **self.admin_site.each_context(request), 'title': title % opts.verbose_name, 'subtitle': str(obj) if obj else None, 'adminform': adminForm, 'object_id': object_id, 'original': obj, 'is_popup': IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET, 'to_field': to_field, 'media': media, 'inline_admin_formsets': inline_formsets, 'errors': helpers.AdminErrorList(form, formsets), 'preserved_filters': self.get_preserved_filters(request), } # Hide the "Save" and "Save and continue" buttons if "Save as New" was # previously chosen to prevent the interface from getting confusing. if request.method == 'POST' and not form_validated and "_saveasnew" in request.POST: context['show_save'] = False context['show_save_and_continue'] = False # Use the change template instead of the add template. 
add = False context.update(extra_context or {}) return self.render_change_form(request, context, add=add, change=not add, obj=obj, form_url=form_url) def add_view(self, request, form_url='', extra_context=None): return self.changeform_view(request, None, form_url, extra_context) def change_view(self, request, object_id, form_url='', extra_context=None): return self.changeform_view(request, object_id, form_url, extra_context) def _get_edited_object_pks(self, request, prefix): """Return POST data values of list_editable primary keys.""" pk_pattern = re.compile( r'{}-\d+-{}$'.format(re.escape(prefix), self.model._meta.pk.name) ) return [value for key, value in request.POST.items() if pk_pattern.match(key)] def _get_list_editable_queryset(self, request, prefix): """ Based on POST data, return a queryset of the objects that were edited via list_editable. """ object_pks = self._get_edited_object_pks(request, prefix) queryset = self.get_queryset(request) validate = queryset.model._meta.pk.to_python try: for pk in object_pks: validate(pk) except ValidationError: # Disable the optimization if the POST data was tampered with. return queryset return queryset.filter(pk__in=object_pks) @csrf_protect_m def changelist_view(self, request, extra_context=None): """ The 'change list' admin view for this model. """ from django.contrib.admin.views.main import ERROR_FLAG opts = self.model._meta app_label = opts.app_label if not self.has_view_or_change_permission(request): raise PermissionDenied try: cl = self.get_changelist_instance(request) except IncorrectLookupParameters: # Wacky lookup parameters were given, so redirect to the main # changelist page, without parameters, and pass an 'invalid=1' # parameter via the query string. If wacky parameters were given # and the 'invalid=1' parameter was already in the query string, # something is screwed up with the database, so display an error # page. if ERROR_FLAG in request.GET: return SimpleTemplateResponse('admin/invalid_setup.html', { 'title': _('Database error'), }) return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1') # If the request was POSTed, this might be a bulk action or a bulk # edit. Try to look up an action or confirmation first, but if this # isn't an action the POST will fall through to the bulk edit check, # below. action_failed = False selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME) actions = self.get_actions(request) # Actions with no confirmation if (actions and request.method == 'POST' and 'index' in request.POST and '_save' not in request.POST): if selected: response = self.response_action(request, queryset=cl.get_queryset(request)) if response: return response else: action_failed = True else: msg = _("Items must be selected in order to perform " "actions on them. No items have been changed.") self.message_user(request, msg, messages.WARNING) action_failed = True # Actions with confirmation if (actions and request.method == 'POST' and helpers.ACTION_CHECKBOX_NAME in request.POST and 'index' not in request.POST and '_save' not in request.POST): if selected: response = self.response_action(request, queryset=cl.get_queryset(request)) if response: return response else: action_failed = True if action_failed: # Redirect back to the changelist page to avoid resubmitting the # form if the user refreshes the browser or uses the "No, take # me back" button on the action confirmation page. 
return HttpResponseRedirect(request.get_full_path()) # If we're allowing changelist editing, we need to construct a formset # for the changelist given all the fields to be edited. Then we'll # use the formset to validate/process POSTed data. formset = cl.formset = None # Handle POSTed bulk-edit data. if request.method == 'POST' and cl.list_editable and '_save' in request.POST: if not self.has_change_permission(request): raise PermissionDenied FormSet = self.get_changelist_formset(request) modified_objects = self._get_list_editable_queryset(request, FormSet.get_default_prefix()) formset = cl.formset = FormSet(request.POST, request.FILES, queryset=modified_objects) if formset.is_valid(): changecount = 0 for form in formset.forms: if form.has_changed(): obj = self.save_form(request, form, change=True) self.save_model(request, obj, form, change=True) self.save_related(request, form, formsets=[], change=True) change_msg = self.construct_change_message(request, form, None) self.log_change(request, obj, change_msg) changecount += 1 if changecount: msg = ngettext( "%(count)s %(name)s was changed successfully.", "%(count)s %(name)s were changed successfully.", changecount ) % { 'count': changecount, 'name': model_ngettext(opts, changecount), } self.message_user(request, msg, messages.SUCCESS) return HttpResponseRedirect(request.get_full_path()) # Handle GET -- construct a formset for display. elif cl.list_editable and self.has_change_permission(request): FormSet = self.get_changelist_formset(request) formset = cl.formset = FormSet(queryset=cl.result_list) # Build the list of media to be used by the formset. if formset: media = self.media + formset.media else: media = self.media # Build the action form and populate it with available actions. if actions: action_form = self.action_form(auto_id=None) action_form.fields['action'].choices = self.get_action_choices(request) media += action_form.media else: action_form = None selection_note_all = ngettext( '%(total_count)s selected', 'All %(total_count)s selected', cl.result_count ) context = { **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'selection_note': _('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)}, 'selection_note_all': selection_note_all % {'total_count': cl.result_count}, 'title': cl.title, 'subtitle': None, 'is_popup': cl.is_popup, 'to_field': cl.to_field, 'cl': cl, 'media': media, 'has_add_permission': self.has_add_permission(request), 'opts': cl.opts, 'action_form': action_form, 'actions_on_top': self.actions_on_top, 'actions_on_bottom': self.actions_on_bottom, 'actions_selection_counter': self.actions_selection_counter, 'preserved_filters': self.get_preserved_filters(request), **(extra_context or {}), } request.current_app = self.admin_site.name return TemplateResponse(request, self.change_list_template or [ 'admin/%s/%s/change_list.html' % (app_label, opts.model_name), 'admin/%s/change_list.html' % app_label, 'admin/change_list.html' ], context) def get_deleted_objects(self, objs, request): """ Hook for customizing the delete process for the delete view and the "delete selected" action. """ return get_deleted_objects(objs, request, self.admin_site) @csrf_protect_m def delete_view(self, request, object_id, extra_context=None): with transaction.atomic(using=router.db_for_write(self.model)): return self._delete_view(request, object_id, extra_context) def _delete_view(self, request, object_id, extra_context): "The 'delete' admin view for this model." 
opts = self.model._meta app_label = opts.app_label to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR)) if to_field and not self.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field) obj = self.get_object(request, unquote(object_id), to_field) if not self.has_delete_permission(request, obj): raise PermissionDenied if obj is None: return self._get_obj_does_not_exist_redirect(request, opts, object_id) # Populate deleted_objects, a data structure of all related objects that # will also be deleted. deleted_objects, model_count, perms_needed, protected = self.get_deleted_objects([obj], request) if request.POST and not protected: # The user has confirmed the deletion. if perms_needed: raise PermissionDenied obj_display = str(obj) attr = str(to_field) if to_field else opts.pk.attname obj_id = obj.serializable_value(attr) self.log_deletion(request, obj, obj_display) self.delete_model(request, obj) return self.response_delete(request, obj_display, obj_id) object_name = str(opts.verbose_name) if perms_needed or protected: title = _("Cannot delete %(name)s") % {"name": object_name} else: title = _("Are you sure?") context = { **self.admin_site.each_context(request), 'title': title, 'subtitle': None, 'object_name': object_name, 'object': obj, 'deleted_objects': deleted_objects, 'model_count': dict(model_count).items(), 'perms_lacking': perms_needed, 'protected': protected, 'opts': opts, 'app_label': app_label, 'preserved_filters': self.get_preserved_filters(request), 'is_popup': IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET, 'to_field': to_field, **(extra_context or {}), } return self.render_delete_form(request, context) def history_view(self, request, object_id, extra_context=None): "The 'history' admin view for this model." from django.contrib.admin.models import LogEntry from django.contrib.admin.views.main import PAGE_VAR # First check if the user can see this history. model = self.model obj = self.get_object(request, unquote(object_id)) if obj is None: return self._get_obj_does_not_exist_redirect(request, model._meta, object_id) if not self.has_view_or_change_permission(request, obj): raise PermissionDenied # Then get the history for this object. 
opts = model._meta app_label = opts.app_label action_list = LogEntry.objects.filter( object_id=unquote(object_id), content_type=get_content_type_for_model(model) ).select_related().order_by('action_time') paginator = self.get_paginator(request, action_list, 100) page_number = request.GET.get(PAGE_VAR, 1) page_obj = paginator.get_page(page_number) page_range = paginator.get_elided_page_range(page_obj.number) context = { **self.admin_site.each_context(request), 'title': _('Change history: %s') % obj, 'subtitle': None, 'action_list': page_obj, 'page_range': page_range, 'page_var': PAGE_VAR, 'pagination_required': paginator.count > 100, 'module_name': str(capfirst(opts.verbose_name_plural)), 'object': obj, 'opts': opts, 'preserved_filters': self.get_preserved_filters(request), **(extra_context or {}), } request.current_app = self.admin_site.name return TemplateResponse(request, self.object_history_template or [ "admin/%s/%s/object_history.html" % (app_label, opts.model_name), "admin/%s/object_history.html" % app_label, "admin/object_history.html" ], context) def get_formset_kwargs(self, request, obj, inline, prefix): formset_params = { 'instance': obj, 'prefix': prefix, 'queryset': inline.get_queryset(request), } if request.method == 'POST': formset_params.update({ 'data': request.POST.copy(), 'files': request.FILES, 'save_as_new': '_saveasnew' in request.POST }) return formset_params def _create_formsets(self, request, obj, change): "Helper function to generate formsets for add/change_view." formsets = [] inline_instances = [] prefixes = {} get_formsets_args = [request] if change: get_formsets_args.append(obj) for FormSet, inline in self.get_formsets_with_inlines(*get_formsets_args): prefix = FormSet.get_default_prefix() prefixes[prefix] = prefixes.get(prefix, 0) + 1 if prefixes[prefix] != 1 or not prefix: prefix = "%s-%s" % (prefix, prefixes[prefix]) formset_params = self.get_formset_kwargs(request, obj, inline, prefix) formset = FormSet(**formset_params) def user_deleted_form(request, obj, formset, index): """Return whether or not the user deleted the form.""" return ( inline.has_delete_permission(request, obj) and '{}-{}-DELETE'.format(formset.prefix, index) in request.POST ) # Bypass validation of each view-only inline form (since the form's # data won't be in request.POST), unless the form was deleted. if not inline.has_change_permission(request, obj if change else None): for index, form in enumerate(formset.initial_forms): if user_deleted_form(request, obj, formset, index): continue form._errors = {} form.cleaned_data = form.initial formsets.append(formset) inline_instances.append(inline) return formsets, inline_instances class InlineModelAdmin(BaseModelAdmin): """ Options for inline editing of ``model`` instances. Provide ``fk_name`` to specify the attribute name of the ``ForeignKey`` from ``model`` to its parent. This is required if ``model`` has more than one ``ForeignKey`` to its parent. 
""" model = None fk_name = None formset = BaseInlineFormSet extra = 3 min_num = None max_num = None template = None verbose_name = None verbose_name_plural = None can_delete = True show_change_link = False checks_class = InlineModelAdminChecks classes = None def __init__(self, parent_model, admin_site): self.admin_site = admin_site self.parent_model = parent_model self.opts = self.model._meta self.has_registered_model = admin_site.is_registered(self.model) super().__init__() if self.verbose_name_plural is None: if self.verbose_name is None: self.verbose_name_plural = self.model._meta.verbose_name_plural else: self.verbose_name_plural = format_lazy('{}s', self.verbose_name) if self.verbose_name is None: self.verbose_name = self.model._meta.verbose_name @property def media(self): extra = '' if settings.DEBUG else '.min' js = ['vendor/jquery/jquery%s.js' % extra, 'jquery.init.js', 'inlines.js'] if self.filter_vertical or self.filter_horizontal: js.extend(['SelectBox.js', 'SelectFilter2.js']) if self.classes and 'collapse' in self.classes: js.append('collapse.js') return forms.Media(js=['admin/js/%s' % url for url in js]) def get_extra(self, request, obj=None, **kwargs): """Hook for customizing the number of extra inline forms.""" return self.extra def get_min_num(self, request, obj=None, **kwargs): """Hook for customizing the min number of inline forms.""" return self.min_num def get_max_num(self, request, obj=None, **kwargs): """Hook for customizing the max number of extra inline forms.""" return self.max_num def get_formset(self, request, obj=None, **kwargs): """Return a BaseInlineFormSet class for use in admin add/change views.""" if 'fields' in kwargs: fields = kwargs.pop('fields') else: fields = flatten_fieldsets(self.get_fieldsets(request, obj)) excluded = self.get_exclude(request, obj) exclude = [] if excluded is None else list(excluded) exclude.extend(self.get_readonly_fields(request, obj)) if excluded is None and hasattr(self.form, '_meta') and self.form._meta.exclude: # Take the custom ModelForm's Meta.exclude into account only if the # InlineModelAdmin doesn't define its own. exclude.extend(self.form._meta.exclude) # If exclude is an empty list we use None, since that's the actual # default. exclude = exclude or None can_delete = self.can_delete and self.has_delete_permission(request, obj) defaults = { 'form': self.form, 'formset': self.formset, 'fk_name': self.fk_name, 'fields': fields, 'exclude': exclude, 'formfield_callback': partial(self.formfield_for_dbfield, request=request), 'extra': self.get_extra(request, obj, **kwargs), 'min_num': self.get_min_num(request, obj, **kwargs), 'max_num': self.get_max_num(request, obj, **kwargs), 'can_delete': can_delete, **kwargs, } base_model_form = defaults['form'] can_change = self.has_change_permission(request, obj) if request else True can_add = self.has_add_permission(request, obj) if request else True class DeleteProtectedModelForm(base_model_form): def hand_clean_DELETE(self): """ We don't validate the 'DELETE' field itself because on templates it's not rendered using the field information, but just using a generic "deletion_field" of the InlineModelAdmin. 
""" if self.cleaned_data.get(DELETION_FIELD_NAME, False): using = router.db_for_write(self._meta.model) collector = NestedObjects(using=using) if self.instance._state.adding: return collector.collect([self.instance]) if collector.protected: objs = [] for p in collector.protected: objs.append( # Translators: Model verbose name and instance representation, # suitable to be an item in a list. _('%(class_name)s %(instance)s') % { 'class_name': p._meta.verbose_name, 'instance': p} ) params = { 'class_name': self._meta.model._meta.verbose_name, 'instance': self.instance, 'related_objects': get_text_list(objs, _('and')), } msg = _("Deleting %(class_name)s %(instance)s would require " "deleting the following protected related objects: " "%(related_objects)s") raise ValidationError(msg, code='deleting_protected', params=params) def is_valid(self): result = super().is_valid() self.hand_clean_DELETE() return result def has_changed(self): # Protect against unauthorized edits. if not can_change and not self.instance._state.adding: return False if not can_add and self.instance._state.adding: return False return super().has_changed() defaults['form'] = DeleteProtectedModelForm if defaults['fields'] is None and not modelform_defines_fields(defaults['form']): defaults['fields'] = forms.ALL_FIELDS return inlineformset_factory(self.parent_model, self.model, **defaults) def _get_form_for_get_fields(self, request, obj=None): return self.get_formset(request, obj, fields=None).form def get_queryset(self, request): queryset = super().get_queryset(request) if not self.has_view_or_change_permission(request): queryset = queryset.none() return queryset def _has_any_perms_for_target_model(self, request, perms): """ This method is called only when the ModelAdmin's model is for an ManyToManyField's implicit through model (if self.opts.auto_created). Return True if the user has any of the given permissions ('add', 'change', etc.) for the model that points to the through model. """ opts = self.opts # Find the target model of an auto-created many-to-many relationship. for field in opts.fields: if field.remote_field and field.remote_field.model != self.parent_model: opts = field.remote_field.model._meta break return any( request.user.has_perm('%s.%s' % (opts.app_label, get_permission_codename(perm, opts))) for perm in perms ) def has_add_permission(self, request, obj): if self.opts.auto_created: # Auto-created intermediate models don't have their own # permissions. The user needs to have the change permission for the # related model in order to be able to do anything with the # intermediate model. return self._has_any_perms_for_target_model(request, ['change']) return super().has_add_permission(request) def has_change_permission(self, request, obj=None): if self.opts.auto_created: # Same comment as has_add_permission(). return self._has_any_perms_for_target_model(request, ['change']) return super().has_change_permission(request) def has_delete_permission(self, request, obj=None): if self.opts.auto_created: # Same comment as has_add_permission(). return self._has_any_perms_for_target_model(request, ['change']) return super().has_delete_permission(request, obj) def has_view_permission(self, request, obj=None): if self.opts.auto_created: # Same comment as has_add_permission(). The 'change' permission # also implies the 'view' permission. 
return self._has_any_perms_for_target_model(request, ['view', 'change']) return super().has_view_permission(request) class StackedInline(InlineModelAdmin): template = 'admin/edit_inline/stacked.html' class TabularInline(InlineModelAdmin): template = 'admin/edit_inline/tabular.html'
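# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Django itself): the save_model(),
# delete_queryset() and response_change() hooks documented above are meant to
# be overridden on project ModelAdmins. "myapp", the Article model and its
# "updated_by"/"is_archived"/"published" fields are hypothetical placeholders.
# ---------------------------------------------------------------------------
from django.contrib import admin, messages
from django.http import HttpResponseRedirect

from myapp.models import Article   # hypothetical model


@admin.register(Article)
class ArticleAdmin(admin.ModelAdmin):
    def save_model(self, request, obj, form, change):
        # Stamp the editing user before the row is written.
        obj.updated_by = request.user
        super().save_model(request, obj, form, change)

    def delete_queryset(self, request, queryset):
        # Soft-delete via the "delete selected" action instead of a bulk DELETE.
        queryset.update(is_archived=True)

    def response_change(self, request, obj):
        # React to an extra submit button rendered by a custom change form.
        if '_publish' in request.POST:
            obj.published = True
            obj.save(update_fields=['published'])
            self.message_user(request, 'Published "%s".' % obj, messages.SUCCESS)
            return HttpResponseRedirect(request.path)
        return super().response_change(request, obj)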
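# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Django itself): when the inline model has
# more than one ForeignKey to its parent, fk_name (see InlineModelAdmin above)
# picks the one the formset follows; get_extra()/get_max_num() can be tuned
# per request. "myapp", Thread, Message and the "is_closed" field are
# hypothetical placeholders.
# ---------------------------------------------------------------------------
from django.contrib import admin

from myapp.models import Message, Thread   # hypothetical models


class MessageInline(admin.TabularInline):
    model = Message
    fk_name = 'thread'     # Message has a second FK to Thread (e.g. 'moved_from').
    extra = 1
    show_change_link = True

    def get_extra(self, request, obj=None, **kwargs):
        # No blank forms once the parent thread is closed.
        if obj is not None and obj.is_closed:
            return 0
        return super().get_extra(request, obj, **kwargs)


@admin.register(Thread)
class ThreadAdmin(admin.ModelAdmin):
    inlines = [MessageInline]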
177aace9ae1eaf81b2d81ed8eeab49e37f5e64512f728f4fb3ef21d9fd3066b9
import re from functools import update_wrapper from weakref import WeakSet from django.apps import apps from django.conf import settings from django.contrib.admin import ModelAdmin, actions from django.contrib.admin.views.autocomplete import AutocompleteJsonView from django.contrib.auth import REDIRECT_FIELD_NAME from django.core.exceptions import ImproperlyConfigured from django.db.models.base import ModelBase from django.http import ( Http404, HttpResponsePermanentRedirect, HttpResponseRedirect, ) from django.template.response import TemplateResponse from django.urls import NoReverseMatch, Resolver404, resolve, reverse from django.utils.decorators import method_decorator from django.utils.functional import LazyObject from django.utils.module_loading import import_string from django.utils.text import capfirst from django.utils.translation import gettext as _, gettext_lazy from django.views.decorators.cache import never_cache from django.views.decorators.common import no_append_slash from django.views.decorators.csrf import csrf_protect from django.views.i18n import JavaScriptCatalog all_sites = WeakSet() class AlreadyRegistered(Exception): pass class NotRegistered(Exception): pass class AdminSite: """ An AdminSite object encapsulates an instance of the Django admin application, ready to be hooked in to your URLconf. Models are registered with the AdminSite using the register() method, and the get_urls() method can then be used to access Django view functions that present a full admin interface for the collection of registered models. """ # Text to put at the end of each page's <title>. site_title = gettext_lazy('Django site admin') # Text to put in each page's <h1>. site_header = gettext_lazy('Django administration') # Text to put at the top of the admin index page. index_title = gettext_lazy('Site administration') # URL for the "View site" link at the top of each admin page. site_url = '/' enable_nav_sidebar = True empty_value_display = '-' login_form = None index_template = None app_index_template = None login_template = None logout_template = None password_change_template = None password_change_done_template = None final_catch_all_view = True def __init__(self, name='admin'): self._registry = {} # model_class class -> admin_class instance self.name = name self._actions = {'delete_selected': actions.delete_selected} self._global_actions = self._actions.copy() all_sites.add(self) def __repr__(self): return f'{self.__class__.__name__}(name={self.name!r})' def check(self, app_configs): """ Run the system checks on all ModelAdmins, except if they aren't customized at all. """ if app_configs is None: app_configs = apps.get_app_configs() app_configs = set(app_configs) # Speed up lookups below errors = [] modeladmins = (o for o in self._registry.values() if o.__class__ is not ModelAdmin) for modeladmin in modeladmins: if modeladmin.model._meta.app_config in app_configs: errors.extend(modeladmin.check()) return errors def register(self, model_or_iterable, admin_class=None, **options): """ Register the given model(s) with the given admin class. The model(s) should be Model classes, not instances. If an admin class isn't given, use ModelAdmin (the default admin options). If keyword arguments are given -- e.g., list_display -- apply them as options to the admin class. If a model is already registered, raise AlreadyRegistered. If a model is abstract, raise ImproperlyConfigured. 
""" admin_class = admin_class or ModelAdmin if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model._meta.abstract: raise ImproperlyConfigured( 'The model %s is abstract, so it cannot be registered with admin.' % model.__name__ ) if model in self._registry: registered_admin = str(self._registry[model]) msg = 'The model %s is already registered ' % model.__name__ if registered_admin.endswith('.ModelAdmin'): # Most likely registered without a ModelAdmin subclass. msg += 'in app %r.' % re.sub(r'\.ModelAdmin$', '', registered_admin) else: msg += 'with %r.' % registered_admin raise AlreadyRegistered(msg) # Ignore the registration if the model has been # swapped out. if not model._meta.swapped: # If we got **options then dynamically construct a subclass of # admin_class with those **options. if options: # For reasons I don't quite understand, without a __module__ # the created class appears to "live" in the wrong place, # which causes issues later on. options['__module__'] = __name__ admin_class = type("%sAdmin" % model.__name__, (admin_class,), options) # Instantiate the admin class to save in the registry self._registry[model] = admin_class(model, self) def unregister(self, model_or_iterable): """ Unregister the given model(s). If a model isn't already registered, raise NotRegistered. """ if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model not in self._registry: raise NotRegistered('The model %s is not registered' % model.__name__) del self._registry[model] def is_registered(self, model): """ Check if a model class is registered with this `AdminSite`. """ return model in self._registry def add_action(self, action, name=None): """ Register an action to be available globally. """ name = name or action.__name__ self._actions[name] = action self._global_actions[name] = action def disable_action(self, name): """ Disable a globally-registered action. Raise KeyError for invalid names. """ del self._actions[name] def get_action(self, name): """ Explicitly get a registered global action whether it's enabled or not. Raise KeyError for invalid names. """ return self._global_actions[name] @property def actions(self): """ Get all the enabled actions as an iterable of (name, func). """ return self._actions.items() def has_permission(self, request): """ Return True if the given HttpRequest has permission to view *at least one* page in the admin site. """ return request.user.is_active and request.user.is_staff def admin_view(self, view, cacheable=False): """ Decorator to create an admin view attached to this ``AdminSite``. This wraps the view and provides permission checking by calling ``self.has_permission``. You'll want to use this from within ``AdminSite.get_urls()``: class MyAdminSite(AdminSite): def get_urls(self): from django.urls import path urls = super().get_urls() urls += [ path('my_view/', self.admin_view(some_view)) ] return urls By default, admin_views are marked non-cacheable using the ``never_cache`` decorator. If the view can be safely cached, set cacheable=True. """ def inner(request, *args, **kwargs): if not self.has_permission(request): if request.path == reverse('admin:logout', current_app=self.name): index_path = reverse('admin:index', current_app=self.name) return HttpResponseRedirect(index_path) # Inner import to prevent django.contrib.admin (app) from # importing django.contrib.auth.models.User (unrelated model). 
from django.contrib.auth.views import redirect_to_login return redirect_to_login( request.get_full_path(), reverse('admin:login', current_app=self.name) ) return view(request, *args, **kwargs) if not cacheable: inner = never_cache(inner) # We add csrf_protect here so this function can be used as a utility # function for any view, without having to repeat 'csrf_protect'. if not getattr(view, 'csrf_exempt', False): inner = csrf_protect(inner) return update_wrapper(inner, view) def get_urls(self): # Since this module gets imported in the application's root package, # it cannot import models from other applications at the module level, # and django.contrib.contenttypes.views imports ContentType. from django.contrib.contenttypes import views as contenttype_views from django.urls import include, path, re_path def wrap(view, cacheable=False): def wrapper(*args, **kwargs): return self.admin_view(view, cacheable)(*args, **kwargs) wrapper.admin_site = self return update_wrapper(wrapper, view) # Admin-site-wide views. urlpatterns = [ path('', wrap(self.index), name='index'), path('login/', self.login, name='login'), path('logout/', wrap(self.logout), name='logout'), path('password_change/', wrap(self.password_change, cacheable=True), name='password_change'), path( 'password_change/done/', wrap(self.password_change_done, cacheable=True), name='password_change_done', ), path('autocomplete/', wrap(self.autocomplete_view), name='autocomplete'), path('jsi18n/', wrap(self.i18n_javascript, cacheable=True), name='jsi18n'), path( 'r/<int:content_type_id>/<path:object_id>/', wrap(contenttype_views.shortcut), name='view_on_site', ), ] # Add in each model's views, and create a list of valid URLS for the # app_index valid_app_labels = [] for model, model_admin in self._registry.items(): urlpatterns += [ path('%s/%s/' % (model._meta.app_label, model._meta.model_name), include(model_admin.urls)), ] if model._meta.app_label not in valid_app_labels: valid_app_labels.append(model._meta.app_label) # If there were ModelAdmins registered, we should have a list of app # labels for which we need to allow access to the app_index view, if valid_app_labels: regex = r'^(?P<app_label>' + '|'.join(valid_app_labels) + ')/$' urlpatterns += [ re_path(regex, wrap(self.app_index), name='app_list'), ] if self.final_catch_all_view: urlpatterns.append(re_path(r'(?P<url>.*)$', wrap(self.catch_all_view))) return urlpatterns @property def urls(self): return self.get_urls(), 'admin', self.name def each_context(self, request): """ Return a dictionary of variables to put in the template context for *every* page in the admin site. For sites running on a subpath, use the SCRIPT_NAME value if site_url hasn't been customized. """ script_name = request.META['SCRIPT_NAME'] site_url = script_name if self.site_url == '/' and script_name else self.site_url return { 'site_title': self.site_title, 'site_header': self.site_header, 'site_url': site_url, 'has_permission': self.has_permission(request), 'available_apps': self.get_app_list(request), 'is_popup': False, 'is_nav_sidebar_enabled': self.enable_nav_sidebar, } def password_change(self, request, extra_context=None): """ Handle the "change password" task -- both form display and validation. 
""" from django.contrib.admin.forms import AdminPasswordChangeForm from django.contrib.auth.views import PasswordChangeView url = reverse('admin:password_change_done', current_app=self.name) defaults = { 'form_class': AdminPasswordChangeForm, 'success_url': url, 'extra_context': {**self.each_context(request), **(extra_context or {})}, } if self.password_change_template is not None: defaults['template_name'] = self.password_change_template request.current_app = self.name return PasswordChangeView.as_view(**defaults)(request) def password_change_done(self, request, extra_context=None): """ Display the "success" page after a password change. """ from django.contrib.auth.views import PasswordChangeDoneView defaults = { 'extra_context': {**self.each_context(request), **(extra_context or {})}, } if self.password_change_done_template is not None: defaults['template_name'] = self.password_change_done_template request.current_app = self.name return PasswordChangeDoneView.as_view(**defaults)(request) def i18n_javascript(self, request, extra_context=None): """ Display the i18n JavaScript that the Django admin requires. `extra_context` is unused but present for consistency with the other admin views. """ return JavaScriptCatalog.as_view(packages=['django.contrib.admin'])(request) def logout(self, request, extra_context=None): """ Log out the user for the given HttpRequest. This should *not* assume the user is already logged in. """ from django.contrib.auth.views import LogoutView defaults = { 'extra_context': { **self.each_context(request), # Since the user isn't logged out at this point, the value of # has_permission must be overridden. 'has_permission': False, **(extra_context or {}) }, } if self.logout_template is not None: defaults['template_name'] = self.logout_template request.current_app = self.name return LogoutView.as_view(**defaults)(request) @method_decorator(never_cache) def login(self, request, extra_context=None): """ Display the login form for the given HttpRequest. """ if request.method == 'GET' and self.has_permission(request): # Already logged-in, redirect to admin index index_path = reverse('admin:index', current_app=self.name) return HttpResponseRedirect(index_path) # Since this module gets imported in the application's root package, # it cannot import models from other applications at the module level, # and django.contrib.admin.forms eventually imports User. 
from django.contrib.admin.forms import AdminAuthenticationForm from django.contrib.auth.views import LoginView context = { **self.each_context(request), 'title': _('Log in'), 'subtitle': None, 'app_path': request.get_full_path(), 'username': request.user.get_username(), } if (REDIRECT_FIELD_NAME not in request.GET and REDIRECT_FIELD_NAME not in request.POST): context[REDIRECT_FIELD_NAME] = reverse('admin:index', current_app=self.name) context.update(extra_context or {}) defaults = { 'extra_context': context, 'authentication_form': self.login_form or AdminAuthenticationForm, 'template_name': self.login_template or 'admin/login.html', } request.current_app = self.name return LoginView.as_view(**defaults)(request) def autocomplete_view(self, request): return AutocompleteJsonView.as_view(admin_site=self)(request) @no_append_slash def catch_all_view(self, request, url): if settings.APPEND_SLASH and not url.endswith('/'): urlconf = getattr(request, 'urlconf', None) try: match = resolve('%s/' % request.path_info, urlconf) except Resolver404: pass else: if getattr(match.func, 'should_append_slash', True): return HttpResponsePermanentRedirect('%s/' % request.path) raise Http404 def _build_app_dict(self, request, label=None): """ Build the app dictionary. The optional `label` parameter filters models of a specific app. """ app_dict = {} if label: models = { m: m_a for m, m_a in self._registry.items() if m._meta.app_label == label } else: models = self._registry for model, model_admin in models.items(): app_label = model._meta.app_label has_module_perms = model_admin.has_module_permission(request) if not has_module_perms: continue perms = model_admin.get_model_perms(request) # Check whether user has any perm for this module. # If so, add the module to the model_list. if True not in perms.values(): continue info = (app_label, model._meta.model_name) model_dict = { 'model': model, 'name': capfirst(model._meta.verbose_name_plural), 'object_name': model._meta.object_name, 'perms': perms, 'admin_url': None, 'add_url': None, } if perms.get('change') or perms.get('view'): model_dict['view_only'] = not perms.get('change') try: model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name) except NoReverseMatch: pass if perms.get('add'): try: model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name) except NoReverseMatch: pass if app_label in app_dict: app_dict[app_label]['models'].append(model_dict) else: app_dict[app_label] = { 'name': apps.get_app_config(app_label).verbose_name, 'app_label': app_label, 'app_url': reverse( 'admin:app_list', kwargs={'app_label': app_label}, current_app=self.name, ), 'has_module_perms': has_module_perms, 'models': [model_dict], } if label: return app_dict.get(label) return app_dict def get_app_list(self, request): """ Return a sorted list of all the installed apps that have been registered in this site. """ app_dict = self._build_app_dict(request) # Sort the apps alphabetically. app_list = sorted(app_dict.values(), key=lambda x: x['name'].lower()) # Sort the models alphabetically within each app. for app in app_list: app['models'].sort(key=lambda x: x['name']) return app_list def index(self, request, extra_context=None): """ Display the main admin index page, which lists all of the installed apps that have been registered in this site. 
""" app_list = self.get_app_list(request) context = { **self.each_context(request), 'title': self.index_title, 'subtitle': None, 'app_list': app_list, **(extra_context or {}), } request.current_app = self.name return TemplateResponse(request, self.index_template or 'admin/index.html', context) def app_index(self, request, app_label, extra_context=None): app_dict = self._build_app_dict(request, app_label) if not app_dict: raise Http404('The requested admin page does not exist.') # Sort the models alphabetically within each app. app_dict['models'].sort(key=lambda x: x['name']) context = { **self.each_context(request), 'title': _('%(app)s administration') % {'app': app_dict['name']}, 'subtitle': None, 'app_list': [app_dict], 'app_label': app_label, **(extra_context or {}), } request.current_app = self.name return TemplateResponse(request, self.app_index_template or [ 'admin/%s/app_index.html' % app_label, 'admin/app_index.html' ], context) class DefaultAdminSite(LazyObject): def _setup(self): AdminSiteClass = import_string(apps.get_app_config('admin').default_site) self._wrapped = AdminSiteClass() def __repr__(self): return repr(self._wrapped) # This global object represents the default admin site, for the common case. # You can provide your own AdminSite using the (Simple)AdminConfig.default_site # attribute. You can also instantiate AdminSite in your own code to create a # custom admin site. site = DefaultAdminSite()
b6c3520d09624b2df2639e031b6178a754ab92d783b557bec48b7f43a4f1dba8
""" Built-in, globally-available admin actions. """ from django.contrib import messages from django.contrib.admin import helpers from django.contrib.admin.decorators import action from django.contrib.admin.utils import model_ngettext from django.core.exceptions import PermissionDenied from django.template.response import TemplateResponse from django.utils.translation import gettext as _, gettext_lazy @action( permissions=['delete'], description=gettext_lazy('Delete selected %(verbose_name_plural)s'), ) def delete_selected(modeladmin, request, queryset): """ Default action which deletes the selected objects. This action first displays a confirmation page which shows all the deletable objects, or, if the user has no permission one of the related childs (foreignkeys), a "permission denied" message. Next, it deletes all selected objects and redirects back to the change list. """ opts = modeladmin.model._meta app_label = opts.app_label # Populate deletable_objects, a data structure of all related objects that # will also be deleted. deletable_objects, model_count, perms_needed, protected = modeladmin.get_deleted_objects(queryset, request) # The user has already confirmed the deletion. # Do the deletion and return None to display the change list view again. if request.POST.get('post') and not protected: if perms_needed: raise PermissionDenied n = queryset.count() if n: for obj in queryset: obj_display = str(obj) modeladmin.log_deletion(request, obj, obj_display) modeladmin.delete_queryset(request, queryset) modeladmin.message_user(request, _("Successfully deleted %(count)d %(items)s.") % { "count": n, "items": model_ngettext(modeladmin.opts, n) }, messages.SUCCESS) # Return None to display the change list page again. return None objects_name = model_ngettext(queryset) if perms_needed or protected: title = _("Cannot delete %(name)s") % {"name": objects_name} else: title = _("Are you sure?") context = { **modeladmin.admin_site.each_context(request), 'title': title, 'subtitle': None, 'objects_name': str(objects_name), 'deletable_objects': [deletable_objects], 'model_count': dict(model_count).items(), 'queryset': queryset, 'perms_lacking': perms_needed, 'protected': protected, 'opts': opts, 'action_checkbox_name': helpers.ACTION_CHECKBOX_NAME, 'media': modeladmin.media, } request.current_app = modeladmin.admin_site.name # Display the confirmation page return TemplateResponse(request, modeladmin.delete_selected_confirmation_template or [ "admin/%s/%s/delete_selected_confirmation.html" % (app_label, opts.model_name), "admin/%s/delete_selected_confirmation.html" % app_label, "admin/delete_selected_confirmation.html" ], context)
7ce5227758dcdb0e2f56bc048a4ce5ceab395b7528f969fbe547e8c3332a9b33
""" Form Widget classes specific to the Django admin site. """ import copy import json from django import forms from django.conf import settings from django.core.exceptions import ValidationError from django.core.validators import URLValidator from django.db.models import CASCADE, UUIDField from django.urls import reverse from django.urls.exceptions import NoReverseMatch from django.utils.html import smart_urlquote from django.utils.http import urlencode from django.utils.text import Truncator from django.utils.translation import get_language, gettext as _ class FilteredSelectMultiple(forms.SelectMultiple): """ A SelectMultiple with a JavaScript filter interface. Note that the resulting JavaScript assumes that the jsi18n catalog has been loaded in the page """ class Media: js = [ 'admin/js/core.js', 'admin/js/SelectBox.js', 'admin/js/SelectFilter2.js', ] def __init__(self, verbose_name, is_stacked, attrs=None, choices=()): self.verbose_name = verbose_name self.is_stacked = is_stacked super().__init__(attrs, choices) def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) context['widget']['attrs']['class'] = 'selectfilter' if self.is_stacked: context['widget']['attrs']['class'] += 'stacked' context['widget']['attrs']['data-field-name'] = self.verbose_name context['widget']['attrs']['data-is-stacked'] = int(self.is_stacked) return context class AdminDateWidget(forms.DateInput): class Media: js = [ 'admin/js/calendar.js', 'admin/js/admin/DateTimeShortcuts.js', ] def __init__(self, attrs=None, format=None): attrs = {'class': 'vDateField', 'size': '10', **(attrs or {})} super().__init__(attrs=attrs, format=format) class AdminTimeWidget(forms.TimeInput): class Media: js = [ 'admin/js/calendar.js', 'admin/js/admin/DateTimeShortcuts.js', ] def __init__(self, attrs=None, format=None): attrs = {'class': 'vTimeField', 'size': '8', **(attrs or {})} super().__init__(attrs=attrs, format=format) class AdminSplitDateTime(forms.SplitDateTimeWidget): """ A SplitDateTime Widget that has some admin-specific styling. """ template_name = 'admin/widgets/split_datetime.html' def __init__(self, attrs=None): widgets = [AdminDateWidget, AdminTimeWidget] # Note that we're calling MultiWidget, not SplitDateTimeWidget, because # we want to define widgets. forms.MultiWidget.__init__(self, widgets, attrs) def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) context['date_label'] = _('Date:') context['time_label'] = _('Time:') return context class AdminRadioSelect(forms.RadioSelect): template_name = 'admin/widgets/radio.html' class AdminFileWidget(forms.ClearableFileInput): template_name = 'admin/widgets/clearable_file_input.html' def url_params_from_lookup_dict(lookups): """ Convert the type of lookups specified in a ForeignKey limit_choices_to attribute to a dictionary of query parameters """ params = {} if lookups and hasattr(lookups, 'items'): for k, v in lookups.items(): if callable(v): v = v() if isinstance(v, (tuple, list)): v = ','.join(str(x) for x in v) elif isinstance(v, bool): v = ('0', '1')[v] else: v = str(v) params[k] = v return params class ForeignKeyRawIdWidget(forms.TextInput): """ A Widget for displaying ForeignKeys in the "raw_id" interface rather than in a <select> box. 
""" template_name = 'admin/widgets/foreign_key_raw_id.html' def __init__(self, rel, admin_site, attrs=None, using=None): self.rel = rel self.admin_site = admin_site self.db = using super().__init__(attrs) def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) rel_to = self.rel.model if rel_to in self.admin_site._registry: # The related object is registered with the same AdminSite related_url = reverse( 'admin:%s_%s_changelist' % ( rel_to._meta.app_label, rel_to._meta.model_name, ), current_app=self.admin_site.name, ) params = self.url_parameters() if params: related_url += '?' + urlencode(params) context['related_url'] = related_url context['link_title'] = _('Lookup') # The JavaScript code looks for this class. css_class = 'vForeignKeyRawIdAdminField' if isinstance(self.rel.get_related_field(), UUIDField): css_class += ' vUUIDField' context['widget']['attrs'].setdefault('class', css_class) else: context['related_url'] = None if context['widget']['value']: context['link_label'], context['link_url'] = self.label_and_url_for_value(value) else: context['link_label'] = None return context def base_url_parameters(self): limit_choices_to = self.rel.limit_choices_to if callable(limit_choices_to): limit_choices_to = limit_choices_to() return url_params_from_lookup_dict(limit_choices_to) def url_parameters(self): from django.contrib.admin.views.main import TO_FIELD_VAR params = self.base_url_parameters() params.update({TO_FIELD_VAR: self.rel.get_related_field().name}) return params def label_and_url_for_value(self, value): key = self.rel.get_related_field().name try: obj = self.rel.model._default_manager.using(self.db).get(**{key: value}) except (ValueError, self.rel.model.DoesNotExist, ValidationError): return '', '' try: url = reverse( '%s:%s_%s_change' % ( self.admin_site.name, obj._meta.app_label, obj._meta.object_name.lower(), ), args=(obj.pk,) ) except NoReverseMatch: url = '' # Admin not registered for target model. return Truncator(obj).words(14), url class ManyToManyRawIdWidget(ForeignKeyRawIdWidget): """ A Widget for displaying ManyToMany ids in the "raw_id" interface rather than in a <select multiple> box. """ template_name = 'admin/widgets/many_to_many_raw_id.html' def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) if self.rel.model in self.admin_site._registry: # The related object is registered with the same AdminSite context['widget']['attrs']['class'] = 'vManyToManyRawIdAdminField' return context def url_parameters(self): return self.base_url_parameters() def label_and_url_for_value(self, value): return '', '' def value_from_datadict(self, data, files, name): value = data.get(name) if value: return value.split(',') def format_value(self, value): return ','.join(str(v) for v in value) if value else '' class RelatedFieldWidgetWrapper(forms.Widget): """ This class is a wrapper to a given widget to add the add icon for the admin interface. """ template_name = 'admin/widgets/related_widget_wrapper.html' def __init__(self, widget, rel, admin_site, can_add_related=None, can_change_related=False, can_delete_related=False, can_view_related=False): self.needs_multipart_form = widget.needs_multipart_form self.attrs = widget.attrs self.choices = widget.choices self.widget = widget self.rel = rel # Backwards compatible check for whether a user can add related # objects. 
if can_add_related is None: can_add_related = rel.model in admin_site._registry self.can_add_related = can_add_related # XXX: The UX does not support multiple selected values. multiple = getattr(widget, 'allow_multiple_selected', False) self.can_change_related = not multiple and can_change_related # XXX: The deletion UX can be confusing when dealing with cascading deletion. cascade = getattr(rel, 'on_delete', None) is CASCADE self.can_delete_related = not multiple and not cascade and can_delete_related self.can_view_related = not multiple and can_view_related # so we can check if the related object is registered with this AdminSite self.admin_site = admin_site def __deepcopy__(self, memo): obj = copy.copy(self) obj.widget = copy.deepcopy(self.widget, memo) obj.attrs = self.widget.attrs memo[id(self)] = obj return obj @property def is_hidden(self): return self.widget.is_hidden @property def media(self): return self.widget.media def get_related_url(self, info, action, *args): return reverse("admin:%s_%s_%s" % (info + (action,)), current_app=self.admin_site.name, args=args) def get_context(self, name, value, attrs): from django.contrib.admin.views.main import IS_POPUP_VAR, TO_FIELD_VAR rel_opts = self.rel.model._meta info = (rel_opts.app_label, rel_opts.model_name) self.widget.choices = self.choices url_params = '&'.join("%s=%s" % param for param in [ (TO_FIELD_VAR, self.rel.get_related_field().name), (IS_POPUP_VAR, 1), ]) context = { 'rendered_widget': self.widget.render(name, value, attrs), 'is_hidden': self.is_hidden, 'name': name, 'url_params': url_params, 'model': rel_opts.verbose_name, 'can_add_related': self.can_add_related, 'can_change_related': self.can_change_related, 'can_delete_related': self.can_delete_related, 'can_view_related': self.can_view_related, } if self.can_add_related: context['add_related_url'] = self.get_related_url(info, 'add') if self.can_delete_related: context['delete_related_template_url'] = self.get_related_url(info, 'delete', '__fk__') if self.can_view_related or self.can_change_related: context['change_related_template_url'] = self.get_related_url(info, 'change', '__fk__') return context def value_from_datadict(self, data, files, name): return self.widget.value_from_datadict(data, files, name) def value_omitted_from_data(self, data, files, name): return self.widget.value_omitted_from_data(data, files, name) def id_for_label(self, id_): return self.widget.id_for_label(id_) class AdminTextareaWidget(forms.Textarea): def __init__(self, attrs=None): super().__init__(attrs={'class': 'vLargeTextField', **(attrs or {})}) class AdminTextInputWidget(forms.TextInput): def __init__(self, attrs=None): super().__init__(attrs={'class': 'vTextField', **(attrs or {})}) class AdminEmailInputWidget(forms.EmailInput): def __init__(self, attrs=None): super().__init__(attrs={'class': 'vTextField', **(attrs or {})}) class AdminURLFieldWidget(forms.URLInput): template_name = 'admin/widgets/url.html' def __init__(self, attrs=None, validator_class=URLValidator): super().__init__(attrs={'class': 'vURLField', **(attrs or {})}) self.validator = validator_class() def get_context(self, name, value, attrs): try: self.validator(value if value else '') url_valid = True except ValidationError: url_valid = False context = super().get_context(name, value, attrs) context['current_label'] = _('Currently:') context['change_label'] = _('Change:') context['widget']['href'] = smart_urlquote(context['widget']['value']) if value else '' context['url_valid'] = url_valid return context class 
AdminIntegerFieldWidget(forms.NumberInput): class_name = 'vIntegerField' def __init__(self, attrs=None): super().__init__(attrs={'class': self.class_name, **(attrs or {})}) class AdminBigIntegerFieldWidget(AdminIntegerFieldWidget): class_name = 'vBigIntegerField' class AdminUUIDInputWidget(forms.TextInput): def __init__(self, attrs=None): super().__init__(attrs={'class': 'vUUIDField', **(attrs or {})}) # Mapping of lowercase language codes [returned by Django's get_language()] to # language codes supported by select2. # See django/contrib/admin/static/admin/js/vendor/select2/i18n/* SELECT2_TRANSLATIONS = {x.lower(): x for x in [ 'ar', 'az', 'bg', 'ca', 'cs', 'da', 'de', 'el', 'en', 'es', 'et', 'eu', 'fa', 'fi', 'fr', 'gl', 'he', 'hi', 'hr', 'hu', 'id', 'is', 'it', 'ja', 'km', 'ko', 'lt', 'lv', 'mk', 'ms', 'nb', 'nl', 'pl', 'pt-BR', 'pt', 'ro', 'ru', 'sk', 'sr-Cyrl', 'sr', 'sv', 'th', 'tr', 'uk', 'vi', ]} SELECT2_TRANSLATIONS.update({'zh-hans': 'zh-CN', 'zh-hant': 'zh-TW'}) class AutocompleteMixin: """ Select widget mixin that loads options from AutocompleteJsonView via AJAX. Renders the necessary data attributes for select2 and adds the static form media. """ url_name = '%s:autocomplete' def __init__(self, field, admin_site, attrs=None, choices=(), using=None): self.field = field self.admin_site = admin_site self.db = using self.choices = choices self.attrs = {} if attrs is None else attrs.copy() self.i18n_name = SELECT2_TRANSLATIONS.get(get_language()) def get_url(self): return reverse(self.url_name % self.admin_site.name) def build_attrs(self, base_attrs, extra_attrs=None): """ Set select2's AJAX attributes. Attributes can be set using the html5 data attribute. Nested attributes require a double dash as per https://select2.org/configuration/data-attributes#nested-subkey-options """ attrs = super().build_attrs(base_attrs, extra_attrs=extra_attrs) attrs.setdefault('class', '') attrs.update({ 'data-ajax--cache': 'true', 'data-ajax--delay': 250, 'data-ajax--type': 'GET', 'data-ajax--url': self.get_url(), 'data-app-label': self.field.model._meta.app_label, 'data-model-name': self.field.model._meta.model_name, 'data-field-name': self.field.name, 'data-theme': 'admin-autocomplete', 'data-allow-clear': json.dumps(not self.is_required), 'data-placeholder': '', # Allows clearing of the input. 
'lang': self.i18n_name, 'class': attrs['class'] + (' ' if attrs['class'] else '') + 'admin-autocomplete', }) return attrs def optgroups(self, name, value, attr=None): """Return selected options based on the ModelChoiceIterator.""" default = (None, [], 0) groups = [default] has_selected = False selected_choices = { str(v) for v in value if str(v) not in self.choices.field.empty_values } if not self.is_required and not self.allow_multiple_selected: default[1].append(self.create_option(name, '', '', False, 0)) remote_model_opts = self.field.remote_field.model._meta to_field_name = getattr(self.field.remote_field, 'field_name', remote_model_opts.pk.attname) to_field_name = remote_model_opts.get_field(to_field_name).attname choices = ( (getattr(obj, to_field_name), self.choices.field.label_from_instance(obj)) for obj in self.choices.queryset.using(self.db).filter(**{'%s__in' % to_field_name: selected_choices}) ) for option_value, option_label in choices: selected = ( str(option_value) in value and (has_selected is False or self.allow_multiple_selected) ) has_selected |= selected index = len(default[1]) subgroup = default[1] subgroup.append(self.create_option(name, option_value, option_label, selected_choices, index)) return groups @property def media(self): extra = '' if settings.DEBUG else '.min' i18n_file = ('admin/js/vendor/select2/i18n/%s.js' % self.i18n_name,) if self.i18n_name else () return forms.Media( js=( 'admin/js/vendor/jquery/jquery%s.js' % extra, 'admin/js/vendor/select2/select2.full%s.js' % extra, ) + i18n_file + ( 'admin/js/jquery.init.js', 'admin/js/autocomplete.js', ), css={ 'screen': ( 'admin/css/vendor/select2/select2%s.css' % extra, 'admin/css/autocomplete.css', ), }, ) class AutocompleteSelect(AutocompleteMixin, forms.Select): pass class AutocompleteSelectMultiple(AutocompleteMixin, forms.SelectMultiple): pass
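# A minimal usage sketch of the widgets above, as it might appear in a
# hypothetical app's admin.py. The Book model and its "author"/"publisher"
# foreign keys are assumptions for illustration only.
from django.contrib import admin
from django.contrib.admin.widgets import AdminTextareaWidget
from django.db import models as db_models

from .models import Book  # hypothetical


@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
    # Render the "author" FK with ForeignKeyRawIdWidget: a text input holding
    # the raw primary key plus a changelist lookup link, instead of a <select>.
    raw_id_fields = ['author']
    # Render the "publisher" FK with AutocompleteSelect; options are fetched
    # over AJAX using the select2 data attributes built by
    # AutocompleteMixin.build_attrs(). The related ModelAdmin must declare
    # search_fields for the autocomplete view to work.
    autocomplete_fields = ['publisher']
    # Route every TextField through the admin's large textarea styling
    # (the vLargeTextField class added by AdminTextareaWidget).
    formfield_overrides = {
        db_models.TextField: {'widget': AdminTextareaWidget},
    }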
05adfcc85c462ee7c2f5818b3503654a039ccf9b998d5ab0e392b4ee314e7210
import datetime import decimal import json from collections import defaultdict from django.core.exceptions import FieldDoesNotExist from django.db import models, router from django.db.models.constants import LOOKUP_SEP from django.db.models.deletion import Collector from django.forms.utils import pretty_name from django.urls import NoReverseMatch, reverse from django.utils import formats, timezone from django.utils.html import format_html from django.utils.regex_helper import _lazy_re_compile from django.utils.text import capfirst from django.utils.translation import ngettext, override as translation_override QUOTE_MAP = {i: '_%02X' % i for i in b'":/_#?;@&=+$,"[]<>%\n\\'} UNQUOTE_MAP = {v: chr(k) for k, v in QUOTE_MAP.items()} UNQUOTE_RE = _lazy_re_compile('_(?:%s)' % '|'.join([x[1:] for x in UNQUOTE_MAP])) class FieldIsAForeignKeyColumnName(Exception): """A field is a foreign key attname, i.e. <FK>_id.""" pass def lookup_spawns_duplicates(opts, lookup_path): """ Return True if the given lookup path spawns duplicates. """ lookup_fields = lookup_path.split(LOOKUP_SEP) # Go through the fields (following all relations) and look for an m2m. for field_name in lookup_fields: if field_name == 'pk': field_name = opts.pk.name try: field = opts.get_field(field_name) except FieldDoesNotExist: # Ignore query lookups. continue else: if hasattr(field, 'path_infos'): # This field is a relation; update opts to follow the relation. path_info = field.path_infos opts = path_info[-1].to_opts if any(path.m2m for path in path_info): # This field is a m2m relation so duplicates must be # handled. return True return False def prepare_lookup_value(key, value, separator=','): """ Return a lookup value prepared to be used in queryset filtering. """ # if key ends with __in, split parameter into separate values if key.endswith('__in'): value = value.split(separator) # if key ends with __isnull, special case '' and the string literals 'false' and '0' elif key.endswith('__isnull'): value = value.lower() not in ('', 'false', '0') return value def quote(s): """ Ensure that primary key values do not confuse the admin URLs by escaping any '/', '_' and ':' and similarly problematic characters. Similar to urllib.parse.quote(), except that the quoting is slightly different so that it doesn't get automatically unquoted by the web browser. """ return s.translate(QUOTE_MAP) if isinstance(s, str) else s def unquote(s): """Undo the effects of quote().""" return UNQUOTE_RE.sub(lambda m: UNQUOTE_MAP[m[0]], s) def flatten(fields): """ Return a list which is a single level of flattening of the original list. """ flat = [] for field in fields: if isinstance(field, (list, tuple)): flat.extend(field) else: flat.append(field) return flat def flatten_fieldsets(fieldsets): """Return a list of field names from an admin fieldsets structure.""" field_names = [] for name, opts in fieldsets: field_names.extend( flatten(opts['fields']) ) return field_names def get_deleted_objects(objs, request, admin_site): """ Find all objects related to ``objs`` that should also be deleted. ``objs`` must be a homogeneous iterable of objects (e.g. a QuerySet). Return a nested list of strings suitable for display in the template with the ``unordered_list`` filter. 
""" try: obj = objs[0] except IndexError: return [], {}, set(), [] else: using = router.db_for_write(obj._meta.model) collector = NestedObjects(using=using, origin=objs) collector.collect(objs) perms_needed = set() def format_callback(obj): model = obj.__class__ has_admin = model in admin_site._registry opts = obj._meta no_edit_link = '%s: %s' % (capfirst(opts.verbose_name), obj) if has_admin: if not admin_site._registry[model].has_delete_permission(request, obj): perms_needed.add(opts.verbose_name) try: admin_url = reverse('%s:%s_%s_change' % (admin_site.name, opts.app_label, opts.model_name), None, (quote(obj.pk),)) except NoReverseMatch: # Change url doesn't exist -- don't display link to edit return no_edit_link # Display a link to the admin page. return format_html('{}: <a href="{}">{}</a>', capfirst(opts.verbose_name), admin_url, obj) else: # Don't display link to edit, because it either has no # admin or is edited inline. return no_edit_link to_delete = collector.nested(format_callback) protected = [format_callback(obj) for obj in collector.protected] model_count = {model._meta.verbose_name_plural: len(objs) for model, objs in collector.model_objs.items()} return to_delete, model_count, perms_needed, protected class NestedObjects(Collector): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.edges = {} # {from_instance: [to_instances]} self.protected = set() self.model_objs = defaultdict(set) def add_edge(self, source, target): self.edges.setdefault(source, []).append(target) def collect(self, objs, source=None, source_attr=None, **kwargs): for obj in objs: if source_attr and not source_attr.endswith('+'): related_name = source_attr % { 'class': source._meta.model_name, 'app_label': source._meta.app_label, } self.add_edge(getattr(obj, related_name), obj) else: self.add_edge(None, obj) self.model_objs[obj._meta.model].add(obj) try: return super().collect(objs, source_attr=source_attr, **kwargs) except models.ProtectedError as e: self.protected.update(e.protected_objects) except models.RestrictedError as e: self.protected.update(e.restricted_objects) def related_objects(self, related_model, related_fields, objs): qs = super().related_objects(related_model, related_fields, objs) return qs.select_related(*[related_field.name for related_field in related_fields]) def _nested(self, obj, seen, format_callback): if obj in seen: return [] seen.add(obj) children = [] for child in self.edges.get(obj, ()): children.extend(self._nested(child, seen, format_callback)) if format_callback: ret = [format_callback(obj)] else: ret = [obj] if children: ret.append(children) return ret def nested(self, format_callback=None): """ Return the graph as a nested list. """ seen = set() roots = [] for root in self.edges.get(None, ()): roots.extend(self._nested(root, seen, format_callback)) return roots def can_fast_delete(self, *args, **kwargs): """ We always want to load the objects into memory so that we can display them to the user in confirm page. """ return False def model_format_dict(obj): """ Return a `dict` with keys 'verbose_name' and 'verbose_name_plural', typically for use with string formatting. `obj` may be a `Model` instance, `Model` subclass, or `QuerySet` instance. 
""" if isinstance(obj, (models.Model, models.base.ModelBase)): opts = obj._meta elif isinstance(obj, models.query.QuerySet): opts = obj.model._meta else: opts = obj return { 'verbose_name': opts.verbose_name, 'verbose_name_plural': opts.verbose_name_plural, } def model_ngettext(obj, n=None): """ Return the appropriate `verbose_name` or `verbose_name_plural` value for `obj` depending on the count `n`. `obj` may be a `Model` instance, `Model` subclass, or `QuerySet` instance. If `obj` is a `QuerySet` instance, `n` is optional and the length of the `QuerySet` is used. """ if isinstance(obj, models.query.QuerySet): if n is None: n = obj.count() obj = obj.model d = model_format_dict(obj) singular, plural = d["verbose_name"], d["verbose_name_plural"] return ngettext(singular, plural, n or 0) def lookup_field(name, obj, model_admin=None): opts = obj._meta try: f = _get_non_gfk_field(opts, name) except (FieldDoesNotExist, FieldIsAForeignKeyColumnName): # For non-field values, the value is either a method, property or # returned via a callable. if callable(name): attr = name value = attr(obj) elif hasattr(model_admin, name) and name != '__str__': attr = getattr(model_admin, name) value = attr(obj) else: attr = getattr(obj, name) if callable(attr): value = attr() else: value = attr f = None else: attr = None value = getattr(obj, name) return f, attr, value def _get_non_gfk_field(opts, name): """ For historical reasons, the admin app relies on GenericForeignKeys as being "not found" by get_field(). This could likely be cleaned up. Reverse relations should also be excluded as these aren't attributes of the model (rather something like `foo_set`). """ field = opts.get_field(name) if (field.is_relation and # Generic foreign keys OR reverse relations ((field.many_to_one and not field.related_model) or field.one_to_many)): raise FieldDoesNotExist() # Avoid coercing <FK>_id fields to FK if field.is_relation and not field.many_to_many and hasattr(field, 'attname') and field.attname == name: raise FieldIsAForeignKeyColumnName() return field def label_for_field(name, model, model_admin=None, return_attr=False, form=None): """ Return a sensible label for a field name. The name can be a callable, property (but not created with @property decorator), or the name of an object's attribute, as well as a model field. If return_attr is True, also return the resolved attribute (which could be a callable). This will be None if (and only if) the name refers to a field. 
""" attr = None try: field = _get_non_gfk_field(model._meta, name) try: label = field.verbose_name except AttributeError: # field is likely a ForeignObjectRel label = field.related_model._meta.verbose_name except FieldDoesNotExist: if name == "__str__": label = str(model._meta.verbose_name) attr = str else: if callable(name): attr = name elif hasattr(model_admin, name): attr = getattr(model_admin, name) elif hasattr(model, name): attr = getattr(model, name) elif form and name in form.fields: attr = form.fields[name] else: message = "Unable to lookup '%s' on %s" % (name, model._meta.object_name) if model_admin: message += " or %s" % model_admin.__class__.__name__ if form: message += " or %s" % form.__class__.__name__ raise AttributeError(message) if hasattr(attr, "short_description"): label = attr.short_description elif (isinstance(attr, property) and hasattr(attr, "fget") and hasattr(attr.fget, "short_description")): label = attr.fget.short_description elif callable(attr): if attr.__name__ == "<lambda>": label = "--" else: label = pretty_name(attr.__name__) else: label = pretty_name(name) except FieldIsAForeignKeyColumnName: label = pretty_name(name) attr = name if return_attr: return (label, attr) else: return label def help_text_for_field(name, model): help_text = "" try: field = _get_non_gfk_field(model._meta, name) except (FieldDoesNotExist, FieldIsAForeignKeyColumnName): pass else: if hasattr(field, 'help_text'): help_text = field.help_text return help_text def display_for_field(value, field, empty_value_display): from django.contrib.admin.templatetags.admin_list import _boolean_icon if getattr(field, 'flatchoices', None): return dict(field.flatchoices).get(value, empty_value_display) # BooleanField needs special-case null-handling, so it comes before the # general null test. 
elif isinstance(field, models.BooleanField): return _boolean_icon(value) elif value is None: return empty_value_display elif isinstance(field, models.DateTimeField): return formats.localize(timezone.template_localtime(value)) elif isinstance(field, (models.DateField, models.TimeField)): return formats.localize(value) elif isinstance(field, models.DecimalField): return formats.number_format(value, field.decimal_places) elif isinstance(field, (models.IntegerField, models.FloatField)): return formats.number_format(value) elif isinstance(field, models.FileField) and value: return format_html('<a href="{}">{}</a>', value.url, value) elif isinstance(field, models.JSONField) and value: try: return json.dumps(value, ensure_ascii=False, cls=field.encoder) except TypeError: return display_for_value(value, empty_value_display) else: return display_for_value(value, empty_value_display) def display_for_value(value, empty_value_display, boolean=False): from django.contrib.admin.templatetags.admin_list import _boolean_icon if boolean: return _boolean_icon(value) elif value is None: return empty_value_display elif isinstance(value, bool): return str(value) elif isinstance(value, datetime.datetime): return formats.localize(timezone.template_localtime(value)) elif isinstance(value, (datetime.date, datetime.time)): return formats.localize(value) elif isinstance(value, (int, decimal.Decimal, float)): return formats.number_format(value) elif isinstance(value, (list, tuple)): return ', '.join(str(v) for v in value) else: return str(value) class NotRelationField(Exception): pass def get_model_from_relation(field): if hasattr(field, 'path_infos'): return field.path_infos[-1].to_opts.model else: raise NotRelationField def reverse_field_path(model, path): """ Create a reversed field path. E.g. Given (Order, "user__groups"), return (Group, "user__order"). Final field must be a related model, not a data field. """ reversed_path = [] parent = model pieces = path.split(LOOKUP_SEP) for piece in pieces: field = parent._meta.get_field(piece) # skip trailing data field if extant: if len(reversed_path) == len(pieces) - 1: # final iteration try: get_model_from_relation(field) except NotRelationField: break # Field should point to another model if field.is_relation and not (field.auto_created and not field.concrete): related_name = field.related_query_name() parent = field.remote_field.model else: related_name = field.field.name parent = field.related_model reversed_path.insert(0, related_name) return (parent, LOOKUP_SEP.join(reversed_path)) def get_fields_from_path(model, path): """ Return list of Fields given path relative to model. e.g. (ModelX, "user__groups__name") -> [ <django.db.models.fields.related.ForeignKey object at 0x...>, <django.db.models.fields.related.ManyToManyField object at 0x...>, <django.db.models.fields.CharField object at 0x...>, ] """ pieces = path.split(LOOKUP_SEP) fields = [] for piece in pieces: if fields: parent = get_model_from_relation(fields[-1]) else: parent = model fields.append(parent._meta.get_field(piece)) return fields def construct_change_message(form, formsets, add): """ Construct a JSON structure describing changes from a changed object. Translations are deactivated so that strings are stored untranslated. Translation happens later on LogEntry access. """ # Evaluating `form.changed_data` prior to disabling translations is required # to avoid fields affected by localization from being included incorrectly, # e.g. where date formats differ such as MM/DD/YYYY vs DD/MM/YYYY. 
changed_data = form.changed_data with translation_override(None): # Deactivate translations while fetching verbose_name for form # field labels and using `field_name`, if verbose_name is not provided. # Translations will happen later on LogEntry access. changed_field_labels = _get_changed_field_labels_from_form(form, changed_data) change_message = [] if add: change_message.append({'added': {}}) elif form.changed_data: change_message.append({'changed': {'fields': changed_field_labels}}) if formsets: with translation_override(None): for formset in formsets: for added_object in formset.new_objects: change_message.append({ 'added': { 'name': str(added_object._meta.verbose_name), 'object': str(added_object), } }) for changed_object, changed_fields in formset.changed_objects: change_message.append({ 'changed': { 'name': str(changed_object._meta.verbose_name), 'object': str(changed_object), 'fields': _get_changed_field_labels_from_form(formset.forms[0], changed_fields), } }) for deleted_object in formset.deleted_objects: change_message.append({ 'deleted': { 'name': str(deleted_object._meta.verbose_name), 'object': str(deleted_object), } }) return change_message def _get_changed_field_labels_from_form(form, changed_data): changed_field_labels = [] for field_name in changed_data: try: verbose_field_name = form.fields[field_name].label or field_name except KeyError: verbose_field_name = field_name changed_field_labels.append(str(verbose_field_name)) return changed_field_labels
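# A quick illustrative check of the quote()/unquote() helpers above. Admin
# URLs escape '/', '_', ':' and similarly problematic characters in primary
# keys via the QUOTE_MAP translation table, and unquote() restores the
# original value. The sample key below is made up.
assert quote('ab_c:d/e') == 'ab_5Fc_3Ad_2Fe'
assert unquote('ab_5Fc_3Ad_2Fe') == 'ab_c:d/e'

# construct_change_message() above returns a JSON-serializable list like the
# one stored on LogEntry.change_message; for example (field labels and object
# representations here are hypothetical):
#
#   [
#       {'changed': {'fields': ['Title', 'Author']}},
#       {'added': {'name': 'chapter', 'object': 'Chapter 1'}},
#       {'deleted': {'name': 'chapter', 'object': 'Chapter 3'}},
#   ]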
c13fb7afb941741b1102eb6cfca8c2844e2de8f0281d3b4428fd45df087d39d8
""" This encapsulates the logic for displaying filters in the Django admin. Filters are specified in models with the "list_filter" option. Each filter subclass knows how to display a filter for a field that passes a certain test -- e.g. being a DateField or ForeignKey. """ import datetime from django.contrib.admin.options import IncorrectLookupParameters from django.contrib.admin.utils import ( get_model_from_relation, prepare_lookup_value, reverse_field_path, ) from django.core.exceptions import ImproperlyConfigured, ValidationError from django.db import models from django.utils import timezone from django.utils.translation import gettext_lazy as _ class ListFilter: title = None # Human-readable title to appear in the right sidebar. template = 'admin/filter.html' def __init__(self, request, params, model, model_admin): # This dictionary will eventually contain the request's query string # parameters actually used by this filter. self.used_parameters = {} if self.title is None: raise ImproperlyConfigured( "The list filter '%s' does not specify a 'title'." % self.__class__.__name__ ) def has_output(self): """ Return True if some choices would be output for this filter. """ raise NotImplementedError('subclasses of ListFilter must provide a has_output() method') def choices(self, changelist): """ Return choices ready to be output in the template. `changelist` is the ChangeList to be displayed. """ raise NotImplementedError('subclasses of ListFilter must provide a choices() method') def queryset(self, request, queryset): """ Return the filtered queryset. """ raise NotImplementedError('subclasses of ListFilter must provide a queryset() method') def expected_parameters(self): """ Return the list of parameter names that are expected from the request's query string and that will be used by this filter. """ raise NotImplementedError('subclasses of ListFilter must provide an expected_parameters() method') class SimpleListFilter(ListFilter): # The parameter that should be used in the query string for that filter. parameter_name = None def __init__(self, request, params, model, model_admin): super().__init__(request, params, model, model_admin) if self.parameter_name is None: raise ImproperlyConfigured( "The list filter '%s' does not specify a 'parameter_name'." % self.__class__.__name__ ) if self.parameter_name in params: value = params.pop(self.parameter_name) self.used_parameters[self.parameter_name] = value lookup_choices = self.lookups(request, model_admin) if lookup_choices is None: lookup_choices = () self.lookup_choices = list(lookup_choices) def has_output(self): return len(self.lookup_choices) > 0 def value(self): """ Return the value (in string format) provided in the request's query string for this filter, if any, or None if the value wasn't provided. """ return self.used_parameters.get(self.parameter_name) def lookups(self, request, model_admin): """ Must be overridden to return a list of tuples (value, verbose value) """ raise NotImplementedError( 'The SimpleListFilter.lookups() method must be overridden to ' 'return a list of tuples (value, verbose value).' 
) def expected_parameters(self): return [self.parameter_name] def choices(self, changelist): yield { 'selected': self.value() is None, 'query_string': changelist.get_query_string(remove=[self.parameter_name]), 'display': _('All'), } for lookup, title in self.lookup_choices: yield { 'selected': self.value() == str(lookup), 'query_string': changelist.get_query_string({self.parameter_name: lookup}), 'display': title, } class FieldListFilter(ListFilter): _field_list_filters = [] _take_priority_index = 0 list_separator = ',' def __init__(self, field, request, params, model, model_admin, field_path): self.field = field self.field_path = field_path self.title = getattr(field, 'verbose_name', field_path) super().__init__(request, params, model, model_admin) for p in self.expected_parameters(): if p in params: value = params.pop(p) self.used_parameters[p] = prepare_lookup_value(p, value, self.list_separator) def has_output(self): return True def queryset(self, request, queryset): try: return queryset.filter(**self.used_parameters) except (ValueError, ValidationError) as e: # Fields may raise a ValueError or ValidationError when converting # the parameters to the correct type. raise IncorrectLookupParameters(e) @classmethod def register(cls, test, list_filter_class, take_priority=False): if take_priority: # This is to allow overriding the default filters for certain types # of fields with some custom filters. The first found in the list # is used in priority. cls._field_list_filters.insert( cls._take_priority_index, (test, list_filter_class)) cls._take_priority_index += 1 else: cls._field_list_filters.append((test, list_filter_class)) @classmethod def create(cls, field, request, params, model, model_admin, field_path): for test, list_filter_class in cls._field_list_filters: if test(field): return list_filter_class(field, request, params, model, model_admin, field_path=field_path) class RelatedFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): other_model = get_model_from_relation(field) self.lookup_kwarg = '%s__%s__exact' % (field_path, field.target_field.name) self.lookup_kwarg_isnull = '%s__isnull' % field_path self.lookup_val = params.get(self.lookup_kwarg) self.lookup_val_isnull = params.get(self.lookup_kwarg_isnull) super().__init__(field, request, params, model, model_admin, field_path) self.lookup_choices = self.field_choices(field, request, model_admin) if hasattr(field, 'verbose_name'): self.lookup_title = field.verbose_name else: self.lookup_title = other_model._meta.verbose_name self.title = self.lookup_title self.empty_value_display = model_admin.get_empty_value_display() @property def include_empty_choice(self): """ Return True if a "(None)" choice should be included, which filters out everything except empty relationships. """ return self.field.null or (self.field.is_relation and self.field.many_to_many) def has_output(self): if self.include_empty_choice: extra = 1 else: extra = 0 return len(self.lookup_choices) + extra > 1 def expected_parameters(self): return [self.lookup_kwarg, self.lookup_kwarg_isnull] def field_admin_ordering(self, field, request, model_admin): """ Return the model admin's ordering for related field, if provided. 
""" related_admin = model_admin.admin_site._registry.get(field.remote_field.model) if related_admin is not None: return related_admin.get_ordering(request) return () def field_choices(self, field, request, model_admin): ordering = self.field_admin_ordering(field, request, model_admin) return field.get_choices(include_blank=False, ordering=ordering) def choices(self, changelist): yield { 'selected': self.lookup_val is None and not self.lookup_val_isnull, 'query_string': changelist.get_query_string(remove=[self.lookup_kwarg, self.lookup_kwarg_isnull]), 'display': _('All'), } for pk_val, val in self.lookup_choices: yield { 'selected': self.lookup_val == str(pk_val), 'query_string': changelist.get_query_string({self.lookup_kwarg: pk_val}, [self.lookup_kwarg_isnull]), 'display': val, } if self.include_empty_choice: yield { 'selected': bool(self.lookup_val_isnull), 'query_string': changelist.get_query_string({self.lookup_kwarg_isnull: 'True'}, [self.lookup_kwarg]), 'display': self.empty_value_display, } FieldListFilter.register(lambda f: f.remote_field, RelatedFieldListFilter) class BooleanFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): self.lookup_kwarg = '%s__exact' % field_path self.lookup_kwarg2 = '%s__isnull' % field_path self.lookup_val = params.get(self.lookup_kwarg) self.lookup_val2 = params.get(self.lookup_kwarg2) super().__init__(field, request, params, model, model_admin, field_path) if (self.used_parameters and self.lookup_kwarg in self.used_parameters and self.used_parameters[self.lookup_kwarg] in ('1', '0')): self.used_parameters[self.lookup_kwarg] = bool(int(self.used_parameters[self.lookup_kwarg])) def expected_parameters(self): return [self.lookup_kwarg, self.lookup_kwarg2] def choices(self, changelist): field_choices = dict(self.field.flatchoices) for lookup, title in ( (None, _('All')), ('1', field_choices.get(True, _('Yes'))), ('0', field_choices.get(False, _('No'))), ): yield { 'selected': self.lookup_val == lookup and not self.lookup_val2, 'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}, [self.lookup_kwarg2]), 'display': title, } if self.field.null: yield { 'selected': self.lookup_val2 == 'True', 'query_string': changelist.get_query_string({self.lookup_kwarg2: 'True'}, [self.lookup_kwarg]), 'display': field_choices.get(None, _('Unknown')), } FieldListFilter.register(lambda f: isinstance(f, models.BooleanField), BooleanFieldListFilter) class ChoicesFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): self.lookup_kwarg = '%s__exact' % field_path self.lookup_kwarg_isnull = '%s__isnull' % field_path self.lookup_val = params.get(self.lookup_kwarg) self.lookup_val_isnull = params.get(self.lookup_kwarg_isnull) super().__init__(field, request, params, model, model_admin, field_path) def expected_parameters(self): return [self.lookup_kwarg, self.lookup_kwarg_isnull] def choices(self, changelist): yield { 'selected': self.lookup_val is None, 'query_string': changelist.get_query_string(remove=[self.lookup_kwarg, self.lookup_kwarg_isnull]), 'display': _('All') } none_title = '' for lookup, title in self.field.flatchoices: if lookup is None: none_title = title continue yield { 'selected': str(lookup) == self.lookup_val, 'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}, [self.lookup_kwarg_isnull]), 'display': title, } if none_title: yield { 'selected': bool(self.lookup_val_isnull), 'query_string': 
changelist.get_query_string({self.lookup_kwarg_isnull: 'True'}, [self.lookup_kwarg]), 'display': none_title, } FieldListFilter.register(lambda f: bool(f.choices), ChoicesFieldListFilter) class DateFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): self.field_generic = '%s__' % field_path self.date_params = {k: v for k, v in params.items() if k.startswith(self.field_generic)} now = timezone.now() # When time zone support is enabled, convert "now" to the user's time # zone so Django's definition of "Today" matches what the user expects. if timezone.is_aware(now): now = timezone.localtime(now) if isinstance(field, models.DateTimeField): today = now.replace(hour=0, minute=0, second=0, microsecond=0) else: # field is a models.DateField today = now.date() tomorrow = today + datetime.timedelta(days=1) if today.month == 12: next_month = today.replace(year=today.year + 1, month=1, day=1) else: next_month = today.replace(month=today.month + 1, day=1) next_year = today.replace(year=today.year + 1, month=1, day=1) self.lookup_kwarg_since = '%s__gte' % field_path self.lookup_kwarg_until = '%s__lt' % field_path self.links = ( (_('Any date'), {}), (_('Today'), { self.lookup_kwarg_since: str(today), self.lookup_kwarg_until: str(tomorrow), }), (_('Past 7 days'), { self.lookup_kwarg_since: str(today - datetime.timedelta(days=7)), self.lookup_kwarg_until: str(tomorrow), }), (_('This month'), { self.lookup_kwarg_since: str(today.replace(day=1)), self.lookup_kwarg_until: str(next_month), }), (_('This year'), { self.lookup_kwarg_since: str(today.replace(month=1, day=1)), self.lookup_kwarg_until: str(next_year), }), ) if field.null: self.lookup_kwarg_isnull = '%s__isnull' % field_path self.links += ( (_('No date'), {self.field_generic + 'isnull': 'True'}), (_('Has date'), {self.field_generic + 'isnull': 'False'}), ) super().__init__(field, request, params, model, model_admin, field_path) def expected_parameters(self): params = [self.lookup_kwarg_since, self.lookup_kwarg_until] if self.field.null: params.append(self.lookup_kwarg_isnull) return params def choices(self, changelist): for title, param_dict in self.links: yield { 'selected': self.date_params == param_dict, 'query_string': changelist.get_query_string(param_dict, [self.field_generic]), 'display': title, } FieldListFilter.register( lambda f: isinstance(f, models.DateField), DateFieldListFilter) # This should be registered last, because it's a last resort. For example, # if a field is eligible to use the BooleanFieldListFilter, that'd be much # more appropriate, and the AllValuesFieldListFilter won't get used for it. 
class AllValuesFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): self.lookup_kwarg = field_path self.lookup_kwarg_isnull = '%s__isnull' % field_path self.lookup_val = params.get(self.lookup_kwarg) self.lookup_val_isnull = params.get(self.lookup_kwarg_isnull) self.empty_value_display = model_admin.get_empty_value_display() parent_model, reverse_path = reverse_field_path(model, field_path) # Obey parent ModelAdmin queryset when deciding which options to show if model == parent_model: queryset = model_admin.get_queryset(request) else: queryset = parent_model._default_manager.all() self.lookup_choices = queryset.distinct().order_by(field.name).values_list(field.name, flat=True) super().__init__(field, request, params, model, model_admin, field_path) def expected_parameters(self): return [self.lookup_kwarg, self.lookup_kwarg_isnull] def choices(self, changelist): yield { 'selected': self.lookup_val is None and self.lookup_val_isnull is None, 'query_string': changelist.get_query_string(remove=[self.lookup_kwarg, self.lookup_kwarg_isnull]), 'display': _('All'), } include_none = False for val in self.lookup_choices: if val is None: include_none = True continue val = str(val) yield { 'selected': self.lookup_val == val, 'query_string': changelist.get_query_string({self.lookup_kwarg: val}, [self.lookup_kwarg_isnull]), 'display': val, } if include_none: yield { 'selected': bool(self.lookup_val_isnull), 'query_string': changelist.get_query_string({self.lookup_kwarg_isnull: 'True'}, [self.lookup_kwarg]), 'display': self.empty_value_display, } FieldListFilter.register(lambda f: True, AllValuesFieldListFilter) class RelatedOnlyFieldListFilter(RelatedFieldListFilter): def field_choices(self, field, request, model_admin): pk_qs = model_admin.get_queryset(request).distinct().values_list('%s__pk' % self.field_path, flat=True) ordering = self.field_admin_ordering(field, request, model_admin) return field.get_choices(include_blank=False, limit_choices_to={'pk__in': pk_qs}, ordering=ordering) class EmptyFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): if not field.empty_strings_allowed and not field.null: raise ImproperlyConfigured( "The list filter '%s' cannot be used with field '%s' which " "doesn't allow empty strings and nulls." % ( self.__class__.__name__, field.name, ) ) self.lookup_kwarg = '%s__isempty' % field_path self.lookup_val = params.get(self.lookup_kwarg) super().__init__(field, request, params, model, model_admin, field_path) def queryset(self, request, queryset): if self.lookup_kwarg not in self.used_parameters: return queryset if self.lookup_val not in ('0', '1'): raise IncorrectLookupParameters lookup_conditions = [] if self.field.empty_strings_allowed: lookup_conditions.append((self.field_path, '')) if self.field.null: lookup_conditions.append((f'{self.field_path}__isnull', True)) lookup_condition = models.Q(*lookup_conditions, _connector=models.Q.OR) if self.lookup_val == '1': return queryset.filter(lookup_condition) return queryset.exclude(lookup_condition) def expected_parameters(self): return [self.lookup_kwarg] def choices(self, changelist): for lookup, title in ( (None, _('All')), ('1', _('Empty')), ('0', _('Not empty')), ): yield { 'selected': self.lookup_val == lookup, 'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}), 'display': title, }
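# A minimal custom filter built on SimpleListFilter above, as it might appear
# in a hypothetical app's admin.py. The "birth_year" field and the decade
# buckets are assumptions for illustration only.
from django.contrib import admin
from django.utils.translation import gettext_lazy as _


class DecadeBornListFilter(admin.SimpleListFilter):
    # Human-readable title shown in the right sidebar.
    title = _('decade born')
    # Query string parameter used for this filter.
    parameter_name = 'decade'

    def lookups(self, request, model_admin):
        # (value, verbose value) pairs; the value ends up in the URL.
        return [
            ('80s', _('in the eighties')),
            ('90s', _('in the nineties')),
        ]

    def queryset(self, request, queryset):
        # self.value() returns the raw query-string value, or None for "All".
        if self.value() == '80s':
            return queryset.filter(birth_year__gte=1980, birth_year__lte=1989)
        if self.value() == '90s':
            return queryset.filter(birth_year__gte=1990, birth_year__lte=1999)
        return queryset


# Attached via ModelAdmin.list_filter, alongside the field-based filters
# registered above (BooleanFieldListFilter, DateFieldListFilter, etc.):
#
#   list_filter = (DecadeBornListFilter, 'is_active', 'created')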
c1f8082e419430cdccb91b3d68d361c82785b6a94ce36e8c59b098253170758e
import json import os import posixpath import re from urllib.parse import unquote, urldefrag, urlsplit, urlunsplit from django.conf import settings from django.contrib.staticfiles.utils import check_settings, matches_patterns from django.core.exceptions import ImproperlyConfigured from django.core.files.base import ContentFile from django.core.files.storage import FileSystemStorage, get_storage_class from django.utils.crypto import md5 from django.utils.functional import LazyObject class StaticFilesStorage(FileSystemStorage): """ Standard file system storage for static files. The defaults for ``location`` and ``base_url`` are ``STATIC_ROOT`` and ``STATIC_URL``. """ def __init__(self, location=None, base_url=None, *args, **kwargs): if location is None: location = settings.STATIC_ROOT if base_url is None: base_url = settings.STATIC_URL check_settings(base_url) super().__init__(location, base_url, *args, **kwargs) # FileSystemStorage fallbacks to MEDIA_ROOT when location # is empty, so we restore the empty value. if not location: self.base_location = None self.location = None def path(self, name): if not self.location: raise ImproperlyConfigured("You're using the staticfiles app " "without having set the STATIC_ROOT " "setting to a filesystem path.") return super().path(name) class HashedFilesMixin: default_template = """url("%(url)s")""" max_post_process_passes = 5 patterns = ( ("*.css", ( r"""(?P<matched>url\(['"]{0,1}\s*(?P<url>.*?)["']{0,1}\))""", ( r"""(?P<matched>@import\s*["']\s*(?P<url>.*?)["'])""", """@import url("%(url)s")""", ), ( r'(?m)(?P<matched>)^(/\*# (?-i:sourceMappingURL)=(?P<url>.*) \*/)$', '/*# sourceMappingURL=%(url)s */', ), )), ('*.js', ( ( r'(?m)(?P<matched>)^(//# (?-i:sourceMappingURL)=(?P<url>.*))$', '//# sourceMappingURL=%(url)s', ), )), ) keep_intermediate_files = True def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._patterns = {} self.hashed_files = {} for extension, patterns in self.patterns: for pattern in patterns: if isinstance(pattern, (tuple, list)): pattern, template = pattern else: template = self.default_template compiled = re.compile(pattern, re.IGNORECASE) self._patterns.setdefault(extension, []).append((compiled, template)) def file_hash(self, name, content=None): """ Return a hash of the file with the given name and optional content. """ if content is None: return None hasher = md5(usedforsecurity=False) for chunk in content.chunks(): hasher.update(chunk) return hasher.hexdigest()[:12] def hashed_name(self, name, content=None, filename=None): # `filename` is the name of file to hash if `content` isn't given. # `name` is the base name to construct the new hashed filename from. parsed_name = urlsplit(unquote(name)) clean_name = parsed_name.path.strip() filename = (filename and urlsplit(unquote(filename)).path.strip()) or clean_name opened = content is None if opened: if not self.exists(filename): raise ValueError("The file '%s' could not be found with %r." 
% (filename, self)) try: content = self.open(filename) except OSError: # Handle directory paths and fragments return name try: file_hash = self.file_hash(clean_name, content) finally: if opened: content.close() path, filename = os.path.split(clean_name) root, ext = os.path.splitext(filename) file_hash = ('.%s' % file_hash) if file_hash else '' hashed_name = os.path.join(path, "%s%s%s" % (root, file_hash, ext)) unparsed_name = list(parsed_name) unparsed_name[2] = hashed_name # Special casing for a @font-face hack, like url(myfont.eot?#iefix") # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax if '?#' in name and not unparsed_name[3]: unparsed_name[2] += '?' return urlunsplit(unparsed_name) def _url(self, hashed_name_func, name, force=False, hashed_files=None): """ Return the non-hashed URL in DEBUG mode. """ if settings.DEBUG and not force: hashed_name, fragment = name, '' else: clean_name, fragment = urldefrag(name) if urlsplit(clean_name).path.endswith('/'): # don't hash paths hashed_name = name else: args = (clean_name,) if hashed_files is not None: args += (hashed_files,) hashed_name = hashed_name_func(*args) final_url = super().url(hashed_name) # Special casing for a @font-face hack, like url(myfont.eot?#iefix") # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax query_fragment = '?#' in name # [sic!] if fragment or query_fragment: urlparts = list(urlsplit(final_url)) if fragment and not urlparts[4]: urlparts[4] = fragment if query_fragment and not urlparts[3]: urlparts[2] += '?' final_url = urlunsplit(urlparts) return unquote(final_url) def url(self, name, force=False): """ Return the non-hashed URL in DEBUG mode. """ return self._url(self.stored_name, name, force) def url_converter(self, name, hashed_files, template=None): """ Return the custom URL converter for the given file name. """ if template is None: template = self.default_template def converter(matchobj): """ Convert the matched URL to a normalized and hashed URL. This requires figuring out which files the matched URL resolves to and calling the url() method of the storage. """ matches = matchobj.groupdict() matched = matches['matched'] url = matches['url'] # Ignore absolute/protocol-relative and data-uri URLs. if re.match(r'^[a-z]+:', url): return matched # Ignore absolute URLs that don't point to a static file (dynamic # CSS / JS?). Note that STATIC_URL cannot be empty. if url.startswith('/') and not url.startswith(settings.STATIC_URL): return matched # Strip off the fragment so a path-like fragment won't interfere. url_path, fragment = urldefrag(url) if url_path.startswith('/'): # Otherwise the condition above would have returned prematurely. assert url_path.startswith(settings.STATIC_URL) target_name = url_path[len(settings.STATIC_URL):] else: # We're using the posixpath module to mix paths and URLs conveniently. source_name = name if os.sep == '/' else name.replace(os.sep, '/') target_name = posixpath.join(posixpath.dirname(source_name), url_path) # Determine the hashed name of the target file with the storage backend. hashed_url = self._url( self._stored_name, unquote(target_name), force=True, hashed_files=hashed_files, ) transformed_url = '/'.join(url_path.split('/')[:-1] + hashed_url.split('/')[-1:]) # Restore the fragment that was stripped off earlier. 
if fragment: transformed_url += ('?#' if '?#' in url else '#') + fragment # Return the hashed version to the file matches['url'] = unquote(transformed_url) return template % matches return converter def post_process(self, paths, dry_run=False, **options): """ Post process the given dictionary of files (called from collectstatic). Processing is actually two separate operations: 1. renaming files to include a hash of their content for cache-busting, and copying those files to the target storage. 2. adjusting files which contain references to other files so they refer to the cache-busting filenames. If either of these are performed on a file, then that file is considered post-processed. """ # don't even dare to process the files if we're in dry run mode if dry_run: return # where to store the new paths hashed_files = {} # build a list of adjustable files adjustable_paths = [ path for path in paths if matches_patterns(path, self._patterns) ] # Adjustable files to yield at end, keyed by the original path. processed_adjustable_paths = {} # Do a single pass first. Post-process all files once, yielding not # adjustable files and exceptions, and collecting adjustable files. for name, hashed_name, processed, _ in self._post_process(paths, adjustable_paths, hashed_files): if name not in adjustable_paths or isinstance(processed, Exception): yield name, hashed_name, processed else: processed_adjustable_paths[name] = (name, hashed_name, processed) paths = {path: paths[path] for path in adjustable_paths} substitutions = False for i in range(self.max_post_process_passes): substitutions = False for name, hashed_name, processed, subst in self._post_process(paths, adjustable_paths, hashed_files): # Overwrite since hashed_name may be newer. processed_adjustable_paths[name] = (name, hashed_name, processed) substitutions = substitutions or subst if not substitutions: break if substitutions: yield 'All', None, RuntimeError('Max post-process passes exceeded.') # Store the processed paths self.hashed_files.update(hashed_files) # Yield adjustable files with final, hashed name. yield from processed_adjustable_paths.values() def _post_process(self, paths, adjustable_paths, hashed_files): # Sort the files by directory level def path_level(name): return len(name.split(os.sep)) for name in sorted(paths, key=path_level, reverse=True): substitutions = True # use the original, local file, not the copied-but-unprocessed # file, which might be somewhere far away, like S3 storage, path = paths[name] with storage.open(path) as original_file: cleaned_name = self.clean_name(name) hash_key = self.hash_key(cleaned_name) # generate the hash with the original content, even for # adjustable files. if hash_key not in hashed_files: hashed_name = self.hashed_name(name, original_file) else: hashed_name = hashed_files[hash_key] # then get the original's file content.. 
if hasattr(original_file, 'seek'): original_file.seek(0) hashed_file_exists = self.exists(hashed_name) processed = False # ..to apply each replacement pattern to the content if name in adjustable_paths: old_hashed_name = hashed_name content = original_file.read().decode('utf-8') for extension, patterns in self._patterns.items(): if matches_patterns(path, (extension,)): for pattern, template in patterns: converter = self.url_converter(name, hashed_files, template) try: content = pattern.sub(converter, content) except ValueError as exc: yield name, None, exc, False if hashed_file_exists: self.delete(hashed_name) # then save the processed result content_file = ContentFile(content.encode()) if self.keep_intermediate_files: # Save intermediate file for reference self._save(hashed_name, content_file) hashed_name = self.hashed_name(name, content_file) if self.exists(hashed_name): self.delete(hashed_name) saved_name = self._save(hashed_name, content_file) hashed_name = self.clean_name(saved_name) # If the file hash stayed the same, this file didn't change if old_hashed_name == hashed_name: substitutions = False processed = True if not processed: # or handle the case in which neither processing nor # a change to the original file happened if not hashed_file_exists: processed = True saved_name = self._save(hashed_name, original_file) hashed_name = self.clean_name(saved_name) # and then set the cache accordingly hashed_files[hash_key] = hashed_name yield name, hashed_name, processed, substitutions def clean_name(self, name): return name.replace('\\', '/') def hash_key(self, name): return name def _stored_name(self, name, hashed_files): # Normalize the path to avoid multiple names for the same file like # ../foo/bar.css and ../foo/../foo/bar.css which normalize to the same # path. name = posixpath.normpath(name) cleaned_name = self.clean_name(name) hash_key = self.hash_key(cleaned_name) cache_name = hashed_files.get(hash_key) if cache_name is None: cache_name = self.clean_name(self.hashed_name(name)) return cache_name def stored_name(self, name): cleaned_name = self.clean_name(name) hash_key = self.hash_key(cleaned_name) cache_name = self.hashed_files.get(hash_key) if cache_name: return cache_name # No cached name found, recalculate it from the files. intermediate_name = name for i in range(self.max_post_process_passes + 1): cache_name = self.clean_name( self.hashed_name(name, content=None, filename=intermediate_name) ) if intermediate_name == cache_name: # Store the hashed name if there was a miss. self.hashed_files[hash_key] = cache_name return cache_name else: # Move on to the next intermediate file. intermediate_name = cache_name # If the cache name can't be determined after the max number of passes, # the intermediate files on disk may be corrupt; avoid an infinite loop. raise ValueError("The name '%s' could not be hashed with %r." 
% (name, self)) class ManifestFilesMixin(HashedFilesMixin): manifest_version = '1.0' # the manifest format standard manifest_name = 'staticfiles.json' manifest_strict = True keep_intermediate_files = False def __init__(self, *args, manifest_storage=None, **kwargs): super().__init__(*args, **kwargs) if manifest_storage is None: manifest_storage = self self.manifest_storage = manifest_storage self.hashed_files = self.load_manifest() def read_manifest(self): try: with self.manifest_storage.open(self.manifest_name) as manifest: return manifest.read().decode() except FileNotFoundError: return None def load_manifest(self): content = self.read_manifest() if content is None: return {} try: stored = json.loads(content) except json.JSONDecodeError: pass else: version = stored.get('version') if version == '1.0': return stored.get('paths', {}) raise ValueError("Couldn't load manifest '%s' (version %s)" % (self.manifest_name, self.manifest_version)) def post_process(self, *args, **kwargs): self.hashed_files = {} yield from super().post_process(*args, **kwargs) if not kwargs.get('dry_run'): self.save_manifest() def save_manifest(self): payload = {'paths': self.hashed_files, 'version': self.manifest_version} if self.manifest_storage.exists(self.manifest_name): self.manifest_storage.delete(self.manifest_name) contents = json.dumps(payload).encode() self.manifest_storage._save(self.manifest_name, ContentFile(contents)) def stored_name(self, name): parsed_name = urlsplit(unquote(name)) clean_name = parsed_name.path.strip() hash_key = self.hash_key(clean_name) cache_name = self.hashed_files.get(hash_key) if cache_name is None: if self.manifest_strict: raise ValueError("Missing staticfiles manifest entry for '%s'" % clean_name) cache_name = self.clean_name(self.hashed_name(name)) unparsed_name = list(parsed_name) unparsed_name[2] = cache_name # Special casing for a @font-face hack, like url(myfont.eot?#iefix") # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax if '?#' in name and not unparsed_name[3]: unparsed_name[2] += '?' return urlunsplit(unparsed_name) class ManifestStaticFilesStorage(ManifestFilesMixin, StaticFilesStorage): """ A static file system storage backend which also saves hashed copies of the files it saves. """ pass class ConfiguredStorage(LazyObject): def _setup(self): self._wrapped = get_storage_class(settings.STATICFILES_STORAGE)() staticfiles_storage = ConfiguredStorage()
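# A settings sketch showing how ManifestStaticFilesStorage above is typically
# enabled; the paths and the hash in the example output are illustrative only.
# After `collectstatic` runs post_process() and writes staticfiles.json,
# staticfiles_storage.url() (and the {% static %} tag) maps original names to
# their hashed counterparts.

# settings.py
STATIC_URL = '/static/'
STATIC_ROOT = '/var/www/example/static'  # hypothetical path
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'

# At runtime, once the manifest exists:
#
#   >>> from django.contrib.staticfiles.storage import staticfiles_storage
#   >>> staticfiles_storage.url('css/base.css')
#   '/static/css/base.27e20196a850.css'   # 12-char hash; value is illustrative
#
# With manifest_strict = True (the default), requesting a name that has no
# manifest entry raises ValueError("Missing staticfiles manifest entry ...").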
628db56bf2442b9d36bfe49cefa8a3b2de50b315fb33163edceb21866c1a8a6b
from django.apps import apps from .requests import RequestSite def get_current_site(request): """ Check if contrib.sites is installed and return either the current ``Site`` object or a ``RequestSite`` object based on the request. """ # Import is inside the function because its point is to avoid importing the # Site models when django.contrib.sites isn't installed. if apps.is_installed('django.contrib.sites'): from .models import Site return Site.objects.get_current(request) else: return RequestSite(request)
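# A minimal sketch of calling get_current_site() above from a view. The view
# and template names are hypothetical; both Site and RequestSite expose
# .name and .domain, so the same code works whether or not
# django.contrib.sites is installed.
from django.contrib.sites.shortcuts import get_current_site
from django.shortcuts import render


def activation_email_preview(request):
    current_site = get_current_site(request)
    context = {'site_name': current_site.name, 'domain': current_site.domain}
    return render(request, 'registration/activation_email_preview.html', context)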
377326a8ae30ce08565200bf3650610c5df4cefc98030a2bc0f426837d8c9b82
import functools import itertools from collections import defaultdict from django.contrib.contenttypes.models import ContentType from django.core import checks from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist from django.db import DEFAULT_DB_ALIAS, models, router, transaction from django.db.models import DO_NOTHING, ForeignObject, ForeignObjectRel from django.db.models.base import ModelBase, make_foreign_order_accessors from django.db.models.fields.mixins import FieldCacheMixin from django.db.models.fields.related import ( ReverseManyToOneDescriptor, lazy_related_operation, ) from django.db.models.query_utils import PathInfo from django.db.models.sql import AND from django.db.models.sql.where import WhereNode from django.utils.functional import cached_property class GenericForeignKey(FieldCacheMixin): """ Provide a generic many-to-one relation through the ``content_type`` and ``object_id`` fields. This class also doubles as an accessor to the related object (similar to ForwardManyToOneDescriptor) by adding itself as a model attribute. """ # Field flags auto_created = False concrete = False editable = False hidden = False is_relation = True many_to_many = False many_to_one = True one_to_many = False one_to_one = False related_model = None remote_field = None def __init__(self, ct_field='content_type', fk_field='object_id', for_concrete_model=True): self.ct_field = ct_field self.fk_field = fk_field self.for_concrete_model = for_concrete_model self.editable = False self.rel = None self.column = None def contribute_to_class(self, cls, name, **kwargs): self.name = name self.model = cls cls._meta.add_field(self, private=True) setattr(cls, name, self) def get_filter_kwargs_for_object(self, obj): """See corresponding method on Field""" return { self.fk_field: getattr(obj, self.fk_field), self.ct_field: getattr(obj, self.ct_field), } def get_forward_related_filter(self, obj): """See corresponding method on RelatedField""" return { self.fk_field: obj.pk, self.ct_field: ContentType.objects.get_for_model(obj).pk, } def __str__(self): model = self.model return '%s.%s' % (model._meta.label, self.name) def check(self, **kwargs): return [ *self._check_field_name(), *self._check_object_id_field(), *self._check_content_type_field(), ] def _check_field_name(self): if self.name.endswith("_"): return [ checks.Error( 'Field names must not end with an underscore.', obj=self, id='fields.E001', ) ] else: return [] def _check_object_id_field(self): try: self.model._meta.get_field(self.fk_field) except FieldDoesNotExist: return [ checks.Error( "The GenericForeignKey object ID references the " "nonexistent field '%s'." % self.fk_field, obj=self, id='contenttypes.E001', ) ] else: return [] def _check_content_type_field(self): """ Check if field named `field_name` in model `model` exists and is a valid content_type field (is a ForeignKey to ContentType). """ try: field = self.model._meta.get_field(self.ct_field) except FieldDoesNotExist: return [ checks.Error( "The GenericForeignKey content type references the " "nonexistent field '%s.%s'." % ( self.model._meta.object_name, self.ct_field ), obj=self, id='contenttypes.E002', ) ] else: if not isinstance(field, models.ForeignKey): return [ checks.Error( "'%s.%s' is not a ForeignKey." % ( self.model._meta.object_name, self.ct_field ), hint=( "GenericForeignKeys must use a ForeignKey to " "'contenttypes.ContentType' as the 'content_type' field." 
), obj=self, id='contenttypes.E003', ) ] elif field.remote_field.model != ContentType: return [ checks.Error( "'%s.%s' is not a ForeignKey to 'contenttypes.ContentType'." % ( self.model._meta.object_name, self.ct_field ), hint=( "GenericForeignKeys must use a ForeignKey to " "'contenttypes.ContentType' as the 'content_type' field." ), obj=self, id='contenttypes.E004', ) ] else: return [] def get_cache_name(self): return self.name def get_content_type(self, obj=None, id=None, using=None): if obj is not None: return ContentType.objects.db_manager(obj._state.db).get_for_model( obj, for_concrete_model=self.for_concrete_model) elif id is not None: return ContentType.objects.db_manager(using).get_for_id(id) else: # This should never happen. I love comments like this, don't you? raise Exception("Impossible arguments to GFK.get_content_type!") def get_prefetch_queryset(self, instances, queryset=None): if queryset is not None: raise ValueError("Custom queryset can't be used for this lookup.") # For efficiency, group the instances by content type and then do one # query per model fk_dict = defaultdict(set) # We need one instance for each group in order to get the right db: instance_dict = {} ct_attname = self.model._meta.get_field(self.ct_field).get_attname() for instance in instances: # We avoid looking for values if either ct_id or fkey value is None ct_id = getattr(instance, ct_attname) if ct_id is not None: fk_val = getattr(instance, self.fk_field) if fk_val is not None: fk_dict[ct_id].add(fk_val) instance_dict[ct_id] = instance ret_val = [] for ct_id, fkeys in fk_dict.items(): instance = instance_dict[ct_id] ct = self.get_content_type(id=ct_id, using=instance._state.db) ret_val.extend(ct.get_all_objects_for_this_type(pk__in=fkeys)) # For doing the join in Python, we have to match both the FK val and the # content type, so we use a callable that returns a (fk, class) pair. def gfk_key(obj): ct_id = getattr(obj, ct_attname) if ct_id is None: return None else: model = self.get_content_type(id=ct_id, using=obj._state.db).model_class() return (model._meta.pk.get_prep_value(getattr(obj, self.fk_field)), model) return ( ret_val, lambda obj: (obj.pk, obj.__class__), gfk_key, True, self.name, False, ) def __get__(self, instance, cls=None): if instance is None: return self # Don't use getattr(instance, self.ct_field) here because that might # reload the same ContentType over and over (#5570). Instead, get the # content type ID here, and later when the actual instance is needed, # use ContentType.objects.get_for_id(), which has a global cache. 
f = self.model._meta.get_field(self.ct_field) ct_id = getattr(instance, f.get_attname(), None) pk_val = getattr(instance, self.fk_field) rel_obj = self.get_cached_value(instance, default=None) if rel_obj is None and self.is_cached(instance): return rel_obj if rel_obj is not None: ct_match = ct_id == self.get_content_type(obj=rel_obj, using=instance._state.db).id pk_match = rel_obj._meta.pk.to_python(pk_val) == rel_obj.pk if ct_match and pk_match: return rel_obj else: rel_obj = None if ct_id is not None: ct = self.get_content_type(id=ct_id, using=instance._state.db) try: rel_obj = ct.get_object_for_this_type(pk=pk_val) except ObjectDoesNotExist: pass self.set_cached_value(instance, rel_obj) return rel_obj def __set__(self, instance, value): ct = None fk = None if value is not None: ct = self.get_content_type(obj=value) fk = value.pk setattr(instance, self.ct_field, ct) setattr(instance, self.fk_field, fk) self.set_cached_value(instance, value) class GenericRel(ForeignObjectRel): """ Used by GenericRelation to store information about the relation. """ def __init__(self, field, to, related_name=None, related_query_name=None, limit_choices_to=None): super().__init__( field, to, related_name=related_query_name or '+', related_query_name=related_query_name, limit_choices_to=limit_choices_to, on_delete=DO_NOTHING, ) class GenericRelation(ForeignObject): """ Provide a reverse to a relation created by a GenericForeignKey. """ # Field flags auto_created = False empty_strings_allowed = False many_to_many = False many_to_one = False one_to_many = True one_to_one = False rel_class = GenericRel mti_inherited = False def __init__(self, to, object_id_field='object_id', content_type_field='content_type', for_concrete_model=True, related_query_name=None, limit_choices_to=None, **kwargs): kwargs['rel'] = self.rel_class( self, to, related_query_name=related_query_name, limit_choices_to=limit_choices_to, ) # Reverse relations are always nullable (Django can't enforce that a # foreign key on the related model points to this model). kwargs['null'] = True kwargs['blank'] = True kwargs['on_delete'] = models.CASCADE kwargs['editable'] = False kwargs['serialize'] = False # This construct is somewhat of an abuse of ForeignObject. This field # represents a relation from pk to object_id field. But, this relation # isn't direct, the join is generated reverse along foreign key. So, # the from_field is object_id field, to_field is pk because of the # reverse join. super().__init__(to, from_fields=[object_id_field], to_fields=[], **kwargs) self.object_id_field_name = object_id_field self.content_type_field_name = content_type_field self.for_concrete_model = for_concrete_model def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_generic_foreign_key_existence(), ] def _is_matching_generic_foreign_key(self, field): """ Return True if field is a GenericForeignKey whose content type and object id fields correspond to the equivalent attributes on this GenericRelation. """ return ( isinstance(field, GenericForeignKey) and field.ct_field == self.content_type_field_name and field.fk_field == self.object_id_field_name ) def _check_generic_foreign_key_existence(self): target = self.remote_field.model if isinstance(target, ModelBase): fields = target._meta.private_fields if any(self._is_matching_generic_foreign_key(field) for field in fields): return [] else: return [ checks.Error( "The GenericRelation defines a relation with the model " "'%s', but that model does not have a GenericForeignKey." 
% target._meta.label, obj=self, id='contenttypes.E004', ) ] else: return [] def resolve_related_fields(self): self.to_fields = [self.model._meta.pk.name] return [(self.remote_field.model._meta.get_field(self.object_id_field_name), self.model._meta.pk)] def _get_path_info_with_parent(self, filtered_relation): """ Return the path that joins the current model through any parent models. The idea is that if you have a GFK defined on a parent model then we need to join the parent model first, then the child model. """ # With an inheritance chain ChildTag -> Tag and Tag defines the # GenericForeignKey, and a TaggedItem model has a GenericRelation to # ChildTag, then we need to generate a join from TaggedItem to Tag # (as Tag.object_id == TaggedItem.pk), and another join from Tag to # ChildTag (as that is where the relation is to). Do this by first # generating a join to the parent model, then generating joins to the # child models. path = [] opts = self.remote_field.model._meta.concrete_model._meta parent_opts = opts.get_field(self.object_id_field_name).model._meta target = parent_opts.pk path.append(PathInfo( from_opts=self.model._meta, to_opts=parent_opts, target_fields=(target,), join_field=self.remote_field, m2m=True, direct=False, filtered_relation=filtered_relation, )) # Collect joins needed for the parent -> child chain. This is easiest # to do if we collect joins for the child -> parent chain and then # reverse the direction (call to reverse() and use of # field.remote_field.get_path_info()). parent_field_chain = [] while parent_opts != opts: field = opts.get_ancestor_link(parent_opts.model) parent_field_chain.append(field) opts = field.remote_field.model._meta parent_field_chain.reverse() for field in parent_field_chain: path.extend(field.remote_field.path_infos) return path def get_path_info(self, filtered_relation=None): opts = self.remote_field.model._meta object_id_field = opts.get_field(self.object_id_field_name) if object_id_field.model != opts.model: return self._get_path_info_with_parent(filtered_relation) else: target = opts.pk return [PathInfo( from_opts=self.model._meta, to_opts=opts, target_fields=(target,), join_field=self.remote_field, m2m=True, direct=False, filtered_relation=filtered_relation, )] def get_reverse_path_info(self, filtered_relation=None): opts = self.model._meta from_opts = self.remote_field.model._meta return [PathInfo( from_opts=from_opts, to_opts=opts, target_fields=(opts.pk,), join_field=self, m2m=not self.unique, direct=False, filtered_relation=filtered_relation, )] def value_to_string(self, obj): qs = getattr(obj, self.name).all() return str([instance.pk for instance in qs]) def contribute_to_class(self, cls, name, **kwargs): kwargs['private_only'] = True super().contribute_to_class(cls, name, **kwargs) self.model = cls # Disable the reverse relation for fields inherited by subclasses of a # model in multi-table inheritance. The reverse relation points to the # field of the base model. if self.mti_inherited: self.remote_field.related_name = '+' self.remote_field.related_query_name = None setattr(cls, self.name, ReverseGenericManyToOneDescriptor(self.remote_field)) # Add get_RELATED_order() and set_RELATED_order() to the model this # field belongs to, if the model on the other end of this relation # is ordered with respect to its corresponding GenericForeignKey. 
if not cls._meta.abstract: def make_generic_foreign_order_accessors(related_model, model): if self._is_matching_generic_foreign_key(model._meta.order_with_respect_to): make_foreign_order_accessors(model, related_model) lazy_related_operation(make_generic_foreign_order_accessors, self.model, self.remote_field.model) def set_attributes_from_rel(self): pass def get_internal_type(self): return "ManyToManyField" def get_content_type(self): """ Return the content type associated with this field's model. """ return ContentType.objects.get_for_model(self.model, for_concrete_model=self.for_concrete_model) def get_extra_restriction(self, alias, remote_alias): field = self.remote_field.model._meta.get_field(self.content_type_field_name) contenttype_pk = self.get_content_type().pk lookup = field.get_lookup('exact')(field.get_col(remote_alias), contenttype_pk) return WhereNode([lookup], connector=AND) def bulk_related_objects(self, objs, using=DEFAULT_DB_ALIAS): """ Return all objects related to ``objs`` via this ``GenericRelation``. """ return self.remote_field.model._base_manager.db_manager(using).filter(**{ "%s__pk" % self.content_type_field_name: ContentType.objects.db_manager(using).get_for_model( self.model, for_concrete_model=self.for_concrete_model).pk, "%s__in" % self.object_id_field_name: [obj.pk for obj in objs] }) class ReverseGenericManyToOneDescriptor(ReverseManyToOneDescriptor): """ Accessor to the related objects manager on the one-to-many relation created by GenericRelation. In the example:: class Post(Model): comments = GenericRelation(Comment) ``post.comments`` is a ReverseGenericManyToOneDescriptor instance. """ @cached_property def related_manager_cls(self): return create_generic_related_manager( self.rel.model._default_manager.__class__, self.rel, ) @cached_property def related_manager_cache_key(self): # By default, GenericRel instances will be marked as hidden unless # related_query_name is given (their accessor name being "+" when # hidden), which would cause multiple GenericRelations declared on a # single model to collide, so always use the remote field's name. return self.field.get_cache_name() def create_generic_related_manager(superclass, rel): """ Factory function to create a manager that subclasses another manager (generally the default manager of a given model) and adds behaviors specific to generic relations. """ class GenericRelatedObjectManager(superclass): def __init__(self, instance=None): super().__init__() self.instance = instance self.model = rel.model self.get_content_type = functools.partial( ContentType.objects.db_manager(instance._state.db).get_for_model, for_concrete_model=rel.field.for_concrete_model, ) self.content_type = self.get_content_type(instance) self.content_type_field_name = rel.field.content_type_field_name self.object_id_field_name = rel.field.object_id_field_name self.prefetch_cache_name = rel.field.attname self.pk_val = instance.pk self.core_filters = { '%s__pk' % self.content_type_field_name: self.content_type.id, self.object_id_field_name: self.pk_val, } def __call__(self, *, manager): manager = getattr(self.model, manager) manager_class = create_generic_related_manager(manager.__class__, rel) return manager_class(instance=self.instance) do_not_call_in_templates = True def __str__(self): return repr(self) def _apply_rel_filters(self, queryset): """ Filter the queryset for the instance this manager is bound to. 
""" db = self._db or router.db_for_read(self.model, instance=self.instance) return queryset.using(db).filter(**self.core_filters) def _remove_prefetched_objects(self): try: self.instance._prefetched_objects_cache.pop(self.prefetch_cache_name) except (AttributeError, KeyError): pass # nothing to clear from cache def get_queryset(self): try: return self.instance._prefetched_objects_cache[self.prefetch_cache_name] except (AttributeError, KeyError): queryset = super().get_queryset() return self._apply_rel_filters(queryset) def get_prefetch_queryset(self, instances, queryset=None): if queryset is None: queryset = super().get_queryset() queryset._add_hints(instance=instances[0]) queryset = queryset.using(queryset._db or self._db) # Group instances by content types. content_type_queries = ( models.Q( (f'{self.content_type_field_name}__pk', content_type_id), (f'{self.object_id_field_name}__in', {obj.pk for obj in objs}), ) for content_type_id, objs in itertools.groupby( sorted(instances, key=lambda obj: self.get_content_type(obj).pk), lambda obj: self.get_content_type(obj).pk, ) ) query = models.Q(*content_type_queries, _connector=models.Q.OR) # We (possibly) need to convert object IDs to the type of the # instances' PK in order to match up instances: object_id_converter = instances[0]._meta.pk.to_python content_type_id_field_name = '%s_id' % self.content_type_field_name return ( queryset.filter(query), lambda relobj: ( object_id_converter(getattr(relobj, self.object_id_field_name)), getattr(relobj, content_type_id_field_name), ), lambda obj: (obj.pk, self.get_content_type(obj).pk), False, self.prefetch_cache_name, False, ) def add(self, *objs, bulk=True): self._remove_prefetched_objects() db = router.db_for_write(self.model, instance=self.instance) def check_and_update_obj(obj): if not isinstance(obj, self.model): raise TypeError("'%s' instance expected, got %r" % ( self.model._meta.object_name, obj )) setattr(obj, self.content_type_field_name, self.content_type) setattr(obj, self.object_id_field_name, self.pk_val) if bulk: pks = [] for obj in objs: if obj._state.adding or obj._state.db != db: raise ValueError( "%r instance isn't saved. Use bulk=False or save " "the object first." % obj ) check_and_update_obj(obj) pks.append(obj.pk) self.model._base_manager.using(db).filter(pk__in=pks).update(**{ self.content_type_field_name: self.content_type, self.object_id_field_name: self.pk_val, }) else: with transaction.atomic(using=db, savepoint=False): for obj in objs: check_and_update_obj(obj) obj.save() add.alters_data = True def remove(self, *objs, bulk=True): if not objs: return self._clear(self.filter(pk__in=[o.pk for o in objs]), bulk) remove.alters_data = True def clear(self, *, bulk=True): self._clear(self, bulk) clear.alters_data = True def _clear(self, queryset, bulk): self._remove_prefetched_objects() db = router.db_for_write(self.model, instance=self.instance) queryset = queryset.using(db) if bulk: # `QuerySet.delete()` creates its own atomic block which # contains the `pre_delete` and `post_delete` signal handlers. queryset.delete() else: with transaction.atomic(using=db, savepoint=False): for obj in queryset: obj.delete() _clear.alters_data = True def set(self, objs, *, bulk=True, clear=False): # Force evaluation of `objs` in case it's a queryset whose value # could be affected by `manager.clear()`. Refs #19816. 
objs = tuple(objs) db = router.db_for_write(self.model, instance=self.instance) with transaction.atomic(using=db, savepoint=False): if clear: self.clear() self.add(*objs, bulk=bulk) else: old_objs = set(self.using(db).all()) new_objs = [] for obj in objs: if obj in old_objs: old_objs.remove(obj) else: new_objs.append(obj) self.remove(*old_objs) self.add(*new_objs, bulk=bulk) set.alters_data = True def create(self, **kwargs): self._remove_prefetched_objects() kwargs[self.content_type_field_name] = self.content_type kwargs[self.object_id_field_name] = self.pk_val db = router.db_for_write(self.model, instance=self.instance) return super().using(db).create(**kwargs) create.alters_data = True def get_or_create(self, **kwargs): kwargs[self.content_type_field_name] = self.content_type kwargs[self.object_id_field_name] = self.pk_val db = router.db_for_write(self.model, instance=self.instance) return super().using(db).get_or_create(**kwargs) get_or_create.alters_data = True def update_or_create(self, **kwargs): kwargs[self.content_type_field_name] = self.content_type kwargs[self.object_id_field_name] = self.pk_val db = router.db_for_write(self.model, instance=self.instance) return super().using(db).update_or_create(**kwargs) update_or_create.alters_data = True return GenericRelatedObjectManager
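# --- Illustrative usage sketch (not part of the module above) ---
# A minimal, hypothetical pair of models (as they would appear in an installed
# app's models.py) showing how GenericForeignKey and GenericRelation defined
# above fit together: TaggedItem can point at any model, and Bookmark exposes
# the reverse side of the relation as ``tags``.

from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models


class TaggedItem(models.Model):
    tag = models.SlugField()
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')


class Bookmark(models.Model):
    url = models.URLField()
    tags = GenericRelation(TaggedItem)


# Usage, assuming migrations exist for these hypothetical models:
#   b = Bookmark.objects.create(url='https://example.com')
#   b.tags.create(tag='django')                   # related manager from GenericRelation
#   TaggedItem.objects.first().content_object     # resolved via the GFK descriptor
#   Bookmark.objects.filter(tags__tag='django')   # reverse filtering through the relation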
65d3fc793b6a7c870da4907a37f462258e9c4a539acee3abcdf8d1080e70db38
# RemovedInDjango50Warning. from django.core.serializers.base import ( PickleSerializer as BasePickleSerializer, ) from django.core.signing import JSONSerializer as BaseJSONSerializer JSONSerializer = BaseJSONSerializer PickleSerializer = BasePickleSerializer
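# --- Illustrative sketch (not part of the module above) ---
# These re-exports keep existing dotted paths working: JSONSerializer (from
# django.core.signing) is the default SESSION_SERIALIZER, while
# PickleSerializer is deprecated upstream (RemovedInDjango50Warning).
# A minimal round trip through the JSON serializer:

def _demo():
    payload = JSONSerializer().dumps({'cart': [1, 2, 3]})   # compact JSON as bytes
    assert JSONSerializer().loads(payload) == {'cart': [1, 2, 3]}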
ef0c1801065fb174d9584bb55b4cedc73d3dc3e2dc9aa144eabe6bf4ee51fe34
import re from django.utils.regex_helper import _lazy_re_compile # Regular expression for recognizing HEXEWKB and WKT. A prophylactic measure # to prevent potentially malicious input from reaching the underlying C # library. Not a substitute for good web security programming practices. hex_regex = _lazy_re_compile(r'^[0-9A-F]+$', re.I) wkt_regex = _lazy_re_compile( r'^(SRID=(?P<srid>\-?[0-9]+);)?' r'(?P<wkt>' r'(?P<type>POINT|LINESTRING|LINEARRING|POLYGON|MULTIPOINT|' r'MULTILINESTRING|MULTIPOLYGON|GEOMETRYCOLLECTION)' r'[ACEGIMLONPSRUTYZ0-9,\.\-\+\(\) ]+)$', re.I ) json_regex = _lazy_re_compile(r'^(\s+)?\{.*}(\s+)?$', re.DOTALL)
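# --- Illustrative usage sketch (not part of the module above) ---
# The patterns above act as a cheap first-line sanity check on inbound
# geometry strings before they reach the C libraries. A quick self-test using
# the module-level names defined above:

def _demo():
    assert wkt_regex.match('SRID=4326;POINT(5 23)').group('srid') == '4326'
    assert wkt_regex.match('point(5 23)').group('type') == 'point'  # re.I
    assert hex_regex.match('0101000000000000000000F03F')
    assert json_regex.match(' {"type": "Point", "coordinates": [5, 23]} ')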
e24e797e6254b109fa5cf26d09f1c7cdbc0831d7338557df6c1ec429c46f0d76
import binascii import json from django.conf import settings from django.contrib.messages.storage.base import BaseStorage, Message from django.core import signing from django.http import SimpleCookie from django.utils.safestring import SafeData, mark_safe class MessageEncoder(json.JSONEncoder): """ Compactly serialize instances of the ``Message`` class as JSON. """ message_key = '__json_message' def default(self, obj): if isinstance(obj, Message): # Using 0/1 here instead of False/True to produce more compact json is_safedata = 1 if isinstance(obj.message, SafeData) else 0 message = [self.message_key, is_safedata, obj.level, obj.message] if obj.extra_tags is not None: message.append(obj.extra_tags) return message return super().default(obj) class MessageDecoder(json.JSONDecoder): """ Decode JSON that includes serialized ``Message`` instances. """ def process_messages(self, obj): if isinstance(obj, list) and obj: if obj[0] == MessageEncoder.message_key: if obj[1]: obj[3] = mark_safe(obj[3]) return Message(*obj[2:]) return [self.process_messages(item) for item in obj] if isinstance(obj, dict): return {key: self.process_messages(value) for key, value in obj.items()} return obj def decode(self, s, **kwargs): decoded = super().decode(s, **kwargs) return self.process_messages(decoded) class MessageSerializer: def dumps(self, obj): return json.dumps( obj, separators=(',', ':'), cls=MessageEncoder, ).encode('latin-1') def loads(self, data): return json.loads(data.decode('latin-1'), cls=MessageDecoder) class CookieStorage(BaseStorage): """ Store messages in a cookie. """ cookie_name = 'messages' # uwsgi's default configuration enforces a maximum size of 4kb for all the # HTTP headers. In order to leave some room for other cookies and headers, # restrict the session cookie to 1/2 of 4kb. See #18781. max_cookie_size = 2048 not_finished = '__messagesnotfinished__' key_salt = 'django.contrib.messages' def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.signer = signing.get_cookie_signer(salt=self.key_salt) def _get(self, *args, **kwargs): """ Retrieve a list of messages from the messages cookie. If the not_finished sentinel value is found at the end of the message list, remove it and return a result indicating that not all messages were retrieved by this storage. """ data = self.request.COOKIES.get(self.cookie_name) messages = self._decode(data) all_retrieved = not (messages and messages[-1] == self.not_finished) if messages and not all_retrieved: # remove the sentinel value messages.pop() return messages, all_retrieved def _update_cookie(self, encoded_data, response): """ Either set the cookie with the encoded data if there is any data to store, or delete the cookie. """ if encoded_data: response.set_cookie( self.cookie_name, encoded_data, domain=settings.SESSION_COOKIE_DOMAIN, secure=settings.SESSION_COOKIE_SECURE or None, httponly=settings.SESSION_COOKIE_HTTPONLY or None, samesite=settings.SESSION_COOKIE_SAMESITE, ) else: response.delete_cookie( self.cookie_name, domain=settings.SESSION_COOKIE_DOMAIN, samesite=settings.SESSION_COOKIE_SAMESITE, ) def _store(self, messages, response, remove_oldest=True, *args, **kwargs): """ Store the messages to a cookie and return a list of any messages which could not be stored. If the encoded data is larger than ``max_cookie_size``, remove messages until the data fits (these are the messages which are returned), and add the not_finished sentinel value to indicate as much. 
""" unstored_messages = [] encoded_data = self._encode(messages) if self.max_cookie_size: # data is going to be stored eventually by SimpleCookie, which # adds its own overhead, which we must account for. cookie = SimpleCookie() # create outside the loop def stored_length(val): return len(cookie.value_encode(val)[1]) while encoded_data and stored_length(encoded_data) > self.max_cookie_size: if remove_oldest: unstored_messages.append(messages.pop(0)) else: unstored_messages.insert(0, messages.pop()) encoded_data = self._encode(messages + [self.not_finished], encode_empty=unstored_messages) self._update_cookie(encoded_data, response) return unstored_messages def _encode(self, messages, encode_empty=False): """ Return an encoded version of the messages list which can be stored as plain text. Since the data will be retrieved from the client-side, the encoded data also contains a hash to ensure that the data was not tampered with. """ if messages or encode_empty: return self.signer.sign_object(messages, serializer=MessageSerializer, compress=True) def _decode(self, data): """ Safely decode an encoded text stream back into a list of messages. If the encoded text stream contained an invalid hash or was in an invalid format, return None. """ if not data: return None try: return self.signer.unsign_object(data, serializer=MessageSerializer) except (signing.BadSignature, binascii.Error, json.JSONDecodeError): pass # Mark the data as used (so it gets removed) since something was wrong # with the data. self.used = True return None
807fd1661005bf93702a9db5a19f0a36852ff22b4acb4d49c75b590d8ad8b6a0
import datetime import json from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange, Range from django.contrib.postgres import forms, lookups from django.db import models from django.db.models.lookups import PostgresOperatorLookup from .utils import AttributeSetter __all__ = [ 'RangeField', 'IntegerRangeField', 'BigIntegerRangeField', 'DecimalRangeField', 'DateTimeRangeField', 'DateRangeField', 'RangeBoundary', 'RangeOperators', ] class RangeBoundary(models.Expression): """A class that represents range boundaries.""" def __init__(self, inclusive_lower=True, inclusive_upper=False): self.lower = '[' if inclusive_lower else '(' self.upper = ']' if inclusive_upper else ')' def as_sql(self, compiler, connection): return "'%s%s'" % (self.lower, self.upper), [] class RangeOperators: # https://www.postgresql.org/docs/current/functions-range.html#RANGE-OPERATORS-TABLE EQUAL = '=' NOT_EQUAL = '<>' CONTAINS = '@>' CONTAINED_BY = '<@' OVERLAPS = '&&' FULLY_LT = '<<' FULLY_GT = '>>' NOT_LT = '&>' NOT_GT = '&<' ADJACENT_TO = '-|-' class RangeField(models.Field): empty_strings_allowed = False def __init__(self, *args, **kwargs): if 'default_bounds' in kwargs: raise TypeError( f"Cannot use 'default_bounds' with {self.__class__.__name__}." ) # Initializing base_field here ensures that its model matches the model for self. if hasattr(self, 'base_field'): self.base_field = self.base_field() super().__init__(*args, **kwargs) @property def model(self): try: return self.__dict__['model'] except KeyError: raise AttributeError("'%s' object has no attribute 'model'" % self.__class__.__name__) @model.setter def model(self, model): self.__dict__['model'] = model self.base_field.model = model @classmethod def _choices_is_value(cls, value): return isinstance(value, (list, tuple)) or super()._choices_is_value(value) def get_prep_value(self, value): if value is None: return None elif isinstance(value, Range): return value elif isinstance(value, (list, tuple)): return self.range_type(value[0], value[1]) return value def to_python(self, value): if isinstance(value, str): # Assume we're deserializing vals = json.loads(value) for end in ('lower', 'upper'): if end in vals: vals[end] = self.base_field.to_python(vals[end]) value = self.range_type(**vals) elif isinstance(value, (list, tuple)): value = self.range_type(value[0], value[1]) return value def set_attributes_from_name(self, name): super().set_attributes_from_name(name) self.base_field.set_attributes_from_name(name) def value_to_string(self, obj): value = self.value_from_object(obj) if value is None: return None if value.isempty: return json.dumps({"empty": True}) base_field = self.base_field result = {"bounds": value._bounds} for end in ('lower', 'upper'): val = getattr(value, end) if val is None: result[end] = None else: obj = AttributeSetter(base_field.attname, val) result[end] = base_field.value_to_string(obj) return json.dumps(result) def formfield(self, **kwargs): kwargs.setdefault('form_class', self.form_field) return super().formfield(**kwargs) CANONICAL_RANGE_BOUNDS = '[)' class ContinuousRangeField(RangeField): """ Continuous range field. It allows specifying default bounds for list and tuple inputs. 
""" def __init__(self, *args, default_bounds=CANONICAL_RANGE_BOUNDS, **kwargs): if default_bounds not in ('[)', '(]', '()', '[]'): raise ValueError("default_bounds must be one of '[)', '(]', '()', or '[]'.") self.default_bounds = default_bounds super().__init__(*args, **kwargs) def get_prep_value(self, value): if isinstance(value, (list, tuple)): return self.range_type(value[0], value[1], self.default_bounds) return super().get_prep_value(value) def formfield(self, **kwargs): kwargs.setdefault('default_bounds', self.default_bounds) return super().formfield(**kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.default_bounds and self.default_bounds != CANONICAL_RANGE_BOUNDS: kwargs['default_bounds'] = self.default_bounds return name, path, args, kwargs class IntegerRangeField(RangeField): base_field = models.IntegerField range_type = NumericRange form_field = forms.IntegerRangeField def db_type(self, connection): return 'int4range' class BigIntegerRangeField(RangeField): base_field = models.BigIntegerField range_type = NumericRange form_field = forms.IntegerRangeField def db_type(self, connection): return 'int8range' class DecimalRangeField(ContinuousRangeField): base_field = models.DecimalField range_type = NumericRange form_field = forms.DecimalRangeField def db_type(self, connection): return 'numrange' class DateTimeRangeField(ContinuousRangeField): base_field = models.DateTimeField range_type = DateTimeTZRange form_field = forms.DateTimeRangeField def db_type(self, connection): return 'tstzrange' class DateRangeField(RangeField): base_field = models.DateField range_type = DateRange form_field = forms.DateRangeField def db_type(self, connection): return 'daterange' RangeField.register_lookup(lookups.DataContains) RangeField.register_lookup(lookups.ContainedBy) RangeField.register_lookup(lookups.Overlap) class DateTimeRangeContains(PostgresOperatorLookup): """ Lookup for Date/DateTimeRange containment to cast the rhs to the correct type. """ lookup_name = 'contains' postgres_operator = RangeOperators.CONTAINS def process_rhs(self, compiler, connection): # Transform rhs value for db lookup. if isinstance(self.rhs, datetime.date): value = models.Value(self.rhs) self.rhs = value.resolve_expression(compiler.query) return super().process_rhs(compiler, connection) def as_postgresql(self, compiler, connection): sql, params = super().as_postgresql(compiler, connection) # Cast the rhs if needed. cast_sql = '' if ( isinstance(self.rhs, models.Expression) and self.rhs._output_field_or_none and # Skip cast if rhs has a matching range type. not isinstance(self.rhs._output_field_or_none, self.lhs.output_field.__class__) ): cast_internal_type = self.lhs.output_field.base_field.get_internal_type() cast_sql = '::{}'.format(connection.data_types.get(cast_internal_type)) return '%s%s' % (sql, cast_sql), params DateRangeField.register_lookup(DateTimeRangeContains) DateTimeRangeField.register_lookup(DateTimeRangeContains) class RangeContainedBy(PostgresOperatorLookup): lookup_name = 'contained_by' type_mapping = { 'smallint': 'int4range', 'integer': 'int4range', 'bigint': 'int8range', 'double precision': 'numrange', 'numeric': 'numrange', 'date': 'daterange', 'timestamp with time zone': 'tstzrange', } postgres_operator = RangeOperators.CONTAINED_BY def process_rhs(self, compiler, connection): rhs, rhs_params = super().process_rhs(compiler, connection) # Ignore precision for DecimalFields. 
db_type = self.lhs.output_field.cast_db_type(connection).split('(')[0] cast_type = self.type_mapping[db_type] return '%s::%s' % (rhs, cast_type), rhs_params def process_lhs(self, compiler, connection): lhs, lhs_params = super().process_lhs(compiler, connection) if isinstance(self.lhs.output_field, models.FloatField): lhs = '%s::numeric' % lhs elif isinstance(self.lhs.output_field, models.SmallIntegerField): lhs = '%s::integer' % lhs return lhs, lhs_params def get_prep_lookup(self): return RangeField().get_prep_value(self.rhs) models.DateField.register_lookup(RangeContainedBy) models.DateTimeField.register_lookup(RangeContainedBy) models.IntegerField.register_lookup(RangeContainedBy) models.FloatField.register_lookup(RangeContainedBy) models.DecimalField.register_lookup(RangeContainedBy) @RangeField.register_lookup class FullyLessThan(PostgresOperatorLookup): lookup_name = 'fully_lt' postgres_operator = RangeOperators.FULLY_LT @RangeField.register_lookup class FullGreaterThan(PostgresOperatorLookup): lookup_name = 'fully_gt' postgres_operator = RangeOperators.FULLY_GT @RangeField.register_lookup class NotLessThan(PostgresOperatorLookup): lookup_name = 'not_lt' postgres_operator = RangeOperators.NOT_LT @RangeField.register_lookup class NotGreaterThan(PostgresOperatorLookup): lookup_name = 'not_gt' postgres_operator = RangeOperators.NOT_GT @RangeField.register_lookup class AdjacentToLookup(PostgresOperatorLookup): lookup_name = 'adjacent_to' postgres_operator = RangeOperators.ADJACENT_TO @RangeField.register_lookup class RangeStartsWith(models.Transform): lookup_name = 'startswith' function = 'lower' @property def output_field(self): return self.lhs.output_field.base_field @RangeField.register_lookup class RangeEndsWith(models.Transform): lookup_name = 'endswith' function = 'upper' @property def output_field(self): return self.lhs.output_field.base_field @RangeField.register_lookup class IsEmpty(models.Transform): lookup_name = 'isempty' function = 'isempty' output_field = models.BooleanField() @RangeField.register_lookup class LowerInclusive(models.Transform): lookup_name = 'lower_inc' function = 'LOWER_INC' output_field = models.BooleanField() @RangeField.register_lookup class LowerInfinite(models.Transform): lookup_name = 'lower_inf' function = 'LOWER_INF' output_field = models.BooleanField() @RangeField.register_lookup class UpperInclusive(models.Transform): lookup_name = 'upper_inc' function = 'UPPER_INC' output_field = models.BooleanField() @RangeField.register_lookup class UpperInfinite(models.Transform): lookup_name = 'upper_inf' function = 'UPPER_INF' output_field = models.BooleanField()
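# --- Illustrative usage sketch (not part of the module above) ---
# A hypothetical model (as it would appear in an installed app's models.py)
# plus queries showing the range fields and a few of the lookups registered
# above. Requires django.contrib.postgres and a PostgreSQL database; ``models``
# is the django.db.models import from the top of this module.


class Event(models.Model):
    name = models.CharField(max_length=200)
    ages = IntegerRangeField()        # int4range column
    timeslot = DateTimeRangeField()   # tstzrange column

    class Meta:
        app_label = 'demo'  # hypothetical app


# Usage, assuming migrations exist for this hypothetical model:
#   from psycopg2.extras import NumericRange
#   Event.objects.create(name='PyCon', ages=(18, 65), timeslot=(start, end))
#   Event.objects.filter(ages__contains=NumericRange(25, 30))   # @> operator
#   Event.objects.filter(ages__overlap=NumericRange(20, 30))    # && operator
#   Event.objects.filter(ages__startswith=18)                   # lower(ages) transform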
99f4356e6b0717c2323406b456839a7ac6d736f51136d4af749fbc1de5d7e7b9
from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange from django import forms from django.core import exceptions from django.forms.widgets import HiddenInput, MultiWidget from django.utils.translation import gettext_lazy as _ __all__ = [ 'BaseRangeField', 'IntegerRangeField', 'DecimalRangeField', 'DateTimeRangeField', 'DateRangeField', 'HiddenRangeWidget', 'RangeWidget', ] class RangeWidget(MultiWidget): def __init__(self, base_widget, attrs=None): widgets = (base_widget, base_widget) super().__init__(widgets, attrs) def decompress(self, value): if value: return (value.lower, value.upper) return (None, None) class HiddenRangeWidget(RangeWidget): """A widget that splits input into two <input type="hidden"> inputs.""" def __init__(self, attrs=None): super().__init__(HiddenInput, attrs) class BaseRangeField(forms.MultiValueField): default_error_messages = { 'invalid': _('Enter two valid values.'), 'bound_ordering': _('The start of the range must not exceed the end of the range.'), } hidden_widget = HiddenRangeWidget def __init__(self, **kwargs): if 'widget' not in kwargs: kwargs['widget'] = RangeWidget(self.base_field.widget) if 'fields' not in kwargs: kwargs['fields'] = [self.base_field(required=False), self.base_field(required=False)] kwargs.setdefault('required', False) kwargs.setdefault('require_all_fields', False) self.range_kwargs = {} if default_bounds := kwargs.pop('default_bounds', None): self.range_kwargs = {'bounds': default_bounds} super().__init__(**kwargs) def prepare_value(self, value): lower_base, upper_base = self.fields if isinstance(value, self.range_type): return [ lower_base.prepare_value(value.lower), upper_base.prepare_value(value.upper), ] if value is None: return [ lower_base.prepare_value(None), upper_base.prepare_value(None), ] return value def compress(self, values): if not values: return None lower, upper = values if lower is not None and upper is not None and lower > upper: raise exceptions.ValidationError( self.error_messages['bound_ordering'], code='bound_ordering', ) try: range_value = self.range_type(lower, upper, **self.range_kwargs) except TypeError: raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', ) else: return range_value class IntegerRangeField(BaseRangeField): default_error_messages = {'invalid': _('Enter two whole numbers.')} base_field = forms.IntegerField range_type = NumericRange class DecimalRangeField(BaseRangeField): default_error_messages = {'invalid': _('Enter two numbers.')} base_field = forms.DecimalField range_type = NumericRange class DateTimeRangeField(BaseRangeField): default_error_messages = {'invalid': _('Enter two valid date/times.')} base_field = forms.DateTimeField range_type = DateTimeTZRange class DateRangeField(BaseRangeField): default_error_messages = {'invalid': _('Enter two valid dates.')} base_field = forms.DateField range_type = DateRange
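# --- Illustrative usage sketch (not part of the module above) ---
# A hypothetical form using the IntegerRangeField defined above: the
# RangeWidget renders two inputs named ``<field>_0`` and ``<field>_1``, and
# compress() returns a psycopg2 NumericRange (or None when both halves are
# empty). Assumes Django settings are configured.

from django import forms as dj_forms
from django.contrib.postgres.forms import IntegerRangeField


class AgesForm(dj_forms.Form):
    ages = IntegerRangeField()


def _demo():
    form = AgesForm({'ages_0': '18', 'ages_1': '65'})
    assert form.is_valid()
    rng = form.cleaned_data['ages']              # NumericRange(18, 65)
    assert (rng.lower, rng.upper) == (18, 65)

    backwards = AgesForm({'ages_0': '65', 'ages_1': '18'})
    assert not backwards.is_valid()              # 'bound_ordering' error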
54ba1cc2f09af241b796dd6a98a414e9fea4ff2f3156c40c1eb61cffaf1e1ee3
import warnings from django.contrib.postgres.fields import ArrayField from django.db.models import ( Aggregate, BooleanField, JSONField, TextField, Value, ) from django.utils.deprecation import RemovedInDjango50Warning from .mixins import OrderableAggMixin __all__ = [ 'ArrayAgg', 'BitAnd', 'BitOr', 'BitXor', 'BoolAnd', 'BoolOr', 'JSONBAgg', 'StringAgg', ] # RemovedInDjango50Warning NOT_PROVIDED = object() class DeprecatedConvertValueMixin: def __init__(self, *expressions, default=NOT_PROVIDED, **extra): if default is NOT_PROVIDED: default = None self._default_provided = False else: self._default_provided = True super().__init__(*expressions, default=default, **extra) def convert_value(self, value, expression, connection): if value is None and not self._default_provided: warnings.warn(self.deprecation_msg, category=RemovedInDjango50Warning) return self.deprecation_value return value class ArrayAgg(DeprecatedConvertValueMixin, OrderableAggMixin, Aggregate): function = 'ARRAY_AGG' template = '%(function)s(%(distinct)s%(expressions)s %(ordering)s)' allow_distinct = True # RemovedInDjango50Warning deprecation_value = property(lambda self: []) deprecation_msg = ( 'In Django 5.0, ArrayAgg() will return None instead of an empty list ' 'if there are no rows. Pass default=None to opt into the new behavior ' 'and silence this warning or default=Value([]) to keep the previous ' 'behavior.' ) @property def output_field(self): return ArrayField(self.source_expressions[0].output_field) class BitAnd(Aggregate): function = 'BIT_AND' class BitOr(Aggregate): function = 'BIT_OR' class BitXor(Aggregate): function = 'BIT_XOR' class BoolAnd(Aggregate): function = 'BOOL_AND' output_field = BooleanField() class BoolOr(Aggregate): function = 'BOOL_OR' output_field = BooleanField() class JSONBAgg(DeprecatedConvertValueMixin, OrderableAggMixin, Aggregate): function = 'JSONB_AGG' template = '%(function)s(%(distinct)s%(expressions)s %(ordering)s)' allow_distinct = True output_field = JSONField() # RemovedInDjango50Warning deprecation_value = '[]' deprecation_msg = ( "In Django 5.0, JSONBAgg() will return None instead of an empty list " "if there are no rows. Pass default=None to opt into the new behavior " "and silence this warning or default=Value('[]') to keep the previous " "behavior." ) class StringAgg(DeprecatedConvertValueMixin, OrderableAggMixin, Aggregate): function = 'STRING_AGG' template = '%(function)s(%(distinct)s%(expressions)s %(ordering)s)' allow_distinct = True output_field = TextField() # RemovedInDjango50Warning deprecation_value = '' deprecation_msg = ( "In Django 5.0, StringAgg() will return None instead of an empty " "string if there are no rows. Pass default=None to opt into the new " "behavior and silence this warning or default=Value('') to keep the " "previous behavior." ) def __init__(self, expression, delimiter, **extra): delimiter_expr = Value(str(delimiter)) super().__init__(expression, delimiter_expr, **extra)
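# --- Illustrative usage sketch (not part of the module above) ---
# Hypothetical queryset annotations; ``Publisher`` is a placeholder model
# passed in by the caller (PostgreSQL only). Passing ``default=`` explicitly
# opts into the Django 5.0 behavior and silences the RemovedInDjango50Warning
# emitted by the mixin above.

from django.contrib.postgres.aggregates import ArrayAgg, StringAgg
from django.db.models import Value


def annotate_publishers(Publisher):
    return Publisher.objects.annotate(
        # [] (rather than None) for publishers with no books.
        book_titles=ArrayAgg('book__title', ordering='book__title', default=Value([])),
        # Comma-separated author names, '' when there are none.
        authors=StringAgg('book__authors__name', delimiter=', ', distinct=True, default=Value('')),
    )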
e728a3031e1322a48020f9e523f745bf8feb2781d69a05424ecfff68c95c3e3f
from django.db.models import Aggregate, FloatField, IntegerField __all__ = [ 'CovarPop', 'Corr', 'RegrAvgX', 'RegrAvgY', 'RegrCount', 'RegrIntercept', 'RegrR2', 'RegrSlope', 'RegrSXX', 'RegrSXY', 'RegrSYY', 'StatAggregate', ] class StatAggregate(Aggregate): output_field = FloatField() def __init__(self, y, x, output_field=None, filter=None, default=None): if not x or not y: raise ValueError('Both y and x must be provided.') super().__init__(y, x, output_field=output_field, filter=filter, default=default) class Corr(StatAggregate): function = 'CORR' class CovarPop(StatAggregate): def __init__(self, y, x, sample=False, filter=None, default=None): self.function = 'COVAR_SAMP' if sample else 'COVAR_POP' super().__init__(y, x, filter=filter, default=default) class RegrAvgX(StatAggregate): function = 'REGR_AVGX' class RegrAvgY(StatAggregate): function = 'REGR_AVGY' class RegrCount(StatAggregate): function = 'REGR_COUNT' output_field = IntegerField() empty_result_set_value = 0 class RegrIntercept(StatAggregate): function = 'REGR_INTERCEPT' class RegrR2(StatAggregate): function = 'REGR_R2' class RegrSlope(StatAggregate): function = 'REGR_SLOPE' class RegrSXX(StatAggregate): function = 'REGR_SXX' class RegrSXY(StatAggregate): function = 'REGR_SXY' class RegrSYY(StatAggregate): function = 'REGR_SYY'
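# --- Illustrative usage sketch (not part of the module above) ---
# Hypothetical two-column statistics over a placeholder ``Measurement`` model
# passed in by the caller (PostgreSQL only). Note the aggregates take ``y``
# first, then ``x``.

from django.contrib.postgres.aggregates import Corr, RegrCount, RegrSlope


def describe(Measurement):
    return Measurement.objects.aggregate(
        correlation=Corr(y='weight', x='height'),
        n_pairs=RegrCount(y='weight', x='height'),   # 0 (not None) on empty sets
        slope=RegrSlope(y='weight', x='height'),
    )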
5c0e470fcde66efca3d6bcdee9f1f5860bc9731c121b276f6596978960e3c1b4
from django.db.models.expressions import OrderByList class OrderableAggMixin: def __init__(self, *expressions, ordering=(), **extra): if isinstance(ordering, (list, tuple)): self.order_by = OrderByList(*ordering) else: self.order_by = OrderByList(ordering) super().__init__(*expressions, **extra) def resolve_expression(self, *args, **kwargs): self.order_by = self.order_by.resolve_expression(*args, **kwargs) return super().resolve_expression(*args, **kwargs) def get_source_expressions(self): return super().get_source_expressions() + [self.order_by] def set_source_expressions(self, exprs): *exprs, self.order_by = exprs return super().set_source_expressions(exprs) def as_sql(self, compiler, connection): order_by_sql, order_by_params = compiler.compile(self.order_by) sql, sql_params = super().as_sql(compiler, connection, ordering=order_by_sql) return sql, (*sql_params, *order_by_params)
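# --- Illustrative sketch (not part of the module above) ---
# The mixin compiles its ``ordering`` argument and hands the SQL to the
# aggregate's ``%(ordering)s`` placeholder, so the hypothetical query below
# renders roughly as ARRAY_AGG(title ORDER BY published DESC). ``Book`` and
# ``published`` are placeholder names.

from django.contrib.postgres.aggregates import ArrayAgg
from django.db.models import F


def titles_newest_first(Book):
    return Book.objects.aggregate(
        titles=ArrayAgg('title', ordering=F('published').desc()),
    )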
a0df0b272bb3d8d231d8f3068f3f08e307dffd638a9424990638eebc9119448c
""" Management utility to create superusers. """ import getpass import os import sys from django.contrib.auth import get_user_model from django.contrib.auth.management import get_default_username from django.contrib.auth.password_validation import validate_password from django.core import exceptions from django.core.management.base import BaseCommand, CommandError from django.db import DEFAULT_DB_ALIAS from django.utils.text import capfirst class NotRunningInTTYException(Exception): pass PASSWORD_FIELD = 'password' class Command(BaseCommand): help = 'Used to create a superuser.' requires_migrations_checks = True stealth_options = ('stdin',) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.UserModel = get_user_model() self.username_field = self.UserModel._meta.get_field(self.UserModel.USERNAME_FIELD) def add_arguments(self, parser): parser.add_argument( '--%s' % self.UserModel.USERNAME_FIELD, help='Specifies the login for the superuser.', ) parser.add_argument( '--noinput', '--no-input', action='store_false', dest='interactive', help=( 'Tells Django to NOT prompt the user for input of any kind. ' 'You must use --%s with --noinput, along with an option for ' 'any other required field. Superusers created with --noinput will ' 'not be able to log in until they\'re given a valid password.' % self.UserModel.USERNAME_FIELD ), ) parser.add_argument( '--database', default=DEFAULT_DB_ALIAS, help='Specifies the database to use. Default is "default".', ) for field_name in self.UserModel.REQUIRED_FIELDS: field = self.UserModel._meta.get_field(field_name) if field.many_to_many: if field.remote_field.through and not field.remote_field.through._meta.auto_created: raise CommandError( "Required field '%s' specifies a many-to-many " "relation through model, which is not supported." % field_name ) else: parser.add_argument( '--%s' % field_name, action='append', help=( 'Specifies the %s for the superuser. Can be used ' 'multiple times.' % field_name, ), ) else: parser.add_argument( '--%s' % field_name, help='Specifies the %s for the superuser.' % field_name, ) def execute(self, *args, **options): self.stdin = options.get('stdin', sys.stdin) # Used for testing return super().execute(*args, **options) def handle(self, *args, **options): username = options[self.UserModel.USERNAME_FIELD] database = options['database'] user_data = {} verbose_field_name = self.username_field.verbose_name try: self.UserModel._meta.get_field(PASSWORD_FIELD) except exceptions.FieldDoesNotExist: pass else: # If not provided, create the user with an unusable password. user_data[PASSWORD_FIELD] = None try: if options['interactive']: # Same as user_data but without many to many fields and with # foreign keys as fake model instances instead of raw IDs. fake_user_data = {} if hasattr(self.stdin, 'isatty') and not self.stdin.isatty(): raise NotRunningInTTYException default_username = get_default_username(database=database) if username: error_msg = self._validate_username(username, verbose_field_name, database) if error_msg: self.stderr.write(error_msg) username = None elif username == '': raise CommandError('%s cannot be blank.' % capfirst(verbose_field_name)) # Prompt for username. 
while username is None: message = self._get_input_message(self.username_field, default_username) username = self.get_input_data(self.username_field, message, default_username) if username: error_msg = self._validate_username(username, verbose_field_name, database) if error_msg: self.stderr.write(error_msg) username = None continue user_data[self.UserModel.USERNAME_FIELD] = username fake_user_data[self.UserModel.USERNAME_FIELD] = ( self.username_field.remote_field.model(username) if self.username_field.remote_field else username ) # Prompt for required fields. for field_name in self.UserModel.REQUIRED_FIELDS: field = self.UserModel._meta.get_field(field_name) user_data[field_name] = options[field_name] if user_data[field_name] is not None: user_data[field_name] = field.clean(user_data[field_name], None) while user_data[field_name] is None: message = self._get_input_message(field) input_value = self.get_input_data(field, message) user_data[field_name] = input_value if field.many_to_many and input_value: if not input_value.strip(): user_data[field_name] = None self.stderr.write('Error: This field cannot be blank.') continue user_data[field_name] = [pk.strip() for pk in input_value.split(',')] if not field.many_to_many: fake_user_data[field_name] = user_data[field_name] # Wrap any foreign keys in fake model instances. if field.many_to_one: fake_user_data[field_name] = field.remote_field.model(user_data[field_name]) # Prompt for a password if the model has one. while PASSWORD_FIELD in user_data and user_data[PASSWORD_FIELD] is None: password = getpass.getpass() password2 = getpass.getpass('Password (again): ') if password != password2: self.stderr.write("Error: Your passwords didn't match.") # Don't validate passwords that don't match. continue if password.strip() == '': self.stderr.write("Error: Blank passwords aren't allowed.") # Don't validate blank passwords. continue try: validate_password(password2, self.UserModel(**fake_user_data)) except exceptions.ValidationError as err: self.stderr.write('\n'.join(err.messages)) response = input('Bypass password validation and create user anyway? [y/N]: ') if response.lower() != 'y': continue user_data[PASSWORD_FIELD] = password else: # Non-interactive mode. # Use password from environment variable, if provided. if PASSWORD_FIELD in user_data and 'DJANGO_SUPERUSER_PASSWORD' in os.environ: user_data[PASSWORD_FIELD] = os.environ['DJANGO_SUPERUSER_PASSWORD'] # Use username from environment variable, if not provided in # options. if username is None: username = os.environ.get('DJANGO_SUPERUSER_' + self.UserModel.USERNAME_FIELD.upper()) if username is None: raise CommandError('You must use --%s with --noinput.' % self.UserModel.USERNAME_FIELD) else: error_msg = self._validate_username(username, verbose_field_name, database) if error_msg: raise CommandError(error_msg) user_data[self.UserModel.USERNAME_FIELD] = username for field_name in self.UserModel.REQUIRED_FIELDS: env_var = 'DJANGO_SUPERUSER_' + field_name.upper() value = options[field_name] or os.environ.get(env_var) if not value: raise CommandError('You must use --%s with --noinput.' 
% field_name) field = self.UserModel._meta.get_field(field_name) user_data[field_name] = field.clean(value, None) if field.many_to_many and isinstance(user_data[field_name], str): user_data[field_name] = [ pk.strip() for pk in user_data[field_name].split(',') ] self.UserModel._default_manager.db_manager(database).create_superuser(**user_data) if options['verbosity'] >= 1: self.stdout.write("Superuser created successfully.") except KeyboardInterrupt: self.stderr.write('\nOperation cancelled.') sys.exit(1) except exceptions.ValidationError as e: raise CommandError('; '.join(e.messages)) except NotRunningInTTYException: self.stdout.write( 'Superuser creation skipped due to not running in a TTY. ' 'You can run `manage.py createsuperuser` in your project ' 'to create one manually.' ) def get_input_data(self, field, message, default=None): """ Override this method if you want to customize data inputs or validation exceptions. """ raw_value = input(message) if default and raw_value == '': raw_value = default try: val = field.clean(raw_value, None) except exceptions.ValidationError as e: self.stderr.write("Error: %s" % '; '.join(e.messages)) val = None return val def _get_input_message(self, field, default=None): return '%s%s%s: ' % ( capfirst(field.verbose_name), " (leave blank to use '%s')" % default if default else '', ' (%s.%s)' % ( field.remote_field.model._meta.object_name, field.m2m_target_field_name() if field.many_to_many else field.remote_field.field_name, ) if field.remote_field else '', ) def _validate_username(self, username, verbose_field_name, database): """Validate username. If invalid, return a string error message.""" if self.username_field.unique: try: self.UserModel._default_manager.db_manager(database).get_by_natural_key(username) except self.UserModel.DoesNotExist: pass else: return 'Error: That %s is already taken.' % verbose_field_name if not username: return '%s cannot be blank.' % capfirst(verbose_field_name) try: self.username_field.clean(username, None) except exceptions.ValidationError as e: return '; '.join(e.messages)
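# --- Illustrative usage sketch (not part of the command above) ---
# Non-interactive superuser creation, e.g. from a deployment script. The
# username/email/password values are placeholders; in non-interactive mode the
# password is read from the DJANGO_SUPERUSER_PASSWORD environment variable, as
# handled in handle() above.

import os

from django.core.management import call_command


def bootstrap_admin():
    os.environ.setdefault('DJANGO_SUPERUSER_PASSWORD', 's3cret-change-me')
    call_command(
        'createsuperuser',
        interactive=False,          # same as --noinput
        username='admin',           # --<USERNAME_FIELD> for the default User model
        email='admin@example.com',  # --email (a REQUIRED_FIELDS entry)
    )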
60b8c1206b26cb7f34bc4fba00949a5556ed739f52a693ac43e7ef66659d27f4
from django.apps import apps from django.core.exceptions import FieldDoesNotExist, PermissionDenied from django.http import Http404, JsonResponse from django.views.generic.list import BaseListView class AutocompleteJsonView(BaseListView): """Handle AutocompleteWidget's AJAX requests for data.""" paginate_by = 20 admin_site = None def get(self, request, *args, **kwargs): """ Return a JsonResponse with search results as defined in serialize_result(), by default: { results: [{id: "123" text: "foo"}], pagination: {more: true} } """ self.term, self.model_admin, self.source_field, to_field_name = self.process_request(request) if not self.has_perm(request): raise PermissionDenied self.object_list = self.get_queryset() context = self.get_context_data() return JsonResponse({ 'results': [ self.serialize_result(obj, to_field_name) for obj in context['object_list'] ], 'pagination': {'more': context['page_obj'].has_next()}, }) def serialize_result(self, obj, to_field_name): """ Convert the provided model object to a dictionary that is added to the results list. """ return {'id': str(getattr(obj, to_field_name)), 'text': str(obj)} def get_paginator(self, *args, **kwargs): """Use the ModelAdmin's paginator.""" return self.model_admin.get_paginator(self.request, *args, **kwargs) def get_queryset(self): """Return queryset based on ModelAdmin.get_search_results().""" qs = self.model_admin.get_queryset(self.request) qs = qs.complex_filter(self.source_field.get_limit_choices_to()) qs, search_use_distinct = self.model_admin.get_search_results(self.request, qs, self.term) if search_use_distinct: qs = qs.distinct() return qs def process_request(self, request): """ Validate request integrity, extract and return request parameters. Since the subsequent view permission check requires the target model admin, which is determined here, raise PermissionDenied if the requested app, model or field are malformed. Raise Http404 if the target model admin is not configured properly with search_fields. """ term = request.GET.get('term', '') try: app_label = request.GET['app_label'] model_name = request.GET['model_name'] field_name = request.GET['field_name'] except KeyError as e: raise PermissionDenied from e # Retrieve objects from parameters. try: source_model = apps.get_model(app_label, model_name) except LookupError as e: raise PermissionDenied from e try: source_field = source_model._meta.get_field(field_name) except FieldDoesNotExist as e: raise PermissionDenied from e try: remote_model = source_field.remote_field.model except AttributeError as e: raise PermissionDenied from e try: model_admin = self.admin_site._registry[remote_model] except KeyError as e: raise PermissionDenied from e # Validate suitability of objects. if not model_admin.get_search_fields(request): raise Http404( '%s must have search_fields for the autocomplete_view.' % type(model_admin).__qualname__ ) to_field_name = getattr(source_field.remote_field, 'field_name', remote_model._meta.pk.attname) to_field_name = remote_model._meta.get_field(to_field_name).attname if not model_admin.to_field_allowed(request, to_field_name): raise PermissionDenied return term, model_admin, source_field, to_field_name def has_perm(self, request, obj=None): """Check if user has permission to access the related model.""" return self.model_admin.has_view_permission(request, obj=obj)
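# --- Illustrative sketch (not part of the module above) ---
# serialize_result() is the extension point for adding attributes to each
# autocomplete result. One way to wire a subclass in is through a custom
# AdminSite, shown below with hypothetical names; ``notes`` is assumed to be a
# field on the related model.

from django.contrib import admin
from django.contrib.admin.views.autocomplete import AutocompleteJsonView


class NotesAutocompleteJsonView(AutocompleteJsonView):
    def serialize_result(self, obj, to_field_name):
        return {
            **super().serialize_result(obj, to_field_name),
            'notes': getattr(obj, 'notes', ''),
        }


class CustomAdminSite(admin.AdminSite):
    def autocomplete_view(self, request):
        return NotesAutocompleteJsonView.as_view(admin_site=self)(request)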
0fae4a581230e9edb0f28de3b88418e45859c62782beb37cd0a0d93f4e5be13a
import datetime from django.conf import settings from django.contrib.admin.templatetags.admin_urls import add_preserved_filters from django.contrib.admin.utils import ( display_for_field, display_for_value, get_fields_from_path, label_for_field, lookup_field, ) from django.contrib.admin.views.main import ( ALL_VAR, IS_POPUP_VAR, ORDER_VAR, PAGE_VAR, SEARCH_VAR, ) from django.core.exceptions import ObjectDoesNotExist from django.db import models from django.template import Library from django.template.loader import get_template from django.templatetags.static import static from django.urls import NoReverseMatch from django.utils import formats, timezone from django.utils.html import format_html from django.utils.safestring import mark_safe from django.utils.text import capfirst from django.utils.translation import gettext as _ from .base import InclusionAdminNode register = Library() @register.simple_tag def paginator_number(cl, i): """ Generate an individual page index link in a paginated list. """ if i == cl.paginator.ELLIPSIS: return format_html('{} ', cl.paginator.ELLIPSIS) elif i == cl.page_num: return format_html('<span class="this-page">{}</span> ', i) else: return format_html( '<a href="{}"{}>{}</a> ', cl.get_query_string({PAGE_VAR: i}), mark_safe(' class="end"' if i == cl.paginator.num_pages else ''), i, ) def pagination(cl): """ Generate the series of links to the pages in a paginated list. """ pagination_required = (not cl.show_all or not cl.can_show_all) and cl.multi_page page_range = cl.paginator.get_elided_page_range(cl.page_num) if pagination_required else [] need_show_all_link = cl.can_show_all and not cl.show_all and cl.multi_page return { 'cl': cl, 'pagination_required': pagination_required, 'show_all_url': need_show_all_link and cl.get_query_string({ALL_VAR: ''}), 'page_range': page_range, 'ALL_VAR': ALL_VAR, '1': 1, } @register.tag(name='pagination') def pagination_tag(parser, token): return InclusionAdminNode( parser, token, func=pagination, template_name='pagination.html', takes_context=False, ) def result_headers(cl): """ Generate the list column headers. """ ordering_field_columns = cl.get_ordering_field_columns() for i, field_name in enumerate(cl.list_display): text, attr = label_for_field( field_name, cl.model, model_admin=cl.model_admin, return_attr=True ) is_field_sortable = cl.sortable_by is None or field_name in cl.sortable_by if attr: field_name = _coerce_field_name(field_name, i) # Potentially not sortable # if the field is the action checkbox: no sorting and special class if field_name == 'action_checkbox': yield { "text": text, "class_attrib": mark_safe(' class="action-checkbox-column"'), "sortable": False, } continue admin_order_field = getattr(attr, "admin_order_field", None) # Set ordering for attr that is a property, if defined. if isinstance(attr, property) and hasattr(attr, 'fget'): admin_order_field = getattr(attr.fget, 'admin_order_field', None) if not admin_order_field: is_field_sortable = False if not is_field_sortable: # Not sortable yield { 'text': text, 'class_attrib': format_html(' class="column-{}"', field_name), 'sortable': False, } continue # OK, it is sortable if we got this far th_classes = ['sortable', 'column-{}'.format(field_name)] order_type = '' new_order_type = 'asc' sort_priority = 0 # Is it currently being sorted on? 
is_sorted = i in ordering_field_columns if is_sorted: order_type = ordering_field_columns.get(i).lower() sort_priority = list(ordering_field_columns).index(i) + 1 th_classes.append('sorted %sending' % order_type) new_order_type = {'asc': 'desc', 'desc': 'asc'}[order_type] # build new ordering param o_list_primary = [] # URL for making this field the primary sort o_list_remove = [] # URL for removing this field from sort o_list_toggle = [] # URL for toggling order type for this field def make_qs_param(t, n): return ('-' if t == 'desc' else '') + str(n) for j, ot in ordering_field_columns.items(): if j == i: # Same column param = make_qs_param(new_order_type, j) # We want clicking on this header to bring the ordering to the # front o_list_primary.insert(0, param) o_list_toggle.append(param) # o_list_remove - omit else: param = make_qs_param(ot, j) o_list_primary.append(param) o_list_toggle.append(param) o_list_remove.append(param) if i not in ordering_field_columns: o_list_primary.insert(0, make_qs_param(new_order_type, i)) yield { "text": text, "sortable": True, "sorted": is_sorted, "ascending": order_type == "asc", "sort_priority": sort_priority, "url_primary": cl.get_query_string({ORDER_VAR: '.'.join(o_list_primary)}), "url_remove": cl.get_query_string({ORDER_VAR: '.'.join(o_list_remove)}), "url_toggle": cl.get_query_string({ORDER_VAR: '.'.join(o_list_toggle)}), "class_attrib": format_html(' class="{}"', ' '.join(th_classes)) if th_classes else '', } def _boolean_icon(field_val): icon_url = static('admin/img/icon-%s.svg' % {True: 'yes', False: 'no', None: 'unknown'}[field_val]) return format_html('<img src="{}" alt="{}">', icon_url, field_val) def _coerce_field_name(field_name, field_index): """ Coerce a field_name (which may be a callable) to a string. """ if callable(field_name): if field_name.__name__ == '<lambda>': return 'lambda' + str(field_index) else: return field_name.__name__ return field_name def items_for_result(cl, result, form): """ Generate the actual list of data. 
""" def link_in_col(is_first, field_name, cl): if cl.list_display_links is None: return False if is_first and not cl.list_display_links: return True return field_name in cl.list_display_links first = True pk = cl.lookup_opts.pk.attname for field_index, field_name in enumerate(cl.list_display): empty_value_display = cl.model_admin.get_empty_value_display() row_classes = ['field-%s' % _coerce_field_name(field_name, field_index)] try: f, attr, value = lookup_field(field_name, result, cl.model_admin) except ObjectDoesNotExist: result_repr = empty_value_display else: empty_value_display = getattr(attr, 'empty_value_display', empty_value_display) if f is None or f.auto_created: if field_name == 'action_checkbox': row_classes = ['action-checkbox'] boolean = getattr(attr, 'boolean', False) result_repr = display_for_value(value, empty_value_display, boolean) if isinstance(value, (datetime.date, datetime.time)): row_classes.append('nowrap') else: if isinstance(f.remote_field, models.ManyToOneRel): field_val = getattr(result, f.name) if field_val is None: result_repr = empty_value_display else: result_repr = field_val else: result_repr = display_for_field(value, f, empty_value_display) if isinstance(f, (models.DateField, models.TimeField, models.ForeignKey)): row_classes.append('nowrap') row_class = mark_safe(' class="%s"' % ' '.join(row_classes)) # If list_display_links not defined, add the link tag to the first field if link_in_col(first, field_name, cl): table_tag = 'th' if first else 'td' first = False # Display link to the result's change_view if the url exists, else # display just the result's representation. try: url = cl.url_for_result(result) except NoReverseMatch: link_or_text = result_repr else: url = add_preserved_filters({'preserved_filters': cl.preserved_filters, 'opts': cl.opts}, url) # Convert the pk to something that can be used in JavaScript. # Problem cases are non-ASCII strings. if cl.to_field: attr = str(cl.to_field) else: attr = pk value = result.serializable_value(attr) link_or_text = format_html( '<a href="{}"{}>{}</a>', url, format_html( ' data-popup-opener="{}"', value ) if cl.is_popup else '', result_repr) yield format_html('<{}{}>{}</{}>', table_tag, row_class, link_or_text, table_tag) else: # By default the fields come from ModelAdmin.list_editable, but if we pull # the fields out of the form instead of list_editable custom admins # can provide fields on a per request basis if (form and field_name in form.fields and not ( field_name == cl.model._meta.pk.name and form[cl.model._meta.pk.name].is_hidden)): bf = form[field_name] result_repr = mark_safe(str(bf.errors) + str(bf)) yield format_html('<td{}>{}</td>', row_class, result_repr) if form and not form[cl.model._meta.pk.name].is_hidden: yield format_html('<td>{}</td>', form[cl.model._meta.pk.name]) class ResultList(list): """ Wrapper class used to return items in a list_editable changelist, annotated with the form object for error reporting purposes. Needed to maintain backwards compatibility with existing admin templates. 
""" def __init__(self, form, *items): self.form = form super().__init__(*items) def results(cl): if cl.formset: for res, form in zip(cl.result_list, cl.formset.forms): yield ResultList(form, items_for_result(cl, res, form)) else: for res in cl.result_list: yield ResultList(None, items_for_result(cl, res, None)) def result_hidden_fields(cl): if cl.formset: for res, form in zip(cl.result_list, cl.formset.forms): if form[cl.model._meta.pk.name].is_hidden: yield mark_safe(form[cl.model._meta.pk.name]) def result_list(cl): """ Display the headers and data list together. """ headers = list(result_headers(cl)) num_sorted_fields = 0 for h in headers: if h['sortable'] and h['sorted']: num_sorted_fields += 1 return { 'cl': cl, 'result_hidden_fields': list(result_hidden_fields(cl)), 'result_headers': headers, 'num_sorted_fields': num_sorted_fields, 'results': list(results(cl)), } @register.tag(name='result_list') def result_list_tag(parser, token): return InclusionAdminNode( parser, token, func=result_list, template_name='change_list_results.html', takes_context=False, ) def date_hierarchy(cl): """ Display the date hierarchy for date drill-down functionality. """ if cl.date_hierarchy: field_name = cl.date_hierarchy field = get_fields_from_path(cl.model, field_name)[-1] if isinstance(field, models.DateTimeField): dates_or_datetimes = 'datetimes' qs_kwargs = {'is_dst': True} if settings.USE_DEPRECATED_PYTZ else {} else: dates_or_datetimes = 'dates' qs_kwargs = {} year_field = '%s__year' % field_name month_field = '%s__month' % field_name day_field = '%s__day' % field_name field_generic = '%s__' % field_name year_lookup = cl.params.get(year_field) month_lookup = cl.params.get(month_field) day_lookup = cl.params.get(day_field) def link(filters): return cl.get_query_string(filters, [field_generic]) if not (year_lookup or month_lookup or day_lookup): # select appropriate start level date_range = cl.queryset.aggregate(first=models.Min(field_name), last=models.Max(field_name)) if date_range['first'] and date_range['last']: if dates_or_datetimes == 'datetimes': date_range = { k: timezone.localtime(v) if timezone.is_aware(v) else v for k, v in date_range.items() } if date_range['first'].year == date_range['last'].year: year_lookup = date_range['first'].year if date_range['first'].month == date_range['last'].month: month_lookup = date_range['first'].month if year_lookup and month_lookup and day_lookup: day = datetime.date(int(year_lookup), int(month_lookup), int(day_lookup)) return { 'show': True, 'back': { 'link': link({year_field: year_lookup, month_field: month_lookup}), 'title': capfirst(formats.date_format(day, 'YEAR_MONTH_FORMAT')) }, 'choices': [{'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))}] } elif year_lookup and month_lookup: days = getattr(cl.queryset, dates_or_datetimes)(field_name, 'day', **qs_kwargs) return { 'show': True, 'back': { 'link': link({year_field: year_lookup}), 'title': str(year_lookup) }, 'choices': [{ 'link': link({year_field: year_lookup, month_field: month_lookup, day_field: day.day}), 'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT')) } for day in days] } elif year_lookup: months = getattr(cl.queryset, dates_or_datetimes)(field_name, 'month', **qs_kwargs) return { 'show': True, 'back': { 'link': link({}), 'title': _('All dates') }, 'choices': [{ 'link': link({year_field: year_lookup, month_field: month.month}), 'title': capfirst(formats.date_format(month, 'YEAR_MONTH_FORMAT')) } for month in months] } else: years = getattr(cl.queryset, 
dates_or_datetimes)(field_name, 'year', **qs_kwargs) return { 'show': True, 'back': None, 'choices': [{ 'link': link({year_field: str(year.year)}), 'title': str(year.year), } for year in years] } @register.tag(name='date_hierarchy') def date_hierarchy_tag(parser, token): return InclusionAdminNode( parser, token, func=date_hierarchy, template_name='date_hierarchy.html', takes_context=False, ) def search_form(cl): """ Display a search form for searching the list. """ return { 'cl': cl, 'show_result_count': cl.result_count != cl.full_result_count, 'search_var': SEARCH_VAR, 'is_popup_var': IS_POPUP_VAR, } @register.tag(name='search_form') def search_form_tag(parser, token): return InclusionAdminNode(parser, token, func=search_form, template_name='search_form.html', takes_context=False) @register.simple_tag def admin_list_filter(cl, spec): tpl = get_template(spec.template) return tpl.render({ 'title': spec.title, 'choices': list(spec.choices(cl)), 'spec': spec, }) def admin_actions(context): """ Track the number of times the action field has been rendered on the page, so we know which value to use. """ context['action_index'] = context.get('action_index', -1) + 1 return context @register.tag(name='admin_actions') def admin_actions_tag(parser, token): return InclusionAdminNode(parser, token, func=admin_actions, template_name='actions.html') @register.tag(name='change_list_object_tools') def change_list_object_tools_tag(parser, token): """Display the row of change list object tools.""" return InclusionAdminNode( parser, token, func=lambda context: context, template_name='change_list_object_tools.html', )
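# A minimal illustrative sketch (not part of Django) of how the sort state
# built by result_headers() above ends up encoded in the changelist URL:
# each entry in the ORDER_VAR ('o') query parameter is a zero-based
# list_display column index, prefixed with '-' for descending, and the
# entries are joined with '.'.
def _example_order_param(ordering):
    """`ordering` is a list of (column_index, 'asc' or 'desc') pairs."""
    return '.'.join(
        ('-' if direction == 'desc' else '') + str(index)
        for index, direction in ordering
    )

# Sorting primarily by column 2 descending, then by column 0 ascending,
# yields a changelist URL ending in '?o=-2.0'.
assert _example_order_param([(2, 'desc'), (0, 'asc')]) == '-2.0'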
import json from django import template from django.template.context import Context from .base import InclusionAdminNode register = template.Library() def prepopulated_fields_js(context): """ Create a list of prepopulated_fields that should render JavaScript for the prepopulated fields for both the admin form and inlines. """ prepopulated_fields = [] if 'adminform' in context: prepopulated_fields.extend(context['adminform'].prepopulated_fields) if 'inline_admin_formsets' in context: for inline_admin_formset in context['inline_admin_formsets']: for inline_admin_form in inline_admin_formset: if inline_admin_form.original is None: prepopulated_fields.extend(inline_admin_form.prepopulated_fields) prepopulated_fields_json = [] for field in prepopulated_fields: prepopulated_fields_json.append({ "id": "#%s" % field["field"].auto_id, "name": field["field"].name, "dependency_ids": ["#%s" % dependency.auto_id for dependency in field["dependencies"]], "dependency_list": [dependency.name for dependency in field["dependencies"]], "maxLength": field["field"].field.max_length or 50, "allowUnicode": getattr(field["field"].field, "allow_unicode", False) }) context.update({ 'prepopulated_fields': prepopulated_fields, 'prepopulated_fields_json': json.dumps(prepopulated_fields_json), }) return context @register.tag(name='prepopulated_fields_js') def prepopulated_fields_js_tag(parser, token): return InclusionAdminNode(parser, token, func=prepopulated_fields_js, template_name="prepopulated_fields_js.html") def submit_row(context): """ Display the row of buttons for delete and save. """ add = context['add'] change = context['change'] is_popup = context['is_popup'] save_as = context['save_as'] show_save = context.get('show_save', True) show_save_and_add_another = context.get('show_save_and_add_another', True) show_save_and_continue = context.get('show_save_and_continue', True) has_add_permission = context['has_add_permission'] has_change_permission = context['has_change_permission'] has_view_permission = context['has_view_permission'] has_editable_inline_admin_formsets = context['has_editable_inline_admin_formsets'] can_save = (has_change_permission and change) or (has_add_permission and add) or has_editable_inline_admin_formsets can_save_and_add_another = ( has_add_permission and not is_popup and (not save_as or add) and can_save and show_save_and_add_another ) can_save_and_continue = not is_popup and can_save and has_view_permission and show_save_and_continue can_change = has_change_permission or has_editable_inline_admin_formsets ctx = Context(context) ctx.update({ 'can_change': can_change, 'show_delete_link': ( not is_popup and context['has_delete_permission'] and change and context.get('show_delete', True) ), 'show_save_as_new': not is_popup and has_change_permission and change and save_as, 'show_save_and_add_another': can_save_and_add_another, 'show_save_and_continue': can_save_and_continue, 'show_save': show_save and can_save, 'show_close': not(show_save and can_save) }) return ctx @register.tag(name='submit_row') def submit_row_tag(parser, token): return InclusionAdminNode(parser, token, func=submit_row, template_name='submit_line.html') @register.tag(name='change_form_object_tools') def change_form_object_tools_tag(parser, token): """Display the row of change form object tools.""" return InclusionAdminNode( parser, token, func=lambda context: context, template_name='change_form_object_tools.html', ) @register.filter def cell_count(inline_admin_form): """Return the number of cells used in a tabular 
inline.""" count = 1 # Hidden cell with hidden 'id' field for fieldset in inline_admin_form: # Count all visible fields. for line in fieldset: for field in line: if not field.field.is_hidden: count += 1 if inline_admin_form.formset.can_delete: # Delete checkbox count += 1 return count
import itertools from django.apps import apps from django.contrib.contenttypes.models import ContentType from django.core.management import BaseCommand from django.db import DEFAULT_DB_ALIAS, router from django.db.models.deletion import Collector class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '--noinput', '--no-input', action='store_false', dest='interactive', help='Tells Django to NOT prompt the user for input of any kind.', ) parser.add_argument( '--database', default=DEFAULT_DB_ALIAS, help='Nominates the database to use. Defaults to the "default" database.', ) parser.add_argument( '--include-stale-apps', action='store_true', default=False, help=( "Deletes stale content types including ones from previously " "installed apps that have been removed from INSTALLED_APPS." ), ) def handle(self, **options): db = options['database'] include_stale_apps = options['include_stale_apps'] interactive = options['interactive'] verbosity = options['verbosity'] if not router.allow_migrate_model(db, ContentType): return ContentType.objects.clear_cache() apps_content_types = itertools.groupby( ContentType.objects.using(db).order_by('app_label', 'model'), lambda obj: obj.app_label, ) for app_label, content_types in apps_content_types: if not include_stale_apps and app_label not in apps.app_configs: continue to_remove = [ct for ct in content_types if ct.model_class() is None] # Confirm that the content type is stale before deletion. using = router.db_for_write(ContentType) if to_remove: if interactive: ct_info = [] for ct in to_remove: ct_info.append(' - Content type for %s.%s' % (ct.app_label, ct.model)) collector = NoFastDeleteCollector(using=using, origin=ct) collector.collect([ct]) for obj_type, objs in collector.data.items(): if objs != {ct}: ct_info.append(' - %s %s object(s)' % ( len(objs), obj_type._meta.label, )) content_type_display = '\n'.join(ct_info) self.stdout.write("""Some content types in your database are stale and can be deleted. Any objects that depend on these content types will also be deleted. The content types and dependent objects that would be deleted are: %s This list doesn't include any cascade deletions to data outside of Django's models (uncommon). Are you sure you want to delete these content types? If you're unsure, answer 'no'.""" % content_type_display) ok_to_delete = input("Type 'yes' to continue, or 'no' to cancel: ") else: ok_to_delete = 'yes' if ok_to_delete == 'yes': for ct in to_remove: if verbosity >= 2: self.stdout.write("Deleting stale content type '%s | %s'" % (ct.app_label, ct.model)) ct.delete() else: if verbosity >= 2: self.stdout.write("Stale content types remain.") class NoFastDeleteCollector(Collector): def can_fast_delete(self, *args, **kwargs): """ Always load related objects to display them when showing confirmation. """ return False
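# A hedged usage sketch: the command above can also be driven from code via
# call_command(); the option names mirror the `dest` values in
# add_arguments(). A configured project database is assumed.
def _example_remove_stale_contenttypes():
    from django.core.management import call_command
    call_command(
        'remove_stale_contenttypes',
        interactive=False,          # equivalent to passing --noinput
        database='default',
        include_stale_apps=False,
        verbosity=2,
    )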
import logging import string from datetime import datetime, timedelta from django.conf import settings from django.core import signing from django.utils import timezone from django.utils.crypto import get_random_string from django.utils.module_loading import import_string # session_key should not be case sensitive because some backends can store it # on case insensitive file systems. VALID_KEY_CHARS = string.ascii_lowercase + string.digits class CreateError(Exception): """ Used internally as a consistent exception type to catch from save (see the docstring for SessionBase.save() for details). """ pass class UpdateError(Exception): """ Occurs if Django tries to update a session that was deleted. """ pass class SessionBase: """ Base class for all Session classes. """ TEST_COOKIE_NAME = 'testcookie' TEST_COOKIE_VALUE = 'worked' __not_given = object() def __init__(self, session_key=None): self._session_key = session_key self.accessed = False self.modified = False self.serializer = import_string(settings.SESSION_SERIALIZER) def __contains__(self, key): return key in self._session def __getitem__(self, key): return self._session[key] def __setitem__(self, key, value): self._session[key] = value self.modified = True def __delitem__(self, key): del self._session[key] self.modified = True @property def key_salt(self): return 'django.contrib.sessions.' + self.__class__.__qualname__ def get(self, key, default=None): return self._session.get(key, default) def pop(self, key, default=__not_given): self.modified = self.modified or key in self._session args = () if default is self.__not_given else (default,) return self._session.pop(key, *args) def setdefault(self, key, value): if key in self._session: return self._session[key] else: self.modified = True self._session[key] = value return value def set_test_cookie(self): self[self.TEST_COOKIE_NAME] = self.TEST_COOKIE_VALUE def test_cookie_worked(self): return self.get(self.TEST_COOKIE_NAME) == self.TEST_COOKIE_VALUE def delete_test_cookie(self): del self[self.TEST_COOKIE_NAME] def encode(self, session_dict): "Return the given session dictionary serialized and encoded as a string." return signing.dumps( session_dict, salt=self.key_salt, serializer=self.serializer, compress=True, ) def decode(self, session_data): try: return signing.loads(session_data, salt=self.key_salt, serializer=self.serializer) except signing.BadSignature: logger = logging.getLogger('django.security.SuspiciousSession') logger.warning('Session data corrupted') except Exception: # ValueError, unpickling exceptions. If any of these happen, just # return an empty dictionary (an empty session). pass return {} def update(self, dict_): self._session.update(dict_) self.modified = True def has_key(self, key): return key in self._session def keys(self): return self._session.keys() def values(self): return self._session.values() def items(self): return self._session.items() def clear(self): # To avoid unnecessary persistent storage accesses, we set up the # internals directly (loading data wastes time, since we are going to # set it to an empty dict anyway). self._session_cache = {} self.accessed = True self.modified = True def is_empty(self): "Return True when there is no session_key and the session is empty." try: return not self._session_key and not self._session_cache except AttributeError: return True def _get_new_session_key(self): "Return session key that isn't being used." 
while True: session_key = get_random_string(32, VALID_KEY_CHARS) if not self.exists(session_key): return session_key def _get_or_create_session_key(self): if self._session_key is None: self._session_key = self._get_new_session_key() return self._session_key def _validate_session_key(self, key): """ Key must be truthy and at least 8 characters long. 8 characters is an arbitrary lower bound for some minimal key security. """ return key and len(key) >= 8 def _get_session_key(self): return self.__session_key def _set_session_key(self, value): """ Validate session key on assignment. Invalid values will set to None. """ if self._validate_session_key(value): self.__session_key = value else: self.__session_key = None session_key = property(_get_session_key) _session_key = property(_get_session_key, _set_session_key) def _get_session(self, no_load=False): """ Lazily load session from storage (unless "no_load" is True, when only an empty dict is stored) and store it in the current instance. """ self.accessed = True try: return self._session_cache except AttributeError: if self.session_key is None or no_load: self._session_cache = {} else: self._session_cache = self.load() return self._session_cache _session = property(_get_session) def get_session_cookie_age(self): return settings.SESSION_COOKIE_AGE def get_expiry_age(self, **kwargs): """Get the number of seconds until the session expires. Optionally, this function accepts `modification` and `expiry` keyword arguments specifying the modification and expiry of the session. """ try: modification = kwargs['modification'] except KeyError: modification = timezone.now() # Make the difference between "expiry=None passed in kwargs" and # "expiry not passed in kwargs", in order to guarantee not to trigger # self.load() when expiry is provided. try: expiry = kwargs['expiry'] except KeyError: expiry = self.get('_session_expiry') if not expiry: # Checks both None and 0 cases return self.get_session_cookie_age() if not isinstance(expiry, (datetime, str)): return expiry if isinstance(expiry, str): expiry = datetime.fromisoformat(expiry) delta = expiry - modification return delta.days * 86400 + delta.seconds def get_expiry_date(self, **kwargs): """Get session the expiry date (as a datetime object). Optionally, this function accepts `modification` and `expiry` keyword arguments specifying the modification and expiry of the session. """ try: modification = kwargs['modification'] except KeyError: modification = timezone.now() # Same comment as in get_expiry_age try: expiry = kwargs['expiry'] except KeyError: expiry = self.get('_session_expiry') if isinstance(expiry, datetime): return expiry elif isinstance(expiry, str): return datetime.fromisoformat(expiry) expiry = expiry or self.get_session_cookie_age() return modification + timedelta(seconds=expiry) def set_expiry(self, value): """ Set a custom expiration for the session. ``value`` can be an integer, a Python ``datetime`` or ``timedelta`` object or ``None``. If ``value`` is an integer, the session will expire after that many seconds of inactivity. If set to ``0`` then the session will expire on browser close. If ``value`` is a ``datetime`` or ``timedelta`` object, the session will expire at that specific future time. If ``value`` is ``None``, the session uses the global session expiry policy. """ if value is None: # Remove any custom expiration for this session. 
try: del self['_session_expiry'] except KeyError: pass return if isinstance(value, timedelta): value = timezone.now() + value if isinstance(value, datetime): value = value.isoformat() self['_session_expiry'] = value def get_expire_at_browser_close(self): """ Return ``True`` if the session is set to expire when the browser closes, and ``False`` if there's an expiry date. Use ``get_expiry_date()`` or ``get_expiry_age()`` to find the actual expiry date/age, if there is one. """ if (expiry := self.get('_session_expiry')) is None: return settings.SESSION_EXPIRE_AT_BROWSER_CLOSE return expiry == 0 def flush(self): """ Remove the current session data from the database and regenerate the key. """ self.clear() self.delete() self._session_key = None def cycle_key(self): """ Create a new session key, while retaining the current session data. """ data = self._session key = self.session_key self.create() self._session_cache = data if key: self.delete(key) # Methods that child classes must implement. def exists(self, session_key): """ Return True if the given session_key already exists. """ raise NotImplementedError('subclasses of SessionBase must provide an exists() method') def create(self): """ Create a new session instance. Guaranteed to create a new object with a unique key and will have saved the result once (with empty data) before the method returns. """ raise NotImplementedError('subclasses of SessionBase must provide a create() method') def save(self, must_create=False): """ Save the session data. If 'must_create' is True, create a new session object (or raise CreateError). Otherwise, only update an existing object and don't create one (raise UpdateError if needed). """ raise NotImplementedError('subclasses of SessionBase must provide a save() method') def delete(self, session_key=None): """ Delete the session data under this key. If the key is None, use the current session key value. """ raise NotImplementedError('subclasses of SessionBase must provide a delete() method') def load(self): """ Load the session data and return a dictionary. """ raise NotImplementedError('subclasses of SessionBase must provide a load() method') @classmethod def clear_expired(cls): """ Remove expired sessions from the session store. If this operation isn't possible on a given backend, it should raise NotImplementedError. If it isn't necessary, because the backend has a built-in expiration mechanism, it should be a no-op. """ raise NotImplementedError('This backend does not support clear_expired().')
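# A minimal illustrative sketch (not a supported backend) of the subclass
# contract listed above: a session store has to provide exists(), create(),
# save(), delete(), load(), and clear_expired(). A module-level dict stands
# in for real storage here.
class ExampleDictSessionStore(SessionBase):
    _store = {}

    def exists(self, session_key):
        return session_key in self._store

    def create(self):
        self._session_key = self._get_new_session_key()
        self.save(must_create=True)
        self.modified = True

    def save(self, must_create=False):
        session_key = self._get_or_create_session_key()
        if must_create and session_key in self._store:
            raise CreateError
        self._store[session_key] = self._get_session(no_load=must_create)

    def delete(self, session_key=None):
        self._store.pop(session_key or self.session_key, None)

    def load(self):
        return dict(self._store.get(self.session_key, {}))

    @classmethod
    def clear_expired(cls):
        # Nothing to do: this toy store keeps no expiry metadata.
        pass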
# LayerMapping -- A Django Model/OGR Layer Mapping Utility """ The LayerMapping class provides a way to map the contents of OGR vector files (e.g. SHP files) to Geographic-enabled Django models. For more information, please consult the GeoDjango documentation: https://docs.djangoproject.com/en/dev/ref/contrib/gis/layermapping/ """ import sys from decimal import Decimal, InvalidOperation as DecimalInvalidOperation from pathlib import Path from django.contrib.gis.db.models import GeometryField from django.contrib.gis.gdal import ( CoordTransform, DataSource, GDALException, OGRGeometry, OGRGeomType, SpatialReference, ) from django.contrib.gis.gdal.field import ( OFTDate, OFTDateTime, OFTInteger, OFTInteger64, OFTReal, OFTString, OFTTime, ) from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist from django.db import connections, models, router, transaction from django.utils.encoding import force_str # LayerMapping exceptions. class LayerMapError(Exception): pass class InvalidString(LayerMapError): pass class InvalidDecimal(LayerMapError): pass class InvalidInteger(LayerMapError): pass class MissingForeignKey(LayerMapError): pass class LayerMapping: "A class that maps OGR Layers to GeoDjango Models." # Acceptable 'base' types for a multi-geometry type. MULTI_TYPES = { 1: OGRGeomType('MultiPoint'), 2: OGRGeomType('MultiLineString'), 3: OGRGeomType('MultiPolygon'), OGRGeomType('Point25D').num: OGRGeomType('MultiPoint25D'), OGRGeomType('LineString25D').num: OGRGeomType('MultiLineString25D'), OGRGeomType('Polygon25D').num: OGRGeomType('MultiPolygon25D'), } # Acceptable Django field types and corresponding acceptable OGR # counterparts. FIELD_TYPES = { models.AutoField: OFTInteger, models.BigAutoField: OFTInteger64, models.SmallAutoField: OFTInteger, models.BooleanField: (OFTInteger, OFTReal, OFTString), models.IntegerField: (OFTInteger, OFTReal, OFTString), models.FloatField: (OFTInteger, OFTReal), models.DateField: OFTDate, models.DateTimeField: OFTDateTime, models.EmailField: OFTString, models.TimeField: OFTTime, models.DecimalField: (OFTInteger, OFTReal), models.CharField: OFTString, models.SlugField: OFTString, models.TextField: OFTString, models.URLField: OFTString, models.UUIDField: OFTString, models.BigIntegerField: (OFTInteger, OFTReal, OFTString), models.SmallIntegerField: (OFTInteger, OFTReal, OFTString), models.PositiveBigIntegerField: (OFTInteger, OFTReal, OFTString), models.PositiveIntegerField: (OFTInteger, OFTReal, OFTString), models.PositiveSmallIntegerField: (OFTInteger, OFTReal, OFTString), } def __init__(self, model, data, mapping, layer=0, source_srs=None, encoding='utf-8', transaction_mode='commit_on_success', transform=True, unique=None, using=None): """ A LayerMapping object is initialized using the given Model (not an instance), a DataSource (or string path to an OGR-supported data file), and a mapping dictionary. See the module level docstring for more details and keyword argument usage. """ # Getting the DataSource and the associated Layer. if isinstance(data, (str, Path)): self.ds = DataSource(data, encoding=encoding) else: self.ds = data self.layer = self.ds[layer] self.using = using if using is not None else router.db_for_write(model) connection = connections[self.using] self.spatial_backend = connection.ops # Setting the mapping & model attributes. self.mapping = mapping self.model = model # Checking the layer -- initialization of the object will fail if # things don't check out before hand. 
self.check_layer() # Getting the geometry column associated with the model (an # exception will be raised if there is no geometry column). if connection.features.supports_transform: self.geo_field = self.geometry_field() else: transform = False # Checking the source spatial reference system, and getting # the coordinate transformation object (unless the `transform` # keyword is set to False) if transform: self.source_srs = self.check_srs(source_srs) self.transform = self.coord_transform() else: self.transform = transform # Setting the encoding for OFTString fields, if specified. if encoding: # Making sure the encoding exists, if not a LookupError # exception will be thrown. from codecs import lookup lookup(encoding) self.encoding = encoding else: self.encoding = None if unique: self.check_unique(unique) transaction_mode = 'autocommit' # Has to be set to autocommit. self.unique = unique else: self.unique = None # Setting the transaction decorator with the function in the # transaction modes dictionary. self.transaction_mode = transaction_mode if transaction_mode == 'autocommit': self.transaction_decorator = None elif transaction_mode == 'commit_on_success': self.transaction_decorator = transaction.atomic else: raise LayerMapError('Unrecognized transaction mode: %s' % transaction_mode) # #### Checking routines used during initialization #### def check_fid_range(self, fid_range): "Check the `fid_range` keyword." if fid_range: if isinstance(fid_range, (tuple, list)): return slice(*fid_range) elif isinstance(fid_range, slice): return fid_range else: raise TypeError else: return None def check_layer(self): """ Check the Layer metadata and ensure that it's compatible with the mapping information and model. Unlike previous revisions, there is no need to increment through each feature in the Layer. """ # The geometry field of the model is set here. # TODO: Support more than one geometry field / model. However, this # depends on the GDAL Driver in use. self.geom_field = False self.fields = {} # Getting lists of the field names and the field types available in # the OGR Layer. ogr_fields = self.layer.fields ogr_field_types = self.layer.field_types # Function for determining if the OGR mapping field is in the Layer. def check_ogr_fld(ogr_map_fld): try: idx = ogr_fields.index(ogr_map_fld) except ValueError: raise LayerMapError('Given mapping OGR field "%s" not found in OGR Layer.' % ogr_map_fld) return idx # No need to increment through each feature in the model, simply check # the Layer metadata against what was given in the mapping dictionary. for field_name, ogr_name in self.mapping.items(): # Ensuring that a corresponding field exists in the model # for the given field name in the mapping. try: model_field = self.model._meta.get_field(field_name) except FieldDoesNotExist: raise LayerMapError('Given mapping field "%s" not in given Model fields.' % field_name) # Getting the string name for the Django field class (e.g., 'PointField'). fld_name = model_field.__class__.__name__ if isinstance(model_field, GeometryField): if self.geom_field: raise LayerMapError('LayerMapping does not support more than one GeometryField per model.') # Getting the coordinate dimension of the geometry field. coord_dim = model_field.dim try: if coord_dim == 3: gtype = OGRGeomType(ogr_name + '25D') else: gtype = OGRGeomType(ogr_name) except GDALException: raise LayerMapError('Invalid mapping for GeometryField "%s".' % field_name) # Making sure that the OGR Layer's Geometry is compatible. 
ltype = self.layer.geom_type if not (ltype.name.startswith(gtype.name) or self.make_multi(ltype, model_field)): raise LayerMapError('Invalid mapping geometry; model has %s%s, ' 'layer geometry type is %s.' % (fld_name, '(dim=3)' if coord_dim == 3 else '', ltype)) # Setting the `geom_field` attribute w/the name of the model field # that is a Geometry. Also setting the coordinate dimension # attribute. self.geom_field = field_name self.coord_dim = coord_dim fields_val = model_field elif isinstance(model_field, models.ForeignKey): if isinstance(ogr_name, dict): # Is every given related model mapping field in the Layer? rel_model = model_field.remote_field.model for rel_name, ogr_field in ogr_name.items(): idx = check_ogr_fld(ogr_field) try: rel_model._meta.get_field(rel_name) except FieldDoesNotExist: raise LayerMapError('ForeignKey mapping field "%s" not in %s fields.' % (rel_name, rel_model.__class__.__name__)) fields_val = rel_model else: raise TypeError('ForeignKey mapping must be of dictionary type.') else: # Is the model field type supported by LayerMapping? if model_field.__class__ not in self.FIELD_TYPES: raise LayerMapError('Django field type "%s" has no OGR mapping (yet).' % fld_name) # Is the OGR field in the Layer? idx = check_ogr_fld(ogr_name) ogr_field = ogr_field_types[idx] # Can the OGR field type be mapped to the Django field type? if not issubclass(ogr_field, self.FIELD_TYPES[model_field.__class__]): raise LayerMapError('OGR field "%s" (of type %s) cannot be mapped to Django %s.' % (ogr_field, ogr_field.__name__, fld_name)) fields_val = model_field self.fields[field_name] = fields_val def check_srs(self, source_srs): "Check the compatibility of the given spatial reference object." if isinstance(source_srs, SpatialReference): sr = source_srs elif isinstance(source_srs, self.spatial_backend.spatial_ref_sys()): sr = source_srs.srs elif isinstance(source_srs, (int, str)): sr = SpatialReference(source_srs) else: # Otherwise just pulling the SpatialReference from the layer sr = self.layer.srs if not sr: raise LayerMapError('No source reference system defined.') else: return sr def check_unique(self, unique): "Check the `unique` keyword parameter -- may be a sequence or string." if isinstance(unique, (list, tuple)): # List of fields to determine uniqueness with for attr in unique: if attr not in self.mapping: raise ValueError elif isinstance(unique, str): # Only a single field passed in. if unique not in self.mapping: raise ValueError else: raise TypeError('Unique keyword argument must be set with a tuple, list, or string.') # Keyword argument retrieval routines #### def feature_kwargs(self, feat): """ Given an OGR Feature, return a dictionary of keyword arguments for constructing the mapped model. """ # The keyword arguments for model construction. kwargs = {} # Incrementing through each model field and OGR field in the # dictionary mapping. for field_name, ogr_name in self.mapping.items(): model_field = self.fields[field_name] if isinstance(model_field, GeometryField): # Verify OGR geometry. try: val = self.verify_geom(feat.geom, model_field) except GDALException: raise LayerMapError('Could not retrieve geometry from feature.') elif isinstance(model_field, models.base.ModelBase): # The related _model_, not a field was passed in -- indicating # another mapping for the related Model. val = self.verify_fk(feat, model_field, ogr_name) else: # Otherwise, verify OGR Field type. 
val = self.verify_ogr_field(feat[ogr_name], model_field) # Setting the keyword arguments for the field name with the # value obtained above. kwargs[field_name] = val return kwargs def unique_kwargs(self, kwargs): """ Given the feature keyword arguments (from `feature_kwargs`), construct and return the uniqueness keyword arguments -- a subset of the feature kwargs. """ if isinstance(self.unique, str): return {self.unique: kwargs[self.unique]} else: return {fld: kwargs[fld] for fld in self.unique} # #### Verification routines used in constructing model keyword arguments. #### def verify_ogr_field(self, ogr_field, model_field): """ Verify if the OGR Field contents are acceptable to the model field. If they are, return the verified value, otherwise raise an exception. """ if (isinstance(ogr_field, OFTString) and isinstance(model_field, (models.CharField, models.TextField))): if self.encoding and ogr_field.value is not None: # The encoding for OGR data sources may be specified here # (e.g., 'cp437' for Census Bureau boundary files). val = force_str(ogr_field.value, self.encoding) else: val = ogr_field.value if model_field.max_length and val is not None and len(val) > model_field.max_length: raise InvalidString('%s model field maximum string length is %s, given %s characters.' % (model_field.name, model_field.max_length, len(val))) elif isinstance(ogr_field, OFTReal) and isinstance(model_field, models.DecimalField): try: # Creating an instance of the Decimal value to use. d = Decimal(str(ogr_field.value)) except DecimalInvalidOperation: raise InvalidDecimal('Could not construct decimal from: %s' % ogr_field.value) # Getting the decimal value as a tuple. dtup = d.as_tuple() digits = dtup[1] d_idx = dtup[2] # index where the decimal is # Maximum amount of precision, or digits to the left of the decimal. max_prec = model_field.max_digits - model_field.decimal_places # Getting the digits to the left of the decimal place for the # given decimal. if d_idx < 0: n_prec = len(digits[:d_idx]) else: n_prec = len(digits) + d_idx # If we have more than the maximum digits allowed, then throw an # InvalidDecimal exception. if n_prec > max_prec: raise InvalidDecimal( 'A DecimalField with max_digits %d, decimal_places %d must ' 'round to an absolute value less than 10^%d.' % (model_field.max_digits, model_field.decimal_places, max_prec) ) val = d elif isinstance(ogr_field, (OFTReal, OFTString)) and isinstance(model_field, models.IntegerField): # Attempt to convert any OFTReal and OFTString value to an OFTInteger. try: val = int(ogr_field.value) except ValueError: raise InvalidInteger('Could not construct integer from: %s' % ogr_field.value) else: val = ogr_field.value return val def verify_fk(self, feat, rel_model, rel_mapping): """ Given an OGR Feature, the related model and its dictionary mapping, retrieve the related model for the ForeignKey mapping. """ # TODO: It is expensive to retrieve a model for every record -- # explore if an efficient mechanism exists for caching related # ForeignKey models. # Constructing and verifying the related model keyword arguments. fk_kwargs = {} for field_name, ogr_name in rel_mapping.items(): fk_kwargs[field_name] = self.verify_ogr_field(feat[ogr_name], rel_model._meta.get_field(field_name)) # Attempting to retrieve and return the related model. 
try: return rel_model.objects.using(self.using).get(**fk_kwargs) except ObjectDoesNotExist: raise MissingForeignKey( 'No ForeignKey %s model found with keyword arguments: %s' % (rel_model.__name__, fk_kwargs) ) def verify_geom(self, geom, model_field): """ Verify the geometry -- construct and return a GeometryCollection if necessary (for example if the model field is MultiPolygonField while the mapped shapefile only contains Polygons). """ # Downgrade a 3D geom to a 2D one, if necessary. if self.coord_dim != geom.coord_dim: geom.coord_dim = self.coord_dim if self.make_multi(geom.geom_type, model_field): # Constructing a multi-geometry type to contain the single geometry multi_type = self.MULTI_TYPES[geom.geom_type.num] g = OGRGeometry(multi_type) g.add(geom) else: g = geom # Transforming the geometry with our Coordinate Transformation object, # but only if the class variable `transform` is set w/a CoordTransform # object. if self.transform: g.transform(self.transform) # Returning the WKT of the geometry. return g.wkt # #### Other model methods #### def coord_transform(self): "Return the coordinate transformation object." SpatialRefSys = self.spatial_backend.spatial_ref_sys() try: # Getting the target spatial reference system target_srs = SpatialRefSys.objects.using(self.using).get(srid=self.geo_field.srid).srs # Creating the CoordTransform object return CoordTransform(self.source_srs, target_srs) except Exception as exc: raise LayerMapError( 'Could not translate between the data source and model geometry.' ) from exc def geometry_field(self): "Return the GeometryField instance associated with the geographic column." # Use `get_field()` on the model's options so that we # get the correct field instance if there's model inheritance. opts = self.model._meta return opts.get_field(self.geom_field) def make_multi(self, geom_type, model_field): """ Given the OGRGeomType for a geometry and its associated GeometryField, determine whether the geometry should be turned into a GeometryCollection. """ return (geom_type.num in self.MULTI_TYPES and model_field.__class__.__name__ == 'Multi%s' % geom_type.django) def save(self, verbose=False, fid_range=False, step=False, progress=False, silent=False, stream=sys.stdout, strict=False): """ Save the contents from the OGR DataSource Layer into the database according to the mapping dictionary given at initialization. Keyword Parameters: verbose: If set, information will be printed subsequent to each model save executed on the database. fid_range: May be set with a slice or tuple of (begin, end) feature ID's to map from the data source. In other words, this keyword enables the user to selectively import a subset range of features in the geographic data source. step: If set with an integer, transactions will occur at every step interval. For example, if step=1000, a commit would occur after the 1,000th feature, the 2,000th feature etc. progress: When this keyword is set, status information will be printed giving the number of features processed and successfully saved. By default, progress information will pe printed every 1000 features processed, however, this default may be overridden by setting this keyword with an integer for the desired interval. stream: Status information will be written to this file handle. Defaults to using `sys.stdout`, but any object with a `write` method is supported. silent: By default, non-fatal error notifications are printed to stdout, but this keyword may be set to disable these notifications. 
strict: Execution of the model mapping will cease upon the first error encountered. The default behavior is to attempt to continue. """ # Getting the default Feature ID range. default_range = self.check_fid_range(fid_range) # Setting the progress interval, if requested. if progress: if progress is True or not isinstance(progress, int): progress_interval = 1000 else: progress_interval = progress def _save(feat_range=default_range, num_feat=0, num_saved=0): if feat_range: layer_iter = self.layer[feat_range] else: layer_iter = self.layer for feat in layer_iter: num_feat += 1 # Getting the keyword arguments try: kwargs = self.feature_kwargs(feat) except LayerMapError as msg: # Something borked the validation if strict: raise elif not silent: stream.write('Ignoring Feature ID %s because: %s\n' % (feat.fid, msg)) else: # Constructing the model using the keyword args is_update = False if self.unique: # If we want unique models on a particular field, handle the # geometry appropriately. try: # Getting the keyword arguments and retrieving # the unique model. u_kwargs = self.unique_kwargs(kwargs) m = self.model.objects.using(self.using).get(**u_kwargs) is_update = True # Getting the geometry (in OGR form), creating # one from the kwargs WKT, adding in additional # geometries, and update the attribute with the # just-updated geometry WKT. geom_value = getattr(m, self.geom_field) if geom_value is None: geom = OGRGeometry(kwargs[self.geom_field]) else: geom = geom_value.ogr new = OGRGeometry(kwargs[self.geom_field]) for g in new: geom.add(g) setattr(m, self.geom_field, geom.wkt) except ObjectDoesNotExist: # No unique model exists yet, create. m = self.model(**kwargs) else: m = self.model(**kwargs) try: # Attempting to save. m.save(using=self.using) num_saved += 1 if verbose: stream.write('%s: %s\n' % ('Updated' if is_update else 'Saved', m)) except Exception as msg: if strict: # Bailing out if the `strict` keyword is set. if not silent: stream.write( 'Failed to save the feature (id: %s) into the ' 'model with the keyword arguments:\n' % feat.fid ) stream.write('%s\n' % kwargs) raise elif not silent: stream.write('Failed to save %s:\n %s\nContinuing\n' % (kwargs, msg)) # Printing progress information, if requested. if progress and num_feat % progress_interval == 0: stream.write('Processed %d features, saved %d ...\n' % (num_feat, num_saved)) # Only used for status output purposes -- incremental saving uses the # values returned here. return num_saved, num_feat if self.transaction_decorator is not None: _save = self.transaction_decorator(_save) nfeat = self.layer.num_feat if step and isinstance(step, int) and step < nfeat: # Incremental saving is requested at the given interval (step) if default_range: raise LayerMapError('The `step` keyword may not be used in conjunction with the `fid_range` keyword.') beg, num_feat, num_saved = (0, 0, 0) indices = range(step, nfeat, step) n_i = len(indices) for i, end in enumerate(indices): # Constructing the slice to use for this step; the last slice is # special (e.g, [100:] instead of [90:100]). if i + 1 == n_i: step_slice = slice(beg, None) else: step_slice = slice(beg, end) try: num_feat, num_saved = _save(step_slice, num_feat, num_saved) beg = end except Exception: # Deliberately catch everything stream.write('%s\nFailed to save slice: %s\n' % ('=-' * 20, step_slice)) raise else: # Otherwise, just calling the previously defined _save() function. _save()
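# A hedged sketch of the mapping dictionary LayerMapping expects: keys are
# Django model field names, values are OGR field names from the layer (or,
# for the geometry field, an OGR geometry type name). The field and file
# names below are illustrative assumptions, not a bundled dataset.
city_mapping = {
    'name': 'NAME',           # CharField      <- OFTString layer field
    'population': 'POP2005',  # IntegerField   <- OFTInteger/OFTReal/OFTString
    'geom': 'MULTIPOLYGON',   # GeometryField  <- OGR geometry type
}
# Typical use, assuming a matching `City` model and shapefile exist:
#     lm = LayerMapping(City, '/data/cities.shp', city_mapping, transform=False)
#     lm.save(strict=True, verbose=True)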
""" This module houses the GeoIP2 object, a wrapper for the MaxMind GeoIP2(R) Python API (https://geoip2.readthedocs.io/). This is an alternative to the Python GeoIP2 interface provided by MaxMind. GeoIP(R) is a registered trademark of MaxMind, Inc. For IP-based geolocation, this module requires the GeoLite2 Country and City datasets, in binary format (CSV will not work!). The datasets may be downloaded from MaxMind at https://dev.maxmind.com/geoip/geoip2/geolite2/. Grab GeoLite2-Country.mmdb.gz and GeoLite2-City.mmdb.gz, and unzip them in the directory corresponding to settings.GEOIP_PATH. """ __all__ = ['HAS_GEOIP2'] try: import geoip2 # NOQA except ImportError: HAS_GEOIP2 = False else: from .base import GeoIP2, GeoIP2Exception HAS_GEOIP2 = True __all__ += ['GeoIP2', 'GeoIP2Exception']
import socket import geoip2.database from django.conf import settings from django.contrib.gis.geos import Point from django.core.exceptions import ValidationError from django.core.validators import validate_ipv46_address from django.utils._os import to_path from .resources import City, Country # Creating the settings dictionary with any settings, if needed. GEOIP_SETTINGS = { 'GEOIP_PATH': getattr(settings, 'GEOIP_PATH', None), 'GEOIP_CITY': getattr(settings, 'GEOIP_CITY', 'GeoLite2-City.mmdb'), 'GEOIP_COUNTRY': getattr(settings, 'GEOIP_COUNTRY', 'GeoLite2-Country.mmdb'), } class GeoIP2Exception(Exception): pass class GeoIP2: # The flags for GeoIP memory caching. # Try MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that order. MODE_AUTO = 0 # Use the C extension with memory map. MODE_MMAP_EXT = 1 # Read from memory map. Pure Python. MODE_MMAP = 2 # Read database as standard file. Pure Python. MODE_FILE = 4 # Load database into memory. Pure Python. MODE_MEMORY = 8 cache_options = frozenset((MODE_AUTO, MODE_MMAP_EXT, MODE_MMAP, MODE_FILE, MODE_MEMORY)) # Paths to the city & country binary databases. _city_file = '' _country_file = '' # Initially, pointers to GeoIP file references are NULL. _city = None _country = None def __init__(self, path=None, cache=0, country=None, city=None): """ Initialize the GeoIP object. No parameters are required to use default settings. Keyword arguments may be passed in to customize the locations of the GeoIP datasets. * path: Base directory to where GeoIP data is located or the full path to where the city or country data files (*.mmdb) are located. Assumes that both the city and country data sets are located in this directory; overrides the GEOIP_PATH setting. * cache: The cache settings when opening up the GeoIP datasets. May be an integer in (0, 1, 2, 4, 8) corresponding to the MODE_AUTO, MODE_MMAP_EXT, MODE_MMAP, MODE_FILE, and MODE_MEMORY, `GeoIPOptions` C API settings, respectively. Defaults to 0, meaning MODE_AUTO. * country: The name of the GeoIP country data file. Defaults to 'GeoLite2-Country.mmdb'; overrides the GEOIP_COUNTRY setting. * city: The name of the GeoIP city data file. Defaults to 'GeoLite2-City.mmdb'; overrides the GEOIP_CITY setting. """ # Checking the given cache option. if cache not in self.cache_options: raise GeoIP2Exception('Invalid GeoIP caching option: %s' % cache) # Getting the GeoIP data path. path = path or GEOIP_SETTINGS['GEOIP_PATH'] if not path: raise GeoIP2Exception('GeoIP path must be provided via parameter or the GEOIP_PATH setting.') path = to_path(path) if path.is_dir(): # Constructing the GeoIP database filenames using the settings # dictionary. If the database files for the GeoLite country # and/or city datasets exist, then try to open them. country_db = path / (country or GEOIP_SETTINGS['GEOIP_COUNTRY']) if country_db.is_file(): self._country = geoip2.database.Reader(str(country_db), mode=cache) self._country_file = country_db city_db = path / (city or GEOIP_SETTINGS['GEOIP_CITY']) if city_db.is_file(): self._city = geoip2.database.Reader(str(city_db), mode=cache) self._city_file = city_db if not self._reader: raise GeoIP2Exception('Could not load a database from %s.' % path) elif path.is_file(): # Otherwise, some detective work will be needed to figure out # whether the given database path is for the GeoIP country or city # databases. reader = geoip2.database.Reader(str(path), mode=cache) db_type = reader.metadata().database_type if db_type.endswith('City'): # GeoLite City database detected. 
self._city = reader self._city_file = path elif db_type.endswith('Country'): # GeoIP Country database detected. self._country = reader self._country_file = path else: raise GeoIP2Exception('Unable to recognize database edition: %s' % db_type) else: raise GeoIP2Exception('GeoIP path must be a valid file or directory.') @property def _reader(self): return self._country or self._city @property def _country_or_city(self): if self._country: return self._country.country else: return self._city.city def __del__(self): # Cleanup any GeoIP file handles lying around. if self._reader: self._reader.close() def __repr__(self): meta = self._reader.metadata() version = '[v%s.%s]' % (meta.binary_format_major_version, meta.binary_format_minor_version) return '<%(cls)s %(version)s _country_file="%(country)s", _city_file="%(city)s">' % { 'cls': self.__class__.__name__, 'version': version, 'country': self._country_file, 'city': self._city_file, } def _check_query(self, query, city=False, city_or_country=False): "Check the query and database availability." # Making sure a string was passed in for the query. if not isinstance(query, str): raise TypeError('GeoIP query must be a string, not type %s' % type(query).__name__) # Extra checks for the existence of country and city databases. if city_or_country and not (self._country or self._city): raise GeoIP2Exception('Invalid GeoIP country and city data files.') elif city and not self._city: raise GeoIP2Exception('Invalid GeoIP city data file: %s' % self._city_file) # Return the query string back to the caller. GeoIP2 only takes IP addresses. try: validate_ipv46_address(query) except ValidationError: query = socket.gethostbyname(query) return query def city(self, query): """ Return a dictionary of city information for the given IP address or Fully Qualified Domain Name (FQDN). Some information in the dictionary may be undefined (None). """ enc_query = self._check_query(query, city=True) return City(self._city.city(enc_query)) def country_code(self, query): "Return the country code for the given IP Address or FQDN." return self.country(query)['country_code'] def country_name(self, query): "Return the country name for the given IP Address or FQDN." return self.country(query)['country_name'] def country(self, query): """ Return a dictionary with the country code and name when given an IP address or a Fully Qualified Domain Name (FQDN). For example, both '24.124.1.80' and 'djangoproject.com' are valid parameters. """ # Returning the country code and name enc_query = self._check_query(query, city_or_country=True) return Country(self._country_or_city(enc_query)) # #### Coordinate retrieval routines #### def coords(self, query, ordering=('longitude', 'latitude')): cdict = self.city(query) if cdict is None: return None else: return tuple(cdict[o] for o in ordering) def lon_lat(self, query): "Return a tuple of the (longitude, latitude) for the given query." return self.coords(query) def lat_lon(self, query): "Return a tuple of the (latitude, longitude) for the given query." return self.coords(query, ('latitude', 'longitude')) def geos(self, query): "Return a GEOS Point object for the given query." ll = self.lon_lat(query) if ll: return Point(ll, srid=4326) else: return None # #### GeoIP Database Information Routines #### @property def info(self): "Return information about the GeoIP library and databases in use." 
meta = self._reader.metadata() return 'GeoIP Library:\n\t%s.%s\n' % (meta.binary_format_major_version, meta.binary_format_minor_version) @classmethod def open(cls, full_path, cache): return GeoIP2(full_path, cache)
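# A hedged usage sketch of the GeoIP2 wrapper defined above. The directory
# path and IP address are placeholder assumptions; real GeoLite2 .mmdb files
# must exist on disk for the lookups to succeed.
def _example_geoip_queries(path='/var/lib/geoip', ip='84.2.3.4'):
    g = GeoIP2(path)
    return {
        'country': g.country(ip),   # {'country_code': ..., 'country_name': ...}
        'city': g.city(ip),         # dict of city fields; values may be None
        'coords': g.lon_lat(ip),    # (longitude, latitude) tuple
        'point': g.geos(ip),        # GEOS Point with srid=4326
    }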
import json from django.contrib.gis.gdal import CoordTransform, SpatialReference from django.core.serializers.base import SerializerDoesNotExist from django.core.serializers.json import Serializer as JSONSerializer class Serializer(JSONSerializer): """ Convert a queryset to GeoJSON, http://geojson.org/ """ def _init_options(self): super()._init_options() self.geometry_field = self.json_kwargs.pop('geometry_field', None) self.srid = self.json_kwargs.pop('srid', 4326) if (self.selected_fields is not None and self.geometry_field is not None and self.geometry_field not in self.selected_fields): self.selected_fields = [*self.selected_fields, self.geometry_field] def start_serialization(self): self._init_options() self._cts = {} # cache of CoordTransform's self.stream.write( '{"type": "FeatureCollection", "crs": {"type": "name", "properties": {"name": "EPSG:%d"}},' ' "features": [' % self.srid) def end_serialization(self): self.stream.write(']}') def start_object(self, obj): super().start_object(obj) self._geometry = None if self.geometry_field is None: # Find the first declared geometry field for field in obj._meta.fields: if hasattr(field, 'geom_type'): self.geometry_field = field.name break def get_dump_object(self, obj): data = { "type": "Feature", "properties": self._current, } if ((self.selected_fields is None or 'pk' in self.selected_fields) and 'pk' not in data["properties"]): data["properties"]["pk"] = obj._meta.pk.value_to_string(obj) if self._geometry: if self._geometry.srid != self.srid: # If needed, transform the geometry in the srid of the global geojson srid if self._geometry.srid not in self._cts: srs = SpatialReference(self.srid) self._cts[self._geometry.srid] = CoordTransform(self._geometry.srs, srs) self._geometry.transform(self._cts[self._geometry.srid]) data["geometry"] = json.loads(self._geometry.geojson) else: data["geometry"] = None return data def handle_field(self, obj, field): if field.name == self.geometry_field: self._geometry = field.value_from_object(obj) else: super().handle_field(obj, field) class Deserializer: def __init__(self, *args, **kwargs): raise SerializerDoesNotExist("geojson is a serialization-only serializer")
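# A hedged usage sketch of the serializer above. It assumes the 'geojson'
# format is available by name (GeoDjango registers it among the built-in
# serializers) and that `queryset` comes from a model whose geometry field
# is named 'geom' (an assumption for illustration).
def _example_geojson_dump(queryset):
    from django.core import serializers
    return serializers.serialize(
        'geojson', queryset,
        geometry_field='geom',  # optional; the first geometry field is used otherwise
        srid=4326,              # output SRID; geometries are transformed if needed
    )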
""" DataSource is a wrapper for the OGR Data Source object, which provides an interface for reading vector geometry data from many different file formats (including ESRI shapefiles). When instantiating a DataSource object, use the filename of a GDAL-supported data source. For example, a SHP file or a TIGER/Line file from the government. The ds_driver keyword is used internally when a ctypes pointer is passed in directly. Example: ds = DataSource('/home/foo/bar.shp') for layer in ds: for feature in layer: # Getting the geometry for the feature. g = feature.geom # Getting the 'description' field for the feature. desc = feature['description'] # We can also increment through all of the fields # attached to this feature. for field in feature: # Get the name of the field (e.g. 'description') nm = field.name # Get the type (integer) of the field, e.g. 0 => OFTInteger t = field.type # Returns the value the field; OFTIntegers return ints, # OFTReal returns floats, all else returns string. val = field.value """ from ctypes import byref from pathlib import Path from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.driver import Driver from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.gdal.layer import Layer from django.contrib.gis.gdal.prototypes import ds as capi from django.utils.encoding import force_bytes, force_str # For more information, see the OGR C API documentation: # https://gdal.org/api/vector_c_api.html # # The OGR_DS_* routines are relevant here. class DataSource(GDALBase): "Wraps an OGR Data Source object." destructor = capi.destroy_ds def __init__(self, ds_input, ds_driver=False, write=False, encoding='utf-8'): # The write flag. if write: self._write = 1 else: self._write = 0 # See also https://gdal.org/development/rfc/rfc23_ogr_unicode.html self.encoding = encoding Driver.ensure_registered() if isinstance(ds_input, (str, Path)): # The data source driver is a void pointer. ds_driver = Driver.ptr_type() try: # OGROpen will auto-detect the data source type. ds = capi.open_ds(force_bytes(ds_input), self._write, byref(ds_driver)) except GDALException: # Making the error message more clear rather than something # like "Invalid pointer returned from OGROpen". raise GDALException('Could not open the datasource at "%s"' % ds_input) elif isinstance(ds_input, self.ptr_type) and isinstance(ds_driver, Driver.ptr_type): ds = ds_input else: raise GDALException('Invalid data source input type: %s' % type(ds_input)) if ds: self.ptr = ds self.driver = Driver(ds_driver) else: # Raise an exception if the returned pointer is NULL raise GDALException('Invalid data source file "%s"' % ds_input) def __getitem__(self, index): "Allows use of the index [] operator to get a layer at the index." if isinstance(index, str): try: layer = capi.get_layer_by_name(self.ptr, force_bytes(index)) except GDALException: raise IndexError('Invalid OGR layer name given: %s.' % index) elif isinstance(index, int): if 0 <= index < self.layer_count: layer = capi.get_layer(self._ptr, index) else: raise IndexError('Index out of range when accessing layers in a datasource: %s.' % index) else: raise TypeError('Invalid index type: %s' % type(index)) return Layer(layer, self) def __len__(self): "Return the number of layers within the data source." return self.layer_count def __str__(self): "Return OGR GetName and Driver for the Data Source." return '%s (%s)' % (self.name, self.driver) @property def layer_count(self): "Return the number of layers in the data source." 
return capi.get_layer_count(self._ptr) @property def name(self): "Return the name of the data source." name = capi.get_ds_name(self._ptr) return force_str(name, self.encoding, strings_only=True)
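# --- Illustrative usage sketch (not part of the module above) ---
# A minimal example of opening a vector data source and inspecting its layers.
# The path 'cities.shp' is a hypothetical file used only for demonstration;
# any OGR-supported source (shapefile, GeoJSON, etc.) is handled the same way.
from django.contrib.gis.gdal import DataSource

def describe_datasource(path='cities.shp'):
    ds = DataSource(path)
    print('%s opened with driver "%s" (%d layer(s))' % (ds.name, ds.driver, len(ds)))
    for layer in ds:
        # Each Layer reports its feature count, geometry type, and field names.
        print('  %s: %d features of type %s, fields=%s' % (
            layer.name, len(layer), layer.geom_type, layer.fields))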
b7cf6fd162fc42ec443471dffaf0455baea26c3a0eadd2b8a865d20c2a0d6460
""" The OGRGeometry is a wrapper for using the OGR Geometry class (see https://gdal.org/api/ogrgeometry_cpp.html#_CPPv411OGRGeometry). OGRGeometry may be instantiated when reading geometries from OGR Data Sources (e.g. SHP files), or when given OGC WKT (a string). While the 'full' API is not present yet, the API is "pythonic" unlike the traditional and "next-generation" OGR Python bindings. One major advantage OGR Geometries have over their GEOS counterparts is support for spatial reference systems and their transformation. Example: >>> from django.contrib.gis.gdal import OGRGeometry, OGRGeomType, SpatialReference >>> wkt1, wkt2 = 'POINT(-90 30)', 'POLYGON((0 0, 5 0, 5 5, 0 5)' >>> pnt = OGRGeometry(wkt1) >>> print(pnt) POINT (-90 30) >>> mpnt = OGRGeometry(OGRGeomType('MultiPoint'), SpatialReference('WGS84')) >>> mpnt.add(wkt1) >>> mpnt.add(wkt1) >>> print(mpnt) MULTIPOINT (-90 30,-90 30) >>> print(mpnt.srs.name) WGS 84 >>> print(mpnt.srs.proj) +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs >>> mpnt.transform(SpatialReference('NAD27')) >>> print(mpnt.proj) +proj=longlat +ellps=clrk66 +datum=NAD27 +no_defs >>> print(mpnt) MULTIPOINT (-89.999930378602485 29.999797886557641,-89.999930378602485 29.999797886557641) The OGRGeomType class is to make it easy to specify an OGR geometry type: >>> from django.contrib.gis.gdal import OGRGeomType >>> gt1 = OGRGeomType(3) # Using an integer for the type >>> gt2 = OGRGeomType('Polygon') # Using a string >>> gt3 = OGRGeomType('POLYGON') # It's case-insensitive >>> print(gt1 == 3, gt1 == 'Polygon') # Equivalence works w/non-OGRGeomType objects True True """ import sys from binascii import b2a_hex from ctypes import byref, c_char_p, c_double, c_ubyte, c_void_p, string_at from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.envelope import Envelope, OGREnvelope from django.contrib.gis.gdal.error import GDALException, SRSException from django.contrib.gis.gdal.geomtype import OGRGeomType from django.contrib.gis.gdal.prototypes import geom as capi, srs as srs_api from django.contrib.gis.gdal.srs import CoordTransform, SpatialReference from django.contrib.gis.geometry import hex_regex, json_regex, wkt_regex from django.utils.encoding import force_bytes # For more information, see the OGR C API source code: # https://gdal.org/api/vector_c_api.html # # The OGR_G_* routines are relevant here. class OGRGeometry(GDALBase): """Encapsulate an OGR geometry.""" destructor = capi.destroy_geom def __init__(self, geom_input, srs=None): """Initialize Geometry on either WKT or an OGR pointer as input.""" str_instance = isinstance(geom_input, str) # If HEX, unpack input to a binary buffer. if str_instance and hex_regex.match(geom_input): geom_input = memoryview(bytes.fromhex(geom_input)) str_instance = False # Constructing the geometry, if str_instance: wkt_m = wkt_regex.match(geom_input) json_m = json_regex.match(geom_input) if wkt_m: if wkt_m['srid']: # If there's EWKT, set the SRS w/value of the SRID. srs = int(wkt_m['srid']) if wkt_m['type'].upper() == 'LINEARRING': # OGR_G_CreateFromWkt doesn't work with LINEARRING WKT. # See https://trac.osgeo.org/gdal/ticket/1992. g = capi.create_geom(OGRGeomType(wkt_m['type']).num) capi.import_wkt(g, byref(c_char_p(wkt_m['wkt'].encode()))) else: g = capi.from_wkt(byref(c_char_p(wkt_m['wkt'].encode())), None, byref(c_void_p())) elif json_m: g = self._from_json(geom_input.encode()) else: # Seeing if the input is a valid short-hand string # (e.g., 'Point', 'POLYGON'). 
OGRGeomType(geom_input) g = capi.create_geom(OGRGeomType(geom_input).num) elif isinstance(geom_input, memoryview): # WKB was passed in g = self._from_wkb(geom_input) elif isinstance(geom_input, OGRGeomType): # OGRGeomType was passed in, an empty geometry will be created. g = capi.create_geom(geom_input.num) elif isinstance(geom_input, self.ptr_type): # OGR pointer (c_void_p) was the input. g = geom_input else: raise GDALException('Invalid input type for OGR Geometry construction: %s' % type(geom_input)) # Now checking the Geometry pointer before finishing initialization # by setting the pointer for the object. if not g: raise GDALException('Cannot create OGR Geometry from input: %s' % geom_input) self.ptr = g # Assigning the SpatialReference object to the geometry, if valid. if srs: self.srs = srs # Setting the class depending upon the OGR Geometry Type self.__class__ = GEO_CLASSES[self.geom_type.num] # Pickle routines def __getstate__(self): srs = self.srs if srs: srs = srs.wkt else: srs = None return bytes(self.wkb), srs def __setstate__(self, state): wkb, srs = state ptr = capi.from_wkb(wkb, None, byref(c_void_p()), len(wkb)) if not ptr: raise GDALException('Invalid OGRGeometry loaded from pickled state.') self.ptr = ptr self.srs = srs @classmethod def _from_wkb(cls, geom_input): return capi.from_wkb(bytes(geom_input), None, byref(c_void_p()), len(geom_input)) @staticmethod def _from_json(geom_input): return capi.from_json(geom_input) @classmethod def from_bbox(cls, bbox): "Construct a Polygon from a bounding box (4-tuple)." x0, y0, x1, y1 = bbox return OGRGeometry('POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' % ( x0, y0, x0, y1, x1, y1, x1, y0, x0, y0)) @staticmethod def from_json(geom_input): return OGRGeometry(OGRGeometry._from_json(force_bytes(geom_input))) @classmethod def from_gml(cls, gml_string): return cls(capi.from_gml(force_bytes(gml_string))) # ### Geometry set-like operations ### # g = g1 | g2 def __or__(self, other): "Return the union of the two geometries." return self.union(other) # g = g1 & g2 def __and__(self, other): "Return the intersection of this Geometry and the other." return self.intersection(other) # g = g1 - g2 def __sub__(self, other): "Return the difference this Geometry and the other." return self.difference(other) # g = g1 ^ g2 def __xor__(self, other): "Return the symmetric difference of this Geometry and the other." return self.sym_difference(other) def __eq__(self, other): "Is this Geometry equal to the other?" return isinstance(other, OGRGeometry) and self.equals(other) def __str__(self): "WKT is used for the string representation." return self.wkt # #### Geometry Properties #### @property def dimension(self): "Return 0 for points, 1 for lines, and 2 for surfaces." return capi.get_dims(self.ptr) def _get_coord_dim(self): "Return the coordinate dimension of the Geometry." return capi.get_coord_dim(self.ptr) def _set_coord_dim(self, dim): "Set the coordinate dimension of this Geometry." if dim not in (2, 3): raise ValueError('Geometry dimension must be either 2 or 3') capi.set_coord_dim(self.ptr, dim) coord_dim = property(_get_coord_dim, _set_coord_dim) @property def geom_count(self): "Return the number of elements in this Geometry." return capi.get_geom_count(self.ptr) @property def point_count(self): "Return the number of Points in this Geometry." 
return capi.get_point_count(self.ptr) @property def num_points(self): "Alias for `point_count` (same name method in GEOS API.)" return self.point_count @property def num_coords(self): "Alias for `point_count`." return self.point_count @property def geom_type(self): "Return the Type for this Geometry." return OGRGeomType(capi.get_geom_type(self.ptr)) @property def geom_name(self): "Return the Name of this Geometry." return capi.get_geom_name(self.ptr) @property def area(self): "Return the area for a LinearRing, Polygon, or MultiPolygon; 0 otherwise." return capi.get_area(self.ptr) @property def envelope(self): "Return the envelope for this Geometry." # TODO: Fix Envelope() for Point geometries. return Envelope(capi.get_envelope(self.ptr, byref(OGREnvelope()))) @property def empty(self): return capi.is_empty(self.ptr) @property def extent(self): "Return the envelope as a 4-tuple, instead of as an Envelope object." return self.envelope.tuple # #### SpatialReference-related Properties #### # The SRS property def _get_srs(self): "Return the Spatial Reference for this Geometry." try: srs_ptr = capi.get_geom_srs(self.ptr) return SpatialReference(srs_api.clone_srs(srs_ptr)) except SRSException: return None def _set_srs(self, srs): "Set the SpatialReference for this geometry." # Do not have to clone the `SpatialReference` object pointer because # when it is assigned to this `OGRGeometry` it's internal OGR # reference count is incremented, and will likewise be released # (decremented) when this geometry's destructor is called. if isinstance(srs, SpatialReference): srs_ptr = srs.ptr elif isinstance(srs, (int, str)): sr = SpatialReference(srs) srs_ptr = sr.ptr elif srs is None: srs_ptr = None else: raise TypeError('Cannot assign spatial reference with object of type: %s' % type(srs)) capi.assign_srs(self.ptr, srs_ptr) srs = property(_get_srs, _set_srs) # The SRID property def _get_srid(self): srs = self.srs if srs: return srs.srid return None def _set_srid(self, srid): if isinstance(srid, int) or srid is None: self.srs = srid else: raise TypeError('SRID must be set with an integer.') srid = property(_get_srid, _set_srid) # #### Output Methods #### def _geos_ptr(self): from django.contrib.gis.geos import GEOSGeometry return GEOSGeometry._from_wkb(self.wkb) @property def geos(self): "Return a GEOSGeometry object from this OGRGeometry." from django.contrib.gis.geos import GEOSGeometry return GEOSGeometry(self._geos_ptr(), self.srid) @property def gml(self): "Return the GML representation of the Geometry." return capi.to_gml(self.ptr) @property def hex(self): "Return the hexadecimal representation of the WKB (a string)." return b2a_hex(self.wkb).upper() @property def json(self): """ Return the GeoJSON representation of this Geometry. """ return capi.to_json(self.ptr) geojson = json @property def kml(self): "Return the KML representation of the Geometry." return capi.to_kml(self.ptr, None) @property def wkb_size(self): "Return the size of the WKB buffer." return capi.get_wkbsize(self.ptr) @property def wkb(self): "Return the WKB representation of the Geometry." if sys.byteorder == 'little': byteorder = 1 # wkbNDR (from ogr_core.h) else: byteorder = 0 # wkbXDR sz = self.wkb_size # Creating the unsigned character buffer, and passing it in by reference. buf = (c_ubyte * sz)() capi.to_wkb(self.ptr, byteorder, byref(buf)) # Returning a buffer of the string at the pointer. return memoryview(string_at(buf, sz)) @property def wkt(self): "Return the WKT representation of the Geometry." 
return capi.to_wkt(self.ptr, byref(c_char_p())) @property def ewkt(self): "Return the EWKT representation of the Geometry." srs = self.srs if srs and srs.srid: return 'SRID=%s;%s' % (srs.srid, self.wkt) else: return self.wkt # #### Geometry Methods #### def clone(self): "Clone this OGR Geometry." return OGRGeometry(capi.clone_geom(self.ptr), self.srs) def close_rings(self): """ If there are any rings within this geometry that have not been closed, this routine will do so by adding the starting point at the end. """ # Closing the open rings. capi.geom_close_rings(self.ptr) def transform(self, coord_trans, clone=False): """ Transform this geometry to a different spatial reference system. May take a CoordTransform object, a SpatialReference object, string WKT or PROJ, and/or an integer SRID. By default, return nothing and transform the geometry in-place. However, if the `clone` keyword is set, return a transformed clone of this geometry. """ if clone: klone = self.clone() klone.transform(coord_trans) return klone # Depending on the input type, use the appropriate OGR routine # to perform the transformation. if isinstance(coord_trans, CoordTransform): capi.geom_transform(self.ptr, coord_trans.ptr) elif isinstance(coord_trans, SpatialReference): capi.geom_transform_to(self.ptr, coord_trans.ptr) elif isinstance(coord_trans, (int, str)): sr = SpatialReference(coord_trans) capi.geom_transform_to(self.ptr, sr.ptr) else: raise TypeError('Transform only accepts CoordTransform, ' 'SpatialReference, string, and integer objects.') # #### Topology Methods #### def _topology(self, func, other): """A generalized function for topology operations, takes a GDAL function and the other geometry to perform the operation on.""" if not isinstance(other, OGRGeometry): raise TypeError('Must use another OGRGeometry object for topology operations!') # Returning the output of the given function with the other geometry's # pointer. return func(self.ptr, other.ptr) def intersects(self, other): "Return True if this geometry intersects with the other." return self._topology(capi.ogr_intersects, other) def equals(self, other): "Return True if this geometry is equivalent to the other." return self._topology(capi.ogr_equals, other) def disjoint(self, other): "Return True if this geometry and the other are spatially disjoint." return self._topology(capi.ogr_disjoint, other) def touches(self, other): "Return True if this geometry touches the other." return self._topology(capi.ogr_touches, other) def crosses(self, other): "Return True if this geometry crosses the other." return self._topology(capi.ogr_crosses, other) def within(self, other): "Return True if this geometry is within the other." return self._topology(capi.ogr_within, other) def contains(self, other): "Return True if this geometry contains the other." return self._topology(capi.ogr_contains, other) def overlaps(self, other): "Return True if this geometry overlaps the other." return self._topology(capi.ogr_overlaps, other) # #### Geometry-generation Methods #### def _geomgen(self, gen_func, other=None): "A helper routine for the OGR routines that generate geometries." if isinstance(other, OGRGeometry): return OGRGeometry(gen_func(self.ptr, other.ptr), self.srs) else: return OGRGeometry(gen_func(self.ptr), self.srs) @property def boundary(self): "Return the boundary of this geometry." return self._geomgen(capi.get_boundary) @property def convex_hull(self): """ Return the smallest convex Polygon that contains all the points in this Geometry. 
""" return self._geomgen(capi.geom_convex_hull) def difference(self, other): """ Return a new geometry consisting of the region which is the difference of this geometry and the other. """ return self._geomgen(capi.geom_diff, other) def intersection(self, other): """ Return a new geometry consisting of the region of intersection of this geometry and the other. """ return self._geomgen(capi.geom_intersection, other) def sym_difference(self, other): """ Return a new geometry which is the symmetric difference of this geometry and the other. """ return self._geomgen(capi.geom_sym_diff, other) def union(self, other): """ Return a new geometry consisting of the region which is the union of this geometry and the other. """ return self._geomgen(capi.geom_union, other) # The subclasses for OGR Geometry. class Point(OGRGeometry): def _geos_ptr(self): from django.contrib.gis import geos return geos.Point._create_empty() if self.empty else super()._geos_ptr() @classmethod def _create_empty(cls): return capi.create_geom(OGRGeomType('point').num) @property def x(self): "Return the X coordinate for this Point." return capi.getx(self.ptr, 0) @property def y(self): "Return the Y coordinate for this Point." return capi.gety(self.ptr, 0) @property def z(self): "Return the Z coordinate for this Point." if self.coord_dim == 3: return capi.getz(self.ptr, 0) @property def tuple(self): "Return the tuple of this point." if self.coord_dim == 2: return (self.x, self.y) elif self.coord_dim == 3: return (self.x, self.y, self.z) coords = tuple class LineString(OGRGeometry): def __getitem__(self, index): "Return the Point at the given index." if 0 <= index < self.point_count: x, y, z = c_double(), c_double(), c_double() capi.get_point(self.ptr, index, byref(x), byref(y), byref(z)) dim = self.coord_dim if dim == 1: return (x.value,) elif dim == 2: return (x.value, y.value) elif dim == 3: return (x.value, y.value, z.value) else: raise IndexError('Index out of range when accessing points of a line string: %s.' % index) def __len__(self): "Return the number of points in the LineString." return self.point_count @property def tuple(self): "Return the tuple representation of this LineString." return tuple(self[i] for i in range(len(self))) coords = tuple def _listarr(self, func): """ Internal routine that returns a sequence (list) corresponding with the given function. """ return [func(self.ptr, i) for i in range(len(self))] @property def x(self): "Return the X coordinates in a list." return self._listarr(capi.getx) @property def y(self): "Return the Y coordinates in a list." return self._listarr(capi.gety) @property def z(self): "Return the Z coordinates in a list." if self.coord_dim == 3: return self._listarr(capi.getz) # LinearRings are used in Polygons. class LinearRing(LineString): pass class Polygon(OGRGeometry): def __len__(self): "Return the number of interior rings in this Polygon." return self.geom_count def __getitem__(self, index): "Get the ring at the specified index." if 0 <= index < self.geom_count: return OGRGeometry(capi.clone_geom(capi.get_geom_ref(self.ptr, index)), self.srs) else: raise IndexError('Index out of range when accessing rings of a polygon: %s.' % index) # Polygon Properties @property def shell(self): "Return the shell of this Polygon." return self[0] # First ring is the shell exterior_ring = shell @property def tuple(self): "Return a tuple of LinearRing coordinate tuples." 
return tuple(self[i].tuple for i in range(self.geom_count)) coords = tuple @property def point_count(self): "Return the number of Points in this Polygon." # Summing up the number of points in each ring of the Polygon. return sum(self[i].point_count for i in range(self.geom_count)) @property def centroid(self): "Return the centroid (a Point) of this Polygon." # The centroid is a Point, create a geometry for this. p = OGRGeometry(OGRGeomType('Point')) capi.get_centroid(self.ptr, p.ptr) return p # Geometry Collection base class. class GeometryCollection(OGRGeometry): "The Geometry Collection class." def __getitem__(self, index): "Get the Geometry at the specified index." if 0 <= index < self.geom_count: return OGRGeometry(capi.clone_geom(capi.get_geom_ref(self.ptr, index)), self.srs) else: raise IndexError('Index out of range when accessing geometry in a collection: %s.' % index) def __len__(self): "Return the number of geometries in this Geometry Collection." return self.geom_count def add(self, geom): "Add the geometry to this Geometry Collection." if isinstance(geom, OGRGeometry): if isinstance(geom, self.__class__): for g in geom: capi.add_geom(self.ptr, g.ptr) else: capi.add_geom(self.ptr, geom.ptr) elif isinstance(geom, str): tmp = OGRGeometry(geom) capi.add_geom(self.ptr, tmp.ptr) else: raise GDALException('Must add an OGRGeometry.') @property def point_count(self): "Return the number of Points in this Geometry Collection." # Summing up the number of points in each geometry in this collection return sum(self[i].point_count for i in range(self.geom_count)) @property def tuple(self): "Return a tuple representation of this Geometry Collection." return tuple(self[i].tuple for i in range(self.geom_count)) coords = tuple # Multiple Geometry types. class MultiPoint(GeometryCollection): pass class MultiLineString(GeometryCollection): pass class MultiPolygon(GeometryCollection): pass # Class mapping dictionary (using the OGRwkbGeometryType as the key) GEO_CLASSES = { 1: Point, 2: LineString, 3: Polygon, 4: MultiPoint, 5: MultiLineString, 6: MultiPolygon, 7: GeometryCollection, 101: LinearRing, 1 + OGRGeomType.wkb25bit: Point, 2 + OGRGeomType.wkb25bit: LineString, 3 + OGRGeomType.wkb25bit: Polygon, 4 + OGRGeomType.wkb25bit: MultiPoint, 5 + OGRGeomType.wkb25bit: MultiLineString, 6 + OGRGeomType.wkb25bit: MultiPolygon, 7 + OGRGeomType.wkb25bit: GeometryCollection, }
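# --- Illustrative usage sketch (not part of the module above) ---
# A hedged example of building OGR geometries from WKT, combining them with
# the set-like operators, and reprojecting the result. The coordinates and
# the target SRID (3857, Web Mercator) are arbitrary example values.
from django.contrib.gis.gdal import OGRGeometry, SpatialReference

def intersection_area_in_web_mercator():
    a = OGRGeometry('POLYGON((0 0, 0 2, 2 2, 2 0, 0 0))', srs=SpatialReference(4326))
    b = OGRGeometry('POLYGON((1 1, 1 3, 3 3, 3 1, 1 1))', srs=SpatialReference(4326))
    overlap = a & b          # equivalent to a.intersection(b); keeps a's SRS
    overlap.transform(3857)  # transform in place to the integer SRID
    return overlap.area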
4cfdcdd71239184ab6578b1296008db4efff4e3141c5977a6c49a59061e0a746
from ctypes import byref, c_int from datetime import date, datetime, time from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.gdal.prototypes import ds as capi from django.utils.encoding import force_str # For more information, see the OGR C API source code: # https://gdal.org/api/vector_c_api.html # # The OGR_Fld_* routines are relevant here. class Field(GDALBase): """ Wrap an OGR Field. Needs to be instantiated from a Feature object. """ def __init__(self, feat, index): """ Initialize on the feature object and the integer index of the field within the feature. """ # Setting the feature pointer and index. self._feat = feat self._index = index # Getting the pointer for this field. fld_ptr = capi.get_feat_field_defn(feat.ptr, index) if not fld_ptr: raise GDALException('Cannot create OGR Field, invalid pointer given.') self.ptr = fld_ptr # Setting the class depending upon the OGR Field Type (OFT) self.__class__ = OGRFieldTypes[self.type] def __str__(self): "Return the string representation of the Field." return str(self.value).strip() # #### Field Methods #### def as_double(self): "Retrieve the Field's value as a double (float)." return capi.get_field_as_double(self._feat.ptr, self._index) if self.is_set else None def as_int(self, is_64=False): "Retrieve the Field's value as an integer." if is_64: return capi.get_field_as_integer64(self._feat.ptr, self._index) if self.is_set else None else: return capi.get_field_as_integer(self._feat.ptr, self._index) if self.is_set else None def as_string(self): "Retrieve the Field's value as a string." if not self.is_set: return None string = capi.get_field_as_string(self._feat.ptr, self._index) return force_str(string, encoding=self._feat.encoding, strings_only=True) def as_datetime(self): "Retrieve the Field's value as a tuple of date & time components." if not self.is_set: return None yy, mm, dd, hh, mn, ss, tz = [c_int() for i in range(7)] status = capi.get_field_as_datetime( self._feat.ptr, self._index, byref(yy), byref(mm), byref(dd), byref(hh), byref(mn), byref(ss), byref(tz)) if status: return (yy, mm, dd, hh, mn, ss, tz) else: raise GDALException('Unable to retrieve date & time information from the field.') # #### Field Properties #### @property def is_set(self): "Return True if the value of this field isn't null, False otherwise." return capi.is_field_set(self._feat.ptr, self._index) @property def name(self): "Return the name of this Field." name = capi.get_field_name(self.ptr) return force_str(name, encoding=self._feat.encoding, strings_only=True) @property def precision(self): "Return the precision of this Field." return capi.get_field_precision(self.ptr) @property def type(self): "Return the OGR type of this Field." return capi.get_field_type(self.ptr) @property def type_name(self): "Return the OGR field type name for this Field." return capi.get_field_type_name(self.type) @property def value(self): "Return the value of this Field." # Default is to get the field as a string. return self.as_string() @property def width(self): "Return the width of this Field." return capi.get_field_width(self.ptr) # ### The Field sub-classes for each OGR Field type. ### class OFTInteger(Field): _bit64 = False @property def value(self): "Return an integer contained in this field." return self.as_int(self._bit64) @property def type(self): """ GDAL uses OFTReals to represent OFTIntegers in created shapefiles -- forcing the type here since the underlying field type may actually be OFTReal. 
""" return 0 class OFTReal(Field): @property def value(self): "Return a float contained in this field." return self.as_double() # String & Binary fields, just subclasses class OFTString(Field): pass class OFTWideString(Field): pass class OFTBinary(Field): pass # OFTDate, OFTTime, OFTDateTime fields. class OFTDate(Field): @property def value(self): "Return a Python `date` object for the OFTDate field." try: yy, mm, dd, hh, mn, ss, tz = self.as_datetime() return date(yy.value, mm.value, dd.value) except (TypeError, ValueError, GDALException): return None class OFTDateTime(Field): @property def value(self): "Return a Python `datetime` object for this OFTDateTime field." # TODO: Adapt timezone information. # See https://lists.osgeo.org/pipermail/gdal-dev/2006-February/007990.html # The `tz` variable has values of: 0=unknown, 1=localtime (ambiguous), # 100=GMT, 104=GMT+1, 80=GMT-5, etc. try: yy, mm, dd, hh, mn, ss, tz = self.as_datetime() return datetime(yy.value, mm.value, dd.value, hh.value, mn.value, ss.value) except (TypeError, ValueError, GDALException): return None class OFTTime(Field): @property def value(self): "Return a Python `time` object for this OFTTime field." try: yy, mm, dd, hh, mn, ss, tz = self.as_datetime() return time(hh.value, mn.value, ss.value) except (ValueError, GDALException): return None class OFTInteger64(OFTInteger): _bit64 = True # List fields are also just subclasses class OFTIntegerList(Field): pass class OFTRealList(Field): pass class OFTStringList(Field): pass class OFTWideStringList(Field): pass class OFTInteger64List(Field): pass # Class mapping dictionary for OFT Types and reverse mapping. OGRFieldTypes = { 0: OFTInteger, 1: OFTIntegerList, 2: OFTReal, 3: OFTRealList, 4: OFTString, 5: OFTStringList, 6: OFTWideString, 7: OFTWideStringList, 8: OFTBinary, 9: OFTDate, 10: OFTTime, 11: OFTDateTime, 12: OFTInteger64, 13: OFTInteger64List, } ROGRFieldTypes = {cls: num for num, cls in OGRFieldTypes.items()}
7a582156510fc7a6664142611e5dbc46f5a2aef11790d22e53a522cb54dedcbc
from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.gdal.field import Field from django.contrib.gis.gdal.geometries import OGRGeometry, OGRGeomType from django.contrib.gis.gdal.prototypes import ds as capi, geom as geom_api from django.utils.encoding import force_bytes, force_str # For more information, see the OGR C API source code: # https://gdal.org/api/vector_c_api.html # # The OGR_F_* routines are relevant here. class Feature(GDALBase): """ This class that wraps an OGR Feature, needs to be instantiated from a Layer object. """ destructor = capi.destroy_feature def __init__(self, feat, layer): """ Initialize Feature from a pointer and its Layer object. """ if not feat: raise GDALException('Cannot create OGR Feature, invalid pointer given.') self.ptr = feat self._layer = layer def __getitem__(self, index): """ Get the Field object at the specified index, which may be either an integer or the Field's string label. Note that the Field object is not the field's _value_ -- use the `get` method instead to retrieve the value (e.g. an integer) instead of a Field instance. """ if isinstance(index, str): i = self.index(index) elif 0 <= index < self.num_fields: i = index else: raise IndexError('Index out of range when accessing field in a feature: %s.' % index) return Field(self, i) def __len__(self): "Return the count of fields in this feature." return self.num_fields def __str__(self): "The string name of the feature." return 'Feature FID %d in Layer<%s>' % (self.fid, self.layer_name) def __eq__(self, other): "Do equivalence testing on the features." return bool(capi.feature_equal(self.ptr, other._ptr)) # #### Feature Properties #### @property def encoding(self): return self._layer._ds.encoding @property def fid(self): "Return the feature identifier." return capi.get_fid(self.ptr) @property def layer_name(self): "Return the name of the layer for the feature." name = capi.get_feat_name(self._layer._ldefn) return force_str(name, self.encoding, strings_only=True) @property def num_fields(self): "Return the number of fields in the Feature." return capi.get_feat_field_count(self.ptr) @property def fields(self): "Return a list of fields in the Feature." return [ force_str( capi.get_field_name(capi.get_field_defn(self._layer._ldefn, i)), self.encoding, strings_only=True ) for i in range(self.num_fields) ] @property def geom(self): "Return the OGR Geometry for this Feature." # Retrieving the geometry pointer for the feature. geom_ptr = capi.get_feat_geom_ref(self.ptr) return OGRGeometry(geom_api.clone_geom(geom_ptr)) @property def geom_type(self): "Return the OGR Geometry Type for this Feature." return OGRGeomType(capi.get_fd_geom_type(self._layer._ldefn)) # #### Feature Methods #### def get(self, field): """ Return the value of the field, instead of an instance of the Field object. May take a string of the field name or a Field object as parameters. """ field_name = getattr(field, 'name', field) return self[field_name].value def index(self, field_name): "Return the index of the given field name." i = capi.get_field_index(self.ptr, force_bytes(field_name)) if i < 0: raise IndexError('Invalid OFT field name given: %s.' % field_name) return i
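# --- Illustrative usage sketch (not part of the module above) ---
# Iterating a layer yields Feature objects that expose both attribute values
# and the feature geometry. The data source path and the 'POP' field name are
# assumptions made only for this example.
from django.contrib.gis.gdal import DataSource

def feature_summaries(path='cities.shp'):
    layer = DataSource(path)[0]
    for feat in layer:
        # feat.get() returns the field *value*; feat['POP'] would return a Field.
        yield feat.fid, feat.geom_type.name, feat.get('POP')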
5c8dff0552fc58402272fda1cc303c282644ff840cfe4e2123449f8cdc5d1aa4
""" This module houses the GDAL & SRS Exception objects, and the check_err() routine which checks the status code returned by GDAL/OGR methods. """ # #### GDAL & SRS Exceptions #### class GDALException(Exception): pass class SRSException(Exception): pass # #### GDAL/OGR error checking codes and routine #### # OGR Error Codes OGRERR_DICT = { 1: (GDALException, 'Not enough data.'), 2: (GDALException, 'Not enough memory.'), 3: (GDALException, 'Unsupported geometry type.'), 4: (GDALException, 'Unsupported operation.'), 5: (GDALException, 'Corrupt data.'), 6: (GDALException, 'OGR failure.'), 7: (SRSException, 'Unsupported SRS.'), 8: (GDALException, 'Invalid handle.'), } # CPL Error Codes # https://gdal.org/api/cpl.html#cpl-error-h CPLERR_DICT = { 1: (GDALException, 'AppDefined'), 2: (GDALException, 'OutOfMemory'), 3: (GDALException, 'FileIO'), 4: (GDALException, 'OpenFailed'), 5: (GDALException, 'IllegalArg'), 6: (GDALException, 'NotSupported'), 7: (GDALException, 'AssertionFailed'), 8: (GDALException, 'NoWriteAccess'), 9: (GDALException, 'UserInterrupt'), 10: (GDALException, 'ObjectNull'), } ERR_NONE = 0 def check_err(code, cpl=False): """ Check the given CPL/OGRERR and raise an exception where appropriate. """ err_dict = CPLERR_DICT if cpl else OGRERR_DICT if code == ERR_NONE: return elif code in err_dict: e, msg = err_dict[code] raise e(msg) else: raise GDALException('Unknown error code: "%s"' % code)
4e9f4cba770d4148312abca1ca8e0236774059106750f20a08dfa2a97163560e
""" The GDAL/OGR library uses an Envelope structure to hold the bounding box information for a geometry. The envelope (bounding box) contains two pairs of coordinates, one for the lower left coordinate and one for the upper right coordinate: +----------o Upper right; (max_x, max_y) | | | | | | Lower left (min_x, min_y) o----------+ """ from ctypes import Structure, c_double from django.contrib.gis.gdal.error import GDALException # The OGR definition of an Envelope is a C structure containing four doubles. # See the 'ogr_core.h' source file for more information: # https://gdal.org/doxygen/ogr__core_8h_source.html class OGREnvelope(Structure): "Represent the OGREnvelope C Structure." _fields_ = [("MinX", c_double), ("MaxX", c_double), ("MinY", c_double), ("MaxY", c_double), ] class Envelope: """ The Envelope object is a C structure that contains the minimum and maximum X, Y coordinates for a rectangle bounding box. The naming of the variables is compatible with the OGR Envelope structure. """ def __init__(self, *args): """ The initialization function may take an OGREnvelope structure, 4-element tuple or list, or 4 individual arguments. """ if len(args) == 1: if isinstance(args[0], OGREnvelope): # OGREnvelope (a ctypes Structure) was passed in. self._envelope = args[0] elif isinstance(args[0], (tuple, list)): # A tuple was passed in. if len(args[0]) != 4: raise GDALException('Incorrect number of tuple elements (%d).' % len(args[0])) else: self._from_sequence(args[0]) else: raise TypeError('Incorrect type of argument: %s' % type(args[0])) elif len(args) == 4: # Individual parameters passed in. # Thanks to ww for the help self._from_sequence([float(a) for a in args]) else: raise GDALException('Incorrect number (%d) of arguments.' % len(args)) # Checking the x,y coordinates if self.min_x > self.max_x: raise GDALException('Envelope minimum X > maximum X.') if self.min_y > self.max_y: raise GDALException('Envelope minimum Y > maximum Y.') def __eq__(self, other): """ Return True if the envelopes are equivalent; can compare against other Envelopes and 4-tuples. """ if isinstance(other, Envelope): return (self.min_x == other.min_x) and (self.min_y == other.min_y) and \ (self.max_x == other.max_x) and (self.max_y == other.max_y) elif isinstance(other, tuple) and len(other) == 4: return (self.min_x == other[0]) and (self.min_y == other[1]) and \ (self.max_x == other[2]) and (self.max_y == other[3]) else: raise GDALException('Equivalence testing only works with other Envelopes.') def __str__(self): "Return a string representation of the tuple." return str(self.tuple) def _from_sequence(self, seq): "Initialize the C OGR Envelope structure from the given sequence." self._envelope = OGREnvelope() self._envelope.MinX = seq[0] self._envelope.MinY = seq[1] self._envelope.MaxX = seq[2] self._envelope.MaxY = seq[3] def expand_to_include(self, *args): """ Modify the envelope to expand to include the boundaries of the passed-in 2-tuple (a point), 4-tuple (an extent) or envelope. """ # We provide a number of different signatures for this method, # and the logic here is all about converting them into a # 4-tuple single parameter which does the actual work of # expanding the envelope. if len(args) == 1: if isinstance(args[0], Envelope): return self.expand_to_include(args[0].tuple) elif hasattr(args[0], 'x') and hasattr(args[0], 'y'): return self.expand_to_include(args[0].x, args[0].y, args[0].x, args[0].y) elif isinstance(args[0], (tuple, list)): # A tuple was passed in. 
if len(args[0]) == 2: return self.expand_to_include((args[0][0], args[0][1], args[0][0], args[0][1])) elif len(args[0]) == 4: (minx, miny, maxx, maxy) = args[0] if minx < self._envelope.MinX: self._envelope.MinX = minx if miny < self._envelope.MinY: self._envelope.MinY = miny if maxx > self._envelope.MaxX: self._envelope.MaxX = maxx if maxy > self._envelope.MaxY: self._envelope.MaxY = maxy else: raise GDALException('Incorrect number of tuple elements (%d).' % len(args[0])) else: raise TypeError('Incorrect type of argument: %s' % type(args[0])) elif len(args) == 2: # An x and an y parameter were passed in return self.expand_to_include((args[0], args[1], args[0], args[1])) elif len(args) == 4: # Individual parameters passed in. return self.expand_to_include(args) else: raise GDALException('Incorrect number (%d) of arguments.' % len(args[0])) @property def min_x(self): "Return the value of the minimum X coordinate." return self._envelope.MinX @property def min_y(self): "Return the value of the minimum Y coordinate." return self._envelope.MinY @property def max_x(self): "Return the value of the maximum X coordinate." return self._envelope.MaxX @property def max_y(self): "Return the value of the maximum Y coordinate." return self._envelope.MaxY @property def ur(self): "Return the upper-right coordinate." return (self.max_x, self.max_y) @property def ll(self): "Return the lower-left coordinate." return (self.min_x, self.min_y) @property def tuple(self): "Return a tuple representing the envelope." return (self.min_x, self.min_y, self.max_x, self.max_y) @property def wkt(self): "Return WKT representing a Polygon for this envelope." # TODO: Fix significant figures. return 'POLYGON((%s %s,%s %s,%s %s,%s %s,%s %s))' % \ (self.min_x, self.min_y, self.min_x, self.max_y, self.max_x, self.max_y, self.max_x, self.min_y, self.min_x, self.min_y)
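# --- Illustrative usage sketch (not part of the module above) ---
# An Envelope can be built from four coordinates and grown with
# expand_to_include(), which accepts a point, a 4-tuple extent, or another
# Envelope. The coordinate values below are arbitrary.
from django.contrib.gis.gdal.envelope import Envelope

env = Envelope(0.0, 0.0, 5.0, 5.0)               # (min_x, min_y, max_x, max_y)
env.expand_to_include(7.5, 3.0)                  # a single point widens max_x
env.expand_to_include((-2.0, -1.0, 1.0, 1.0))    # a 4-tuple extent widens the minimums
assert env.tuple == (-2.0, -1.0, 7.5, 5.0)
print(env.wkt)                                   # POLYGON((...)) covering the extent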
dc53f90a36ab6645e579a954cc8fbb6a4572f2bc96efd9c278f06baf0cf443ea
from ctypes import c_void_p from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.gdal.prototypes import ds as vcapi, raster as rcapi from django.utils.encoding import force_bytes, force_str class Driver(GDALBase): """ Wrap a GDAL/OGR Data Source Driver. For more information, see the C API documentation: https://gdal.org/api/vector_c_api.html https://gdal.org/api/raster_c_api.html """ # Case-insensitive aliases for some GDAL/OGR Drivers. # For a complete list of original driver names see # https://gdal.org/drivers/vector/ # https://gdal.org/drivers/raster/ _alias = { # vector 'esri': 'ESRI Shapefile', 'shp': 'ESRI Shapefile', 'shape': 'ESRI Shapefile', 'tiger': 'TIGER', 'tiger/line': 'TIGER', # raster 'tiff': 'GTiff', 'tif': 'GTiff', 'jpeg': 'JPEG', 'jpg': 'JPEG', } def __init__(self, dr_input): """ Initialize an GDAL/OGR driver on either a string or integer input. """ if isinstance(dr_input, str): # If a string name of the driver was passed in self.ensure_registered() # Checking the alias dictionary (case-insensitive) to see if an # alias exists for the given driver. if dr_input.lower() in self._alias: name = self._alias[dr_input.lower()] else: name = dr_input # Attempting to get the GDAL/OGR driver by the string name. for iface in (vcapi, rcapi): driver = c_void_p(iface.get_driver_by_name(force_bytes(name))) if driver: break elif isinstance(dr_input, int): self.ensure_registered() for iface in (vcapi, rcapi): driver = iface.get_driver(dr_input) if driver: break elif isinstance(dr_input, c_void_p): driver = dr_input else: raise GDALException('Unrecognized input type for GDAL/OGR Driver: %s' % type(dr_input)) # Making sure we get a valid pointer to the OGR Driver if not driver: raise GDALException('Could not initialize GDAL/OGR Driver on input: %s' % dr_input) self.ptr = driver def __str__(self): return self.name @classmethod def ensure_registered(cls): """ Attempt to register all the data source drivers. """ # Only register all if the driver counts are 0 (or else all drivers # will be registered over and over again) if not vcapi.get_driver_count(): vcapi.register_all() if not rcapi.get_driver_count(): rcapi.register_all() @classmethod def driver_count(cls): """ Return the number of GDAL/OGR data source drivers registered. """ return vcapi.get_driver_count() + rcapi.get_driver_count() @property def name(self): """ Return description/name string for this driver. """ return force_str(rcapi.get_driver_description(self.ptr))
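# --- Illustrative usage sketch (not part of the module above) ---
# Drivers may be looked up by their case-insensitive aliases; what actually
# gets printed depends on the drivers compiled into the local GDAL build.
from django.contrib.gis.gdal import Driver

Driver.ensure_registered()
print(Driver.driver_count())   # total number of vector + raster drivers
print(Driver('shp'))           # alias resolves to the 'ESRI Shapefile' driver
print(Driver('tif'))           # alias resolves to the 'GTiff' raster driver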
ee13aac254e616fd090b2817021f030cb65ec641abffbecbed3a25cadbdc1fa5
import logging import os import re from ctypes import CDLL, CFUNCTYPE, c_char_p, c_int from ctypes.util import find_library from django.contrib.gis.gdal.error import GDALException from django.core.exceptions import ImproperlyConfigured logger = logging.getLogger('django.contrib.gis') # Custom library path set? try: from django.conf import settings lib_path = settings.GDAL_LIBRARY_PATH except (AttributeError, ImportError, ImproperlyConfigured, OSError): lib_path = None if lib_path: lib_names = None elif os.name == 'nt': # Windows NT shared libraries lib_names = [ 'gdal303', 'gdal302', 'gdal301', 'gdal300', 'gdal204', 'gdal203', 'gdal202', ] elif os.name == 'posix': # *NIX library names. lib_names = [ 'gdal', 'GDAL', 'gdal3.3.0', 'gdal3.2.0', 'gdal3.1.0', 'gdal3.0.0', 'gdal2.4.0', 'gdal2.3.0', 'gdal2.2.0', ] else: raise ImproperlyConfigured('GDAL is unsupported on OS "%s".' % os.name) # Using the ctypes `find_library` utility to find the # path to the GDAL library from the list of library names. if lib_names: for lib_name in lib_names: lib_path = find_library(lib_name) if lib_path is not None: break if lib_path is None: raise ImproperlyConfigured( 'Could not find the GDAL library (tried "%s"). Is GDAL installed? ' 'If it is, try setting GDAL_LIBRARY_PATH in your settings.' % '", "'.join(lib_names) ) # This loads the GDAL/OGR C library lgdal = CDLL(lib_path) # On Windows, the GDAL binaries have some OSR routines exported with # STDCALL, while others are not. Thus, the library will also need to # be loaded up as WinDLL for said OSR functions that require the # different calling convention. if os.name == 'nt': from ctypes import WinDLL lwingdal = WinDLL(lib_path) def std_call(func): """ Return the correct STDCALL function for certain OSR routines on Win32 platforms. """ if os.name == 'nt': return lwingdal[func] else: return lgdal[func] # #### Version-information functions. #### # Return GDAL library version information with the given key. _version_info = std_call('GDALVersionInfo') _version_info.argtypes = [c_char_p] _version_info.restype = c_char_p def gdal_version(): "Return only the GDAL version number information." return _version_info(b'RELEASE_NAME') def gdal_full_version(): "Return the full GDAL version information." return _version_info(b'') def gdal_version_info(): ver = gdal_version() m = re.match(br'^(?P<major>\d+)\.(?P<minor>\d+)(?:\.(?P<subminor>\d+))?', ver) if not m: raise GDALException('Could not parse GDAL version string "%s"' % ver) major, minor, subminor = m.groups() return (int(major), int(minor), subminor and int(subminor)) GDAL_VERSION = gdal_version_info() # Set library error handling so as errors are logged CPLErrorHandler = CFUNCTYPE(None, c_int, c_int, c_char_p) def err_handler(error_class, error_number, message): logger.error('GDAL_ERROR %d: %s', error_number, message) err_handler = CPLErrorHandler(err_handler) def function(name, args, restype): func = std_call(name) func.argtypes = args func.restype = restype return func set_error_handler = function('CPLSetErrorHandler', [CPLErrorHandler], CPLErrorHandler) set_error_handler(err_handler)
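# --- Illustrative usage sketch (not part of the module above) ---
# The version helpers report on the GDAL library that was actually loaded;
# the example output shown in the comments will differ per installation.
from django.contrib.gis.gdal.libgdal import (
    GDAL_VERSION, gdal_full_version, gdal_version,
)

print(gdal_version())       # e.g. b'3.2.1'
print(GDAL_VERSION)         # e.g. (3, 2, 1)
print(gdal_full_version())  # full build information string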
b79a55a208be0c5bbf6597bc4e980ae6503bd8bada1315fa1933301d95f5afcb
from ctypes import byref, c_double from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.envelope import Envelope, OGREnvelope from django.contrib.gis.gdal.error import GDALException, SRSException from django.contrib.gis.gdal.feature import Feature from django.contrib.gis.gdal.field import OGRFieldTypes from django.contrib.gis.gdal.geometries import OGRGeometry from django.contrib.gis.gdal.geomtype import OGRGeomType from django.contrib.gis.gdal.prototypes import ( ds as capi, geom as geom_api, srs as srs_api, ) from django.contrib.gis.gdal.srs import SpatialReference from django.utils.encoding import force_bytes, force_str # For more information, see the OGR C API source code: # https://gdal.org/api/vector_c_api.html # # The OGR_L_* routines are relevant here. class Layer(GDALBase): "A class that wraps an OGR Layer, needs to be instantiated from a DataSource object." def __init__(self, layer_ptr, ds): """ Initialize on an OGR C pointer to the Layer and the `DataSource` object that owns this layer. The `DataSource` object is required so that a reference to it is kept with this Layer. This prevents garbage collection of the `DataSource` while this Layer is still active. """ if not layer_ptr: raise GDALException('Cannot create Layer, invalid pointer given') self.ptr = layer_ptr self._ds = ds self._ldefn = capi.get_layer_defn(self._ptr) # Does the Layer support random reading? self._random_read = self.test_capability(b'RandomRead') def __getitem__(self, index): "Get the Feature at the specified index." if isinstance(index, int): # An integer index was given -- we cannot do a check based on the # number of features because the beginning and ending feature IDs # are not guaranteed to be 0 and len(layer)-1, respectively. if index < 0: raise IndexError('Negative indices are not allowed on OGR Layers.') return self._make_feature(index) elif isinstance(index, slice): # A slice was given start, stop, stride = index.indices(self.num_feat) return [self._make_feature(fid) for fid in range(start, stop, stride)] else: raise TypeError('Integers and slices may only be used when indexing OGR Layers.') def __iter__(self): "Iterate over each Feature in the Layer." # ResetReading() must be called before iteration is to begin. capi.reset_reading(self._ptr) for i in range(self.num_feat): yield Feature(capi.get_next_feature(self._ptr), self) def __len__(self): "The length is the number of features." return self.num_feat def __str__(self): "The string name of the layer." return self.name def _make_feature(self, feat_id): """ Helper routine for __getitem__ that constructs a Feature from the given Feature ID. If the OGR Layer does not support random-access reading, then each feature of the layer will be incremented through until the a Feature is found matching the given feature ID. """ if self._random_read: # If the Layer supports random reading, return. try: return Feature(capi.get_feature(self.ptr, feat_id), self) except GDALException: pass else: # Random access isn't supported, have to increment through # each feature until the given feature ID is encountered. for feat in self: if feat.fid == feat_id: return feat # Should have returned a Feature, raise an IndexError. raise IndexError('Invalid feature id: %s.' % feat_id) # #### Layer properties #### @property def extent(self): "Return the extent (an Envelope) of this layer." env = OGREnvelope() capi.get_extent(self.ptr, byref(env), 1) return Envelope(env) @property def name(self): "Return the name of this layer in the Data Source." 
name = capi.get_fd_name(self._ldefn) return force_str(name, self._ds.encoding, strings_only=True) @property def num_feat(self, force=1): "Return the number of features in the Layer." return capi.get_feature_count(self.ptr, force) @property def num_fields(self): "Return the number of fields in the Layer." return capi.get_field_count(self._ldefn) @property def geom_type(self): "Return the geometry type (OGRGeomType) of the Layer." return OGRGeomType(capi.get_fd_geom_type(self._ldefn)) @property def srs(self): "Return the Spatial Reference used in this Layer." try: ptr = capi.get_layer_srs(self.ptr) return SpatialReference(srs_api.clone_srs(ptr)) except SRSException: return None @property def fields(self): """ Return a list of string names corresponding to each of the Fields available in this Layer. """ return [force_str( capi.get_field_name(capi.get_field_defn(self._ldefn, i)), self._ds.encoding, strings_only=True, ) for i in range(self.num_fields)] @property def field_types(self): """ Return a list of the types of fields in this Layer. For example, return the list [OFTInteger, OFTReal, OFTString] for an OGR layer that has an integer, a floating-point, and string fields. """ return [OGRFieldTypes[capi.get_field_type(capi.get_field_defn(self._ldefn, i))] for i in range(self.num_fields)] @property def field_widths(self): "Return a list of the maximum field widths for the features." return [capi.get_field_width(capi.get_field_defn(self._ldefn, i)) for i in range(self.num_fields)] @property def field_precisions(self): "Return the field precisions for the features." return [capi.get_field_precision(capi.get_field_defn(self._ldefn, i)) for i in range(self.num_fields)] def _get_spatial_filter(self): try: return OGRGeometry(geom_api.clone_geom(capi.get_spatial_filter(self.ptr))) except GDALException: return None def _set_spatial_filter(self, filter): if isinstance(filter, OGRGeometry): capi.set_spatial_filter(self.ptr, filter.ptr) elif isinstance(filter, (tuple, list)): if not len(filter) == 4: raise ValueError('Spatial filter list/tuple must have 4 elements.') # Map c_double onto params -- if a bad type is passed in it # will be caught here. xmin, ymin, xmax, ymax = map(c_double, filter) capi.set_spatial_filter_rect(self.ptr, xmin, ymin, xmax, ymax) elif filter is None: capi.set_spatial_filter(self.ptr, None) else: raise TypeError('Spatial filter must be either an OGRGeometry instance, a 4-tuple, or None.') spatial_filter = property(_get_spatial_filter, _set_spatial_filter) # #### Layer Methods #### def get_fields(self, field_name): """ Return a list containing the given field name for every Feature in the Layer. """ if field_name not in self.fields: raise GDALException('invalid field name: %s' % field_name) return [feat.get(field_name) for feat in self] def get_geoms(self, geos=False): """ Return a list containing the OGRGeometry for every Feature in the Layer. """ if geos: from django.contrib.gis.geos import GEOSGeometry return [GEOSGeometry(feat.geom.wkb) for feat in self] else: return [feat.geom for feat in self] def test_capability(self, capability): """ Return a bool indicating whether the this Layer supports the given capability (a string). Valid capability strings include: 'RandomRead', 'SequentialWrite', 'RandomWrite', 'FastSpatialFilter', 'FastFeatureCount', 'FastGetExtent', 'CreateField', 'Transactions', 'DeleteFeature', and 'FastSetNextByIndex'. """ return bool(capi.test_capability(self.ptr, force_bytes(capability)))
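# --- Illustrative usage sketch (not part of the module above) ---
# Assigning a 4-tuple to spatial_filter restricts iteration to features whose
# geometries intersect that extent; assigning None clears it again. The path
# and bounding-box values are placeholders for the example.
from django.contrib.gis.gdal import DataSource

def feature_ids_in_bbox(path='cities.shp', bbox=(-10.0, -10.0, 10.0, 10.0)):
    layer = DataSource(path)[0]
    layer.spatial_filter = bbox        # uses OGR_L_SetSpatialFilterRect
    try:
        return [feat.fid for feat in layer]
    finally:
        layer.spatial_filter = None    # remove the filter afterwards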
a5ec1a4a2a4ae1f935cfae2717f32c87cba60b17202d2c7ab4c9b3f1b067a84c
from django.contrib.admin import ( HORIZONTAL, VERTICAL, AdminSite, ModelAdmin, StackedInline, TabularInline, action, autodiscover, display, register, site, ) from django.contrib.gis.admin.options import ( GeoModelAdmin, GISModelAdmin, OSMGeoAdmin, ) from django.contrib.gis.admin.widgets import OpenLayersWidget __all__ = [ 'HORIZONTAL', 'VERTICAL', 'AdminSite', 'ModelAdmin', 'StackedInline', 'TabularInline', 'action', 'autodiscover', 'display', 'register', 'site', 'GISModelAdmin', 'OpenLayersWidget', # RemovedInDjango50Warning. 'GeoModelAdmin', 'OSMGeoAdmin', ]
3b607c917994de8b2f10d8022c562475ce9583a6475fae2b4d5d12b3de655d8e
import warnings from django.contrib.admin import ModelAdmin from django.contrib.gis.admin.widgets import OpenLayersWidget from django.contrib.gis.db import models from django.contrib.gis.forms import OSMWidget from django.contrib.gis.gdal import OGRGeomType from django.forms import Media from django.utils.deprecation import RemovedInDjango50Warning class GeoModelAdminMixin: gis_widget = OSMWidget gis_widget_kwargs = {} def formfield_for_dbfield(self, db_field, request, **kwargs): if ( isinstance(db_field, models.GeometryField) and (db_field.dim < 3 or self.gis_widget.supports_3d) ): kwargs['widget'] = self.gis_widget(**self.gis_widget_kwargs) return db_field.formfield(**kwargs) else: return super().formfield_for_dbfield(db_field, request, **kwargs) class GISModelAdmin(GeoModelAdminMixin, ModelAdmin): pass # RemovedInDjango50Warning. spherical_mercator_srid = 3857 # RemovedInDjango50Warning. class GeoModelAdmin(ModelAdmin): """ The administration options class for Geographic models. Map settings may be overloaded from their defaults to create custom maps. """ # The default map settings that may be overloaded -- still subject # to API changes. default_lon = 0 default_lat = 0 default_zoom = 4 display_wkt = False display_srid = False extra_js = [] num_zoom = 18 max_zoom = False min_zoom = False units = False max_resolution = False max_extent = False modifiable = True mouse_position = True scale_text = True layerswitcher = True scrollable = True map_width = 600 map_height = 400 map_srid = 4326 map_template = 'gis/admin/openlayers.html' openlayers_url = 'https://cdnjs.cloudflare.com/ajax/libs/openlayers/2.13.1/OpenLayers.js' point_zoom = num_zoom - 6 wms_url = 'http://vmap0.tiles.osgeo.org/wms/vmap0' wms_layer = 'basic' wms_name = 'OpenLayers WMS' wms_options = {'format': 'image/jpeg'} debug = False widget = OpenLayersWidget def __init__(self, *args, **kwargs): warnings.warn( 'django.contrib.gis.admin.GeoModelAdmin and OSMGeoAdmin are ' 'deprecated in favor of django.contrib.admin.ModelAdmin and ' 'django.contrib.gis.admin.GISModelAdmin.', RemovedInDjango50Warning, stacklevel=2, ) super().__init__(*args, **kwargs) @property def media(self): "Injects OpenLayers JavaScript into the admin." return super().media + Media(js=[self.openlayers_url] + self.extra_js) def formfield_for_dbfield(self, db_field, request, **kwargs): """ Overloaded from ModelAdmin so that an OpenLayersWidget is used for viewing/editing 2D GeometryFields (OpenLayers 2 does not support 3D editing). """ if isinstance(db_field, models.GeometryField) and db_field.dim < 3: # Setting the widget with the newly defined widget. kwargs['widget'] = self.get_map_widget(db_field) return db_field.formfield(**kwargs) else: return super().formfield_for_dbfield(db_field, request, **kwargs) def get_map_widget(self, db_field): """ Return a subclass of the OpenLayersWidget (or whatever was specified in the `widget` attribute) using the settings from the attributes set in this class. 
""" is_collection = db_field.geom_type in ('MULTIPOINT', 'MULTILINESTRING', 'MULTIPOLYGON', 'GEOMETRYCOLLECTION') if is_collection: if db_field.geom_type == 'GEOMETRYCOLLECTION': collection_type = 'Any' else: collection_type = OGRGeomType(db_field.geom_type.replace('MULTI', '')) else: collection_type = 'None' class OLMap(self.widget): template_name = self.map_template geom_type = db_field.geom_type wms_options = '' if self.wms_options: wms_options = ["%s: '%s'" % pair for pair in self.wms_options.items()] wms_options = ', %s' % ', '.join(wms_options) params = { 'default_lon': self.default_lon, 'default_lat': self.default_lat, 'default_zoom': self.default_zoom, 'display_wkt': self.debug or self.display_wkt, 'geom_type': OGRGeomType(db_field.geom_type), 'field_name': db_field.name, 'is_collection': is_collection, 'scrollable': self.scrollable, 'layerswitcher': self.layerswitcher, 'collection_type': collection_type, 'is_generic': db_field.geom_type == 'GEOMETRY', 'is_linestring': db_field.geom_type in ('LINESTRING', 'MULTILINESTRING'), 'is_polygon': db_field.geom_type in ('POLYGON', 'MULTIPOLYGON'), 'is_point': db_field.geom_type in ('POINT', 'MULTIPOINT'), 'num_zoom': self.num_zoom, 'max_zoom': self.max_zoom, 'min_zoom': self.min_zoom, 'units': self.units, # likely should get from object 'max_resolution': self.max_resolution, 'max_extent': self.max_extent, 'modifiable': self.modifiable, 'mouse_position': self.mouse_position, 'scale_text': self.scale_text, 'map_width': self.map_width, 'map_height': self.map_height, 'point_zoom': self.point_zoom, 'srid': self.map_srid, 'display_srid': self.display_srid, 'wms_url': self.wms_url, 'wms_layer': self.wms_layer, 'wms_name': self.wms_name, 'wms_options': wms_options, 'debug': self.debug, } return OLMap # RemovedInDjango50Warning. class OSMGeoAdmin(GeoModelAdmin): map_template = 'gis/admin/osm.html' num_zoom = 20 map_srid = spherical_mercator_srid max_extent = '-20037508,-20037508,20037508,20037508' max_resolution = '156543.0339' point_zoom = num_zoom - 6 units = 'm'
c2f664887b9d995cfce8e861ddeec3ab7515d8e6d6cfeca7cb78a9e90ed61ed9
""" This module houses the Geometry Collection objects: GeometryCollection, MultiPoint, MultiLineString, and MultiPolygon """ from django.contrib.gis.geos import prototypes as capi from django.contrib.gis.geos.geometry import GEOSGeometry, LinearGeometryMixin from django.contrib.gis.geos.libgeos import GEOM_PTR from django.contrib.gis.geos.linestring import LinearRing, LineString from django.contrib.gis.geos.point import Point from django.contrib.gis.geos.polygon import Polygon class GeometryCollection(GEOSGeometry): _typeid = 7 def __init__(self, *args, **kwargs): "Initialize a Geometry Collection from a sequence of Geometry objects." # Checking the arguments if len(args) == 1: # If only one geometry provided or a list of geometries is provided # in the first argument. if isinstance(args[0], (tuple, list)): init_geoms = args[0] else: init_geoms = args else: init_geoms = args # Ensuring that only the permitted geometries are allowed in this collection # this is moved to list mixin super class self._check_allowed(init_geoms) # Creating the geometry pointer array. collection = self._create_collection(len(init_geoms), init_geoms) super().__init__(collection, **kwargs) def __iter__(self): "Iterate over each Geometry in the Collection." for i in range(len(self)): yield self[i] def __len__(self): "Return the number of geometries in this Collection." return self.num_geom # ### Methods for compatibility with ListMixin ### def _create_collection(self, length, items): # Creating the geometry pointer array. geoms = (GEOM_PTR * length)(*[ # this is a little sloppy, but makes life easier # allow GEOSGeometry types (python wrappers) or pointer types capi.geom_clone(getattr(g, 'ptr', g)) for g in items ]) return capi.create_collection(self._typeid, geoms, length) def _get_single_internal(self, index): return capi.get_geomn(self.ptr, index) def _get_single_external(self, index): "Return the Geometry from this Collection at the given index (0-based)." # Checking the index and returning the corresponding GEOS geometry. return GEOSGeometry(capi.geom_clone(self._get_single_internal(index)), srid=self.srid) def _set_list(self, length, items): "Create a new collection, and destroy the contents of the previous pointer." prev_ptr = self.ptr srid = self.srid self.ptr = self._create_collection(length, items) if srid: self.srid = srid capi.destroy_geom(prev_ptr) _set_single = GEOSGeometry._set_single_rebuild _assign_extended_slice = GEOSGeometry._assign_extended_slice_rebuild @property def kml(self): "Return the KML for this Geometry Collection." return '<MultiGeometry>%s</MultiGeometry>' % ''.join(g.kml for g in self) @property def tuple(self): "Return a tuple of all the coordinates in this Geometry Collection" return tuple(g.tuple for g in self) coords = tuple # MultiPoint, MultiLineString, and MultiPolygon class definitions. class MultiPoint(GeometryCollection): _allowed = Point _typeid = 4 class MultiLineString(LinearGeometryMixin, GeometryCollection): _allowed = (LineString, LinearRing) _typeid = 5 class MultiPolygon(GeometryCollection): _allowed = Polygon _typeid = 6 # Setting the allowed types here since GeometryCollection is defined before # its subclasses. GeometryCollection._allowed = (Point, LineString, LinearRing, Polygon, MultiPoint, MultiLineString, MultiPolygon)
""" This module contains the 'base' GEOSGeometry object -- all GEOS Geometries inherit from this object. """ import re from ctypes import addressof, byref, c_double from django.contrib.gis import gdal from django.contrib.gis.geometry import hex_regex, json_regex, wkt_regex from django.contrib.gis.geos import prototypes as capi from django.contrib.gis.geos.base import GEOSBase from django.contrib.gis.geos.coordseq import GEOSCoordSeq from django.contrib.gis.geos.error import GEOSException from django.contrib.gis.geos.libgeos import GEOM_PTR, geos_version_tuple from django.contrib.gis.geos.mutable_list import ListMixin from django.contrib.gis.geos.prepared import PreparedGeometry from django.contrib.gis.geos.prototypes.io import ( ewkb_w, wkb_r, wkb_w, wkt_r, wkt_w, ) from django.utils.deconstruct import deconstructible from django.utils.encoding import force_bytes, force_str class GEOSGeometryBase(GEOSBase): _GEOS_CLASSES = None ptr_type = GEOM_PTR destructor = capi.destroy_geom has_cs = False # Only Point, LineString, LinearRing have coordinate sequences def __init__(self, ptr, cls): self._ptr = ptr # Setting the class type (e.g., Point, Polygon, etc.) if type(self) in (GEOSGeometryBase, GEOSGeometry): if cls is None: if GEOSGeometryBase._GEOS_CLASSES is None: # Inner imports avoid import conflicts with GEOSGeometry. from .collections import ( GeometryCollection, MultiLineString, MultiPoint, MultiPolygon, ) from .linestring import LinearRing, LineString from .point import Point from .polygon import Polygon GEOSGeometryBase._GEOS_CLASSES = { 0: Point, 1: LineString, 2: LinearRing, 3: Polygon, 4: MultiPoint, 5: MultiLineString, 6: MultiPolygon, 7: GeometryCollection, } cls = GEOSGeometryBase._GEOS_CLASSES[self.geom_typeid] self.__class__ = cls self._post_init() def _post_init(self): "Perform post-initialization setup." # Setting the coordinate sequence for the geometry (will be None on # geometries that do not have coordinate sequences) self._cs = GEOSCoordSeq(capi.get_cs(self.ptr), self.hasz) if self.has_cs else None def __copy__(self): """ Return a clone because the copy of a GEOSGeometry may contain an invalid pointer location if the original is garbage collected. """ return self.clone() def __deepcopy__(self, memodict): """ The `deepcopy` routine is used by the `Node` class of django.utils.tree; thus, the protocol routine needs to be implemented to return correct copies (clones) of these GEOS objects, which use C pointers. """ return self.clone() def __str__(self): "EWKT is used for the string representation." return self.ewkt def __repr__(self): "Short-hand representation because WKT may be very large." return '<%s object at %s>' % (self.geom_type, hex(addressof(self.ptr))) # Pickling support def _to_pickle_wkb(self): return bytes(self.wkb) def _from_pickle_wkb(self, wkb): return wkb_r().read(memoryview(wkb)) def __getstate__(self): # The pickled state is simply a tuple of the WKB (in string form) # and the SRID. return self._to_pickle_wkb(), self.srid def __setstate__(self, state): # Instantiating from the tuple state that was pickled. 
wkb, srid = state ptr = self._from_pickle_wkb(wkb) if not ptr: raise GEOSException('Invalid Geometry loaded from pickled state.') self.ptr = ptr self._post_init() self.srid = srid @classmethod def _from_wkb(cls, wkb): return wkb_r().read(wkb) @staticmethod def from_ewkt(ewkt): ewkt = force_bytes(ewkt) srid = None parts = ewkt.split(b';', 1) if len(parts) == 2: srid_part, wkt = parts match = re.match(br'SRID=(?P<srid>\-?\d+)', srid_part) if not match: raise ValueError('EWKT has invalid SRID part.') srid = int(match['srid']) else: wkt = ewkt if not wkt: raise ValueError('Expected WKT but got an empty string.') return GEOSGeometry(GEOSGeometry._from_wkt(wkt), srid=srid) @staticmethod def _from_wkt(wkt): return wkt_r().read(wkt) @classmethod def from_gml(cls, gml_string): return gdal.OGRGeometry.from_gml(gml_string).geos # Comparison operators def __eq__(self, other): """ Equivalence testing, a Geometry may be compared with another Geometry or an EWKT representation. """ if isinstance(other, str): try: other = GEOSGeometry.from_ewkt(other) except (ValueError, GEOSException): return False return isinstance(other, GEOSGeometry) and self.srid == other.srid and self.equals_exact(other) def __hash__(self): return hash((self.srid, self.wkt)) # ### Geometry set-like operations ### # Thanks to Sean Gillies for inspiration: # http://lists.gispython.org/pipermail/community/2007-July/001034.html # g = g1 | g2 def __or__(self, other): "Return the union of this Geometry and the other." return self.union(other) # g = g1 & g2 def __and__(self, other): "Return the intersection of this Geometry and the other." return self.intersection(other) # g = g1 - g2 def __sub__(self, other): "Return the difference this Geometry and the other." return self.difference(other) # g = g1 ^ g2 def __xor__(self, other): "Return the symmetric difference of this Geometry and the other." return self.sym_difference(other) # #### Coordinate Sequence Routines #### @property def coord_seq(self): "Return a clone of the coordinate sequence for this Geometry." if self.has_cs: return self._cs.clone() # #### Geometry Info #### @property def geom_type(self): "Return a string representing the Geometry type, e.g. 'Polygon'" return capi.geos_type(self.ptr).decode() @property def geom_typeid(self): "Return an integer representing the Geometry type." return capi.geos_typeid(self.ptr) @property def num_geom(self): "Return the number of geometries in the Geometry." return capi.get_num_geoms(self.ptr) @property def num_coords(self): "Return the number of coordinates in the Geometry." return capi.get_num_coords(self.ptr) @property def num_points(self): "Return the number points, or coordinates, in the Geometry." return self.num_coords @property def dims(self): "Return the dimension of this Geometry (0=point, 1=line, 2=surface)." return capi.get_dims(self.ptr) def normalize(self): "Convert this Geometry to normal form (or canonical form)." capi.geos_normalize(self.ptr) def make_valid(self): """ Attempt to create a valid representation of a given invalid geometry without losing any of the input vertices. """ if geos_version_tuple() < (3, 8): raise GEOSException('GEOSGeometry.make_valid() requires GEOS >= 3.8.0.') return GEOSGeometry(capi.geos_makevalid(self.ptr), srid=self.srid) # #### Unary predicates #### @property def empty(self): """ Return a boolean indicating whether the set of points in this Geometry are empty. """ return capi.geos_isempty(self.ptr) @property def hasz(self): "Return whether the geometry has a 3D dimension." 
return capi.geos_hasz(self.ptr) @property def ring(self): "Return whether or not the geometry is a ring." return capi.geos_isring(self.ptr) @property def simple(self): "Return false if the Geometry isn't simple." return capi.geos_issimple(self.ptr) @property def valid(self): "Test the validity of this Geometry." return capi.geos_isvalid(self.ptr) @property def valid_reason(self): """ Return a string containing the reason for any invalidity. """ return capi.geos_isvalidreason(self.ptr).decode() # #### Binary predicates. #### def contains(self, other): "Return true if other.within(this) returns true." return capi.geos_contains(self.ptr, other.ptr) def covers(self, other): """ Return True if the DE-9IM Intersection Matrix for the two geometries is T*****FF*, *T****FF*, ***T**FF*, or ****T*FF*. If either geometry is empty, return False. """ return capi.geos_covers(self.ptr, other.ptr) def crosses(self, other): """ Return true if the DE-9IM intersection matrix for the two Geometries is T*T****** (for a point and a curve,a point and an area or a line and an area) 0******** (for two curves). """ return capi.geos_crosses(self.ptr, other.ptr) def disjoint(self, other): """ Return true if the DE-9IM intersection matrix for the two Geometries is FF*FF****. """ return capi.geos_disjoint(self.ptr, other.ptr) def equals(self, other): """ Return true if the DE-9IM intersection matrix for the two Geometries is T*F**FFF*. """ return capi.geos_equals(self.ptr, other.ptr) def equals_exact(self, other, tolerance=0): """ Return true if the two Geometries are exactly equal, up to a specified tolerance. """ return capi.geos_equalsexact(self.ptr, other.ptr, float(tolerance)) def intersects(self, other): "Return true if disjoint return false." return capi.geos_intersects(self.ptr, other.ptr) def overlaps(self, other): """ Return true if the DE-9IM intersection matrix for the two Geometries is T*T***T** (for two points or two surfaces) 1*T***T** (for two curves). """ return capi.geos_overlaps(self.ptr, other.ptr) def relate_pattern(self, other, pattern): """ Return true if the elements in the DE-9IM intersection matrix for the two Geometries match the elements in pattern. """ if not isinstance(pattern, str) or len(pattern) > 9: raise GEOSException('invalid intersection matrix pattern') return capi.geos_relatepattern(self.ptr, other.ptr, force_bytes(pattern)) def touches(self, other): """ Return true if the DE-9IM intersection matrix for the two Geometries is FT*******, F**T***** or F***T****. """ return capi.geos_touches(self.ptr, other.ptr) def within(self, other): """ Return true if the DE-9IM intersection matrix for the two Geometries is T*F**F***. """ return capi.geos_within(self.ptr, other.ptr) # #### SRID Routines #### @property def srid(self): "Get the SRID for the geometry. Return None if no SRID is set." s = capi.geos_get_srid(self.ptr) if s == 0: return None else: return s @srid.setter def srid(self, srid): "Set the SRID for the geometry." capi.geos_set_srid(self.ptr, 0 if srid is None else srid) # #### Output Routines #### @property def ewkt(self): """ Return the EWKT (SRID + WKT) of the Geometry. """ srid = self.srid return 'SRID=%s;%s' % (srid, self.wkt) if srid else self.wkt @property def wkt(self): "Return the WKT (Well-Known Text) representation of this Geometry." return wkt_w(dim=3 if self.hasz else 2, trim=True).write(self).decode() @property def hex(self): """ Return the WKB of this Geometry in hexadecimal form. 
Please note that the SRID is not included in this representation because it is not a part of the OGC specification (use the `hexewkb` property instead). """ # A possible faster, all-python, implementation: # str(self.wkb).encode('hex') return wkb_w(dim=3 if self.hasz else 2).write_hex(self) @property def hexewkb(self): """ Return the EWKB of this Geometry in hexadecimal form. This is an extension of the WKB specification that includes SRID value that are a part of this geometry. """ return ewkb_w(dim=3 if self.hasz else 2).write_hex(self) @property def json(self): """ Return GeoJSON representation of this Geometry. """ return self.ogr.json geojson = json @property def wkb(self): """ Return the WKB (Well-Known Binary) representation of this Geometry as a Python buffer. SRID and Z values are not included, use the `ewkb` property instead. """ return wkb_w(3 if self.hasz else 2).write(self) @property def ewkb(self): """ Return the EWKB representation of this Geometry as a Python buffer. This is an extension of the WKB specification that includes any SRID value that are a part of this geometry. """ return ewkb_w(3 if self.hasz else 2).write(self) @property def kml(self): "Return the KML representation of this Geometry." gtype = self.geom_type return '<%s>%s</%s>' % (gtype, self.coord_seq.kml, gtype) @property def prepared(self): """ Return a PreparedGeometry corresponding to this geometry -- it is optimized for the contains, intersects, and covers operations. """ return PreparedGeometry(self) # #### GDAL-specific output routines #### def _ogr_ptr(self): return gdal.OGRGeometry._from_wkb(self.wkb) @property def ogr(self): "Return the OGR Geometry for this Geometry." return gdal.OGRGeometry(self._ogr_ptr(), self.srs) @property def srs(self): "Return the OSR SpatialReference for SRID of this Geometry." if self.srid: try: return gdal.SpatialReference(self.srid) except (gdal.GDALException, gdal.SRSException): pass return None @property def crs(self): "Alias for `srs` property." return self.srs def transform(self, ct, clone=False): """ Requires GDAL. Transform the geometry according to the given transformation object, which may be an integer SRID, and WKT or PROJ string. By default, transform the geometry in-place and return nothing. However if the `clone` keyword is set, don't modify the geometry and return a transformed clone instead. """ srid = self.srid if ct == srid: # short-circuit where source & dest SRIDs match if clone: return self.clone() else: return if isinstance(ct, gdal.CoordTransform): # We don't care about SRID because CoordTransform presupposes # source SRS. srid = None elif srid is None or srid < 0: raise GEOSException("Calling transform() with no SRID set is not supported") # Creating an OGR Geometry, which is then transformed. g = gdal.OGRGeometry(self._ogr_ptr(), srid) g.transform(ct) # Getting a new GEOS pointer ptr = g._geos_ptr() if clone: # User wants a cloned transformed geometry returned. return GEOSGeometry(ptr, srid=g.srid) if ptr: # Reassigning pointer, and performing post-initialization setup # again due to the reassignment. capi.destroy_geom(self.ptr) self.ptr = ptr self._post_init() self.srid = g.srid else: raise GEOSException('Transformed WKB was invalid.') # #### Topology Routines #### def _topology(self, gptr): "Return Geometry from the given pointer." return GEOSGeometry(gptr, srid=self.srid) @property def boundary(self): "Return the boundary as a newly allocated Geometry object." 
return self._topology(capi.geos_boundary(self.ptr)) def buffer(self, width, quadsegs=8): """ Return a geometry that represents all points whose distance from this Geometry is less than or equal to distance. Calculations are in the Spatial Reference System of this Geometry. The optional third parameter sets the number of segment used to approximate a quarter circle (defaults to 8). (Text from PostGIS documentation at ch. 6.1.3) """ return self._topology(capi.geos_buffer(self.ptr, width, quadsegs)) def buffer_with_style(self, width, quadsegs=8, end_cap_style=1, join_style=1, mitre_limit=5.0): """ Same as buffer() but allows customizing the style of the buffer. End cap style can be round (1), flat (2), or square (3). Join style can be round (1), mitre (2), or bevel (3). Mitre ratio limit only affects mitered join style. """ return self._topology( capi.geos_bufferwithstyle(self.ptr, width, quadsegs, end_cap_style, join_style, mitre_limit), ) @property def centroid(self): """ The centroid is equal to the centroid of the set of component Geometries of highest dimension (since the lower-dimension geometries contribute zero "weight" to the centroid). """ return self._topology(capi.geos_centroid(self.ptr)) @property def convex_hull(self): """ Return the smallest convex Polygon that contains all the points in the Geometry. """ return self._topology(capi.geos_convexhull(self.ptr)) def difference(self, other): """ Return a Geometry representing the points making up this Geometry that do not make up other. """ return self._topology(capi.geos_difference(self.ptr, other.ptr)) @property def envelope(self): "Return the envelope for this geometry (a polygon)." return self._topology(capi.geos_envelope(self.ptr)) def intersection(self, other): "Return a Geometry representing the points shared by this Geometry and other." return self._topology(capi.geos_intersection(self.ptr, other.ptr)) @property def point_on_surface(self): "Compute an interior point of this Geometry." return self._topology(capi.geos_pointonsurface(self.ptr)) def relate(self, other): "Return the DE-9IM intersection matrix for this Geometry and the other." return capi.geos_relate(self.ptr, other.ptr).decode() def simplify(self, tolerance=0.0, preserve_topology=False): """ Return the Geometry, simplified using the Douglas-Peucker algorithm to the specified tolerance (higher tolerance => less points). If no tolerance provided, defaults to 0. By default, don't preserve topology - e.g. polygons can be split, collapse to lines or disappear holes can be created or disappear, and lines can cross. By specifying preserve_topology=True, the result will have the same dimension and number of components as the input. This is significantly slower. """ if preserve_topology: return self._topology(capi.geos_preservesimplify(self.ptr, tolerance)) else: return self._topology(capi.geos_simplify(self.ptr, tolerance)) def sym_difference(self, other): """ Return a set combining the points in this Geometry not in other, and the points in other not in this Geometry. """ return self._topology(capi.geos_symdifference(self.ptr, other.ptr)) @property def unary_union(self): "Return the union of all the elements of this geometry." return self._topology(capi.geos_unary_union(self.ptr)) def union(self, other): "Return a Geometry representing all the points in this Geometry and other." return self._topology(capi.geos_union(self.ptr, other.ptr)) # #### Other Routines #### @property def area(self): "Return the area of the Geometry." 
return capi.geos_area(self.ptr, byref(c_double())) def distance(self, other): """ Return the distance between the closest points on this Geometry and the other. Units will be in those of the coordinate system of the Geometry. """ if not isinstance(other, GEOSGeometry): raise TypeError('distance() works only on other GEOS Geometries.') return capi.geos_distance(self.ptr, other.ptr, byref(c_double())) @property def extent(self): """ Return the extent of this geometry as a 4-tuple, consisting of (xmin, ymin, xmax, ymax). """ from .point import Point env = self.envelope if isinstance(env, Point): xmin, ymin = env.tuple xmax, ymax = xmin, ymin else: xmin, ymin = env[0][0] xmax, ymax = env[0][2] return (xmin, ymin, xmax, ymax) @property def length(self): """ Return the length of this Geometry (e.g., 0 for point, or the circumference of a Polygon). """ return capi.geos_length(self.ptr, byref(c_double())) def clone(self): "Clone this Geometry." return GEOSGeometry(capi.geom_clone(self.ptr)) class LinearGeometryMixin: """ Used for LineString and MultiLineString. """ def interpolate(self, distance): return self._topology(capi.geos_interpolate(self.ptr, distance)) def interpolate_normalized(self, distance): return self._topology(capi.geos_interpolate_normalized(self.ptr, distance)) def project(self, point): from .point import Point if not isinstance(point, Point): raise TypeError('locate_point argument must be a Point') return capi.geos_project(self.ptr, point.ptr) def project_normalized(self, point): from .point import Point if not isinstance(point, Point): raise TypeError('locate_point argument must be a Point') return capi.geos_project_normalized(self.ptr, point.ptr) @property def merged(self): """ Return the line merge of this Geometry. """ return self._topology(capi.geos_linemerge(self.ptr)) @property def closed(self): """ Return whether or not this Geometry is closed. """ return capi.geos_isclosed(self.ptr) @deconstructible class GEOSGeometry(GEOSGeometryBase, ListMixin): "A class that, generally, encapsulates a GEOS geometry." def __init__(self, geo_input, srid=None): """ The base constructor for GEOS geometry objects. It may take the following inputs: * strings: - WKT - HEXEWKB (a PostGIS-specific canonical form) - GeoJSON (requires GDAL) * buffer: - WKB The `srid` keyword specifies the Source Reference Identifier (SRID) number for this Geometry. If not provided, it defaults to None. """ input_srid = None if isinstance(geo_input, bytes): geo_input = force_str(geo_input) if isinstance(geo_input, str): wkt_m = wkt_regex.match(geo_input) if wkt_m: # Handle WKT input. if wkt_m['srid']: input_srid = int(wkt_m['srid']) g = self._from_wkt(force_bytes(wkt_m['wkt'])) elif hex_regex.match(geo_input): # Handle HEXEWKB input. g = wkb_r().read(force_bytes(geo_input)) elif json_regex.match(geo_input): # Handle GeoJSON input. ogr = gdal.OGRGeometry.from_json(geo_input) g = ogr._geos_ptr() input_srid = ogr.srid else: raise ValueError('String input unrecognized as WKT EWKT, and HEXEWKB.') elif isinstance(geo_input, GEOM_PTR): # When the input is a pointer to a geometry (GEOM_PTR). g = geo_input elif isinstance(geo_input, memoryview): # When the input is a buffer (WKB). 
g = wkb_r().read(geo_input) elif isinstance(geo_input, GEOSGeometry): g = capi.geom_clone(geo_input.ptr) else: raise TypeError('Improper geometry input type: %s' % type(geo_input)) if not g: raise GEOSException('Could not initialize GEOS Geometry with given input.') input_srid = input_srid or capi.geos_get_srid(g) or None if input_srid and srid and input_srid != srid: raise ValueError('Input geometry already has SRID: %d.' % input_srid) super().__init__(g, None) # Set the SRID, if given. srid = input_srid or srid if srid and isinstance(srid, int): self.srid = srid
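# Illustrative usage sketch -- constructing GEOSGeometry objects from the inputs
# accepted above (WKT/EWKT, HEXEWKB, WKB, GeoJSON) and exercising a few of the
# predicates defined in this module. Needs GEOS only (GDAL for GeoJSON input).
from django.contrib.gis.geos import GEOSGeometry

pt = GEOSGeometry('SRID=4326;POINT(5 23)')        # EWKT input sets the SRID
assert pt.srid == 4326 and pt.geom_type == 'Point'
assert pt == 'SRID=4326;POINT (5 23)'             # __eq__ also accepts EWKT strings

square = GEOSGeometry('POLYGON((0 0, 0 10, 10 10, 10 0, 0 0))')
assert square.contains(GEOSGeometry('POINT(5 5)'))
assert square.area == 100.0
clone = GEOSGeometry(square.wkb)                  # round trip through the WKB buffer
assert clone.equals_exact(square)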
""" This module houses the ctypes initialization procedures, as well as the notice and error handler function callbacks (get called when an error occurs in GEOS). This module also houses GEOS Pointer utilities, including get_pointer_arr(), and GEOM_PTR. """ import logging import os from ctypes import CDLL, CFUNCTYPE, POINTER, Structure, c_char_p from ctypes.util import find_library from django.core.exceptions import ImproperlyConfigured from django.utils.functional import SimpleLazyObject, cached_property from django.utils.version import get_version_tuple logger = logging.getLogger('django.contrib.gis') def load_geos(): # Custom library path set? try: from django.conf import settings lib_path = settings.GEOS_LIBRARY_PATH except (AttributeError, ImportError, ImproperlyConfigured, OSError): lib_path = None # Setting the appropriate names for the GEOS-C library. if lib_path: lib_names = None elif os.name == 'nt': # Windows NT libraries lib_names = ['geos_c', 'libgeos_c-1'] elif os.name == 'posix': # *NIX libraries lib_names = ['geos_c', 'GEOS'] else: raise ImportError('Unsupported OS "%s"' % os.name) # Using the ctypes `find_library` utility to find the path to the GEOS # shared library. This is better than manually specifying each library name # and extension (e.g., libgeos_c.[so|so.1|dylib].). if lib_names: for lib_name in lib_names: lib_path = find_library(lib_name) if lib_path is not None: break # No GEOS library could be found. if lib_path is None: raise ImportError( 'Could not find the GEOS library (tried "%s"). ' 'Try setting GEOS_LIBRARY_PATH in your settings.' % '", "'.join(lib_names) ) # Getting the GEOS C library. The C interface (CDLL) is used for # both *NIX and Windows. # See the GEOS C API source code for more details on the library function calls: # https://libgeos.org/doxygen/geos__c_8h_source.html _lgeos = CDLL(lib_path) # Here we set up the prototypes for the initGEOS_r and finishGEOS_r # routines. These functions aren't actually called until they are # attached to a GEOS context handle -- this actually occurs in # geos/prototypes/threadsafe.py. _lgeos.initGEOS_r.restype = CONTEXT_PTR _lgeos.finishGEOS_r.argtypes = [CONTEXT_PTR] # Set restype for compatibility across 32 and 64-bit platforms. _lgeos.GEOSversion.restype = c_char_p return _lgeos # The notice and error handler C function callback definitions. # Supposed to mimic the GEOS message handler (C below): # typedef void (*GEOSMessageHandler)(const char *fmt, ...); NOTICEFUNC = CFUNCTYPE(None, c_char_p, c_char_p) def notice_h(fmt, lst): fmt, lst = fmt.decode(), lst.decode() try: warn_msg = fmt % lst except TypeError: warn_msg = fmt logger.warning('GEOS_NOTICE: %s\n', warn_msg) notice_h = NOTICEFUNC(notice_h) ERRORFUNC = CFUNCTYPE(None, c_char_p, c_char_p) def error_h(fmt, lst): fmt, lst = fmt.decode(), lst.decode() try: err_msg = fmt % lst except TypeError: err_msg = fmt logger.error('GEOS_ERROR: %s\n', err_msg) error_h = ERRORFUNC(error_h) # #### GEOS Geometry C data structures, and utility functions. #### # Opaque GEOS geometry structures, used for GEOM_PTR and CS_PTR class GEOSGeom_t(Structure): pass class GEOSPrepGeom_t(Structure): pass class GEOSCoordSeq_t(Structure): pass class GEOSContextHandle_t(Structure): pass # Pointers to opaque GEOS geometry structures. GEOM_PTR = POINTER(GEOSGeom_t) PREPGEOM_PTR = POINTER(GEOSPrepGeom_t) CS_PTR = POINTER(GEOSCoordSeq_t) CONTEXT_PTR = POINTER(GEOSContextHandle_t) lgeos = SimpleLazyObject(load_geos) class GEOSFuncFactory: """ Lazy loading of GEOS functions. 
""" argtypes = None restype = None errcheck = None def __init__(self, func_name, *, restype=None, errcheck=None, argtypes=None): self.func_name = func_name if restype is not None: self.restype = restype if errcheck is not None: self.errcheck = errcheck if argtypes is not None: self.argtypes = argtypes def __call__(self, *args): return self.func(*args) @cached_property def func(self): from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc func = GEOSFunc(self.func_name) func.argtypes = self.argtypes or [] func.restype = self.restype if self.errcheck: func.errcheck = self.errcheck return func def geos_version(): """Return the string version of the GEOS library.""" return lgeos.GEOSversion() def geos_version_tuple(): """Return the GEOS version as a tuple (major, minor, subminor).""" return get_version_tuple(geos_version().decode())
from django.contrib.gis.geos import prototypes as capi from django.contrib.gis.geos.geometry import GEOSGeometry from django.contrib.gis.geos.libgeos import GEOM_PTR from django.contrib.gis.geos.linestring import LinearRing class Polygon(GEOSGeometry): _minlength = 1 def __init__(self, *args, **kwargs): """ Initialize on an exterior ring and a sequence of holes (both instances may be either LinearRing instances, or a tuple/list that may be constructed into a LinearRing). Examples of initialization, where shell, hole1, and hole2 are valid LinearRing geometries: >>> from django.contrib.gis.geos import LinearRing, Polygon >>> shell = hole1 = hole2 = LinearRing() >>> poly = Polygon(shell, hole1, hole2) >>> poly = Polygon(shell, (hole1, hole2)) >>> # Example where a tuple parameters are used: >>> poly = Polygon(((0, 0), (0, 10), (10, 10), (0, 10), (0, 0)), ... ((4, 4), (4, 6), (6, 6), (6, 4), (4, 4))) """ if not args: super().__init__(self._create_polygon(0, None), **kwargs) return # Getting the ext_ring and init_holes parameters from the argument list ext_ring, *init_holes = args n_holes = len(init_holes) # If initialized as Polygon(shell, (LinearRing, LinearRing)) [for backward-compatibility] if n_holes == 1 and isinstance(init_holes[0], (tuple, list)): if not init_holes[0]: init_holes = () n_holes = 0 elif isinstance(init_holes[0][0], LinearRing): init_holes = init_holes[0] n_holes = len(init_holes) polygon = self._create_polygon(n_holes + 1, [ext_ring, *init_holes]) super().__init__(polygon, **kwargs) def __iter__(self): "Iterate over each ring in the polygon." for i in range(len(self)): yield self[i] def __len__(self): "Return the number of rings in this Polygon." return self.num_interior_rings + 1 @classmethod def from_bbox(cls, bbox): "Construct a Polygon from a bounding box (4-tuple)." x0, y0, x1, y1 = bbox for z in bbox: if not isinstance(z, (float, int)): return GEOSGeometry('POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' % (x0, y0, x0, y1, x1, y1, x1, y0, x0, y0)) return Polygon(((x0, y0), (x0, y1), (x1, y1), (x1, y0), (x0, y0))) # ### These routines are needed for list-like operation w/ListMixin ### def _create_polygon(self, length, items): # Instantiate LinearRing objects if necessary, but don't clone them yet # _construct_ring will throw a TypeError if a parameter isn't a valid ring # If we cloned the pointers here, we wouldn't be able to clean up # in case of error. if not length: return capi.create_empty_polygon() rings = [] for r in items: if isinstance(r, GEOM_PTR): rings.append(r) else: rings.append(self._construct_ring(r)) shell = self._clone(rings.pop(0)) n_holes = length - 1 if n_holes: holes_param = (GEOM_PTR * n_holes)(*[self._clone(r) for r in rings]) else: holes_param = None return capi.create_polygon(shell, holes_param, n_holes) def _clone(self, g): if isinstance(g, GEOM_PTR): return capi.geom_clone(g) else: return capi.geom_clone(g.ptr) def _construct_ring(self, param, msg=( 'Parameter must be a sequence of LinearRings or objects that can initialize to LinearRings')): "Try to construct a ring from the given parameter." if isinstance(param, LinearRing): return param try: ring = LinearRing(param) return ring except TypeError: raise TypeError(msg) def _set_list(self, length, items): # Getting the current pointer, replacing with the newly constructed # geometry, and destroying the old geometry. 
prev_ptr = self.ptr srid = self.srid self.ptr = self._create_polygon(length, items) if srid: self.srid = srid capi.destroy_geom(prev_ptr) def _get_single_internal(self, index): """ Return the ring at the specified index. The first index, 0, will always return the exterior ring. Indices > 0 will return the interior ring at the given index (e.g., poly[1] and poly[2] would return the first and second interior ring, respectively). CAREFUL: Internal/External are not the same as Interior/Exterior! Return a pointer from the existing geometries for use internally by the object's methods. _get_single_external() returns a clone of the same geometry for use by external code. """ if index == 0: return capi.get_extring(self.ptr) else: # Getting the interior ring, have to subtract 1 from the index. return capi.get_intring(self.ptr, index - 1) def _get_single_external(self, index): return GEOSGeometry(capi.geom_clone(self._get_single_internal(index)), srid=self.srid) _set_single = GEOSGeometry._set_single_rebuild _assign_extended_slice = GEOSGeometry._assign_extended_slice_rebuild # #### Polygon Properties #### @property def num_interior_rings(self): "Return the number of interior rings." # Getting the number of rings return capi.get_nrings(self.ptr) def _get_ext_ring(self): "Get the exterior ring of the Polygon." return self[0] def _set_ext_ring(self, ring): "Set the exterior ring of the Polygon." self[0] = ring # Properties for the exterior ring/shell. exterior_ring = property(_get_ext_ring, _set_ext_ring) shell = exterior_ring @property def tuple(self): "Get the tuple for each ring in this Polygon." return tuple(self[i].tuple for i in range(len(self))) coords = tuple @property def kml(self): "Return the KML representation of this Polygon." inner_kml = ''.join( "<innerBoundaryIs>%s</innerBoundaryIs>" % self[i + 1].kml for i in range(self.num_interior_rings) ) return "<Polygon><outerBoundaryIs>%s</outerBoundaryIs>%s</Polygon>" % (self[0].kml, inner_kml)
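# Illustrative usage sketch -- building Polygons from rings and from a bounding
# box, then reading rings back through the list-like interface above. Requires
# only the GEOS library.
from django.contrib.gis.geos import LinearRing, Polygon

shell = LinearRing((0, 0), (0, 10), (10, 10), (10, 0), (0, 0))
hole = LinearRing((4, 4), (4, 6), (6, 6), (6, 4), (4, 4))
poly = Polygon(shell, hole)
assert poly.num_interior_rings == 1
assert poly[0] == poly.exterior_ring          # index 0 is always the shell
assert poly.area == 96.0                      # 10x10 shell minus the 2x2 hole

bbox_poly = Polygon.from_bbox((0.0, 0.0, 5.0, 5.0))
assert bbox_poly.extent == (0.0, 0.0, 5.0, 5.0)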
from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.db.backends.oracle.features import (
    DatabaseFeatures as OracleDatabaseFeatures,
)
from django.utils.functional import cached_property


class DatabaseFeatures(BaseSpatialFeatures, OracleDatabaseFeatures):
    supports_add_srs_entry = False
    supports_geometry_field_introspection = False
    supports_geometry_field_unique_index = False
    supports_perimeter_geodetic = True
    supports_dwithin_distance_expr = False
    supports_tolerance_parameter = True
    unsupported_geojson_options = {'bbox', 'crs', 'precision'}

    @cached_property
    def django_test_skips(self):
        skips = super().django_test_skips
        skips.update({
            "Oracle doesn't support spatial operators in constraints.": {
                'gis_tests.gis_migrations.test_operations.OperationTests.test_add_check_constraint',
            },
        })
        return skips
from django.contrib.gis.db.models import GeometryField from django.db.backends.oracle.schema import DatabaseSchemaEditor from django.db.backends.utils import strip_quotes, truncate_name class OracleGISSchemaEditor(DatabaseSchemaEditor): sql_add_geometry_metadata = (""" INSERT INTO USER_SDO_GEOM_METADATA ("TABLE_NAME", "COLUMN_NAME", "DIMINFO", "SRID") VALUES ( %(table)s, %(column)s, MDSYS.SDO_DIM_ARRAY( MDSYS.SDO_DIM_ELEMENT('LONG', %(dim0)s, %(dim2)s, %(tolerance)s), MDSYS.SDO_DIM_ELEMENT('LAT', %(dim1)s, %(dim3)s, %(tolerance)s) ), %(srid)s )""") sql_add_spatial_index = 'CREATE INDEX %(index)s ON %(table)s(%(column)s) INDEXTYPE IS MDSYS.SPATIAL_INDEX' sql_drop_spatial_index = 'DROP INDEX %(index)s' sql_clear_geometry_table_metadata = 'DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s' sql_clear_geometry_field_metadata = ( 'DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s ' 'AND COLUMN_NAME = %(column)s' ) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.geometry_sql = [] def geo_quote_name(self, name): return self.connection.ops.geo_quote_name(name) def quote_value(self, value): if isinstance(value, self.connection.ops.Adapter): return super().quote_value(str(value)) return super().quote_value(value) def column_sql(self, model, field, include_default=False): column_sql = super().column_sql(model, field, include_default) if isinstance(field, GeometryField): db_table = model._meta.db_table self.geometry_sql.append( self.sql_add_geometry_metadata % { 'table': self.geo_quote_name(db_table), 'column': self.geo_quote_name(field.column), 'dim0': field._extent[0], 'dim1': field._extent[1], 'dim2': field._extent[2], 'dim3': field._extent[3], 'tolerance': field._tolerance, 'srid': field.srid, } ) if field.spatial_index: self.geometry_sql.append( self.sql_add_spatial_index % { 'index': self.quote_name(self._create_spatial_index_name(model, field)), 'table': self.quote_name(db_table), 'column': self.quote_name(field.column), } ) return column_sql def create_model(self, model): super().create_model(model) self.run_geometry_sql() def delete_model(self, model): super().delete_model(model) self.execute(self.sql_clear_geometry_table_metadata % { 'table': self.geo_quote_name(model._meta.db_table), }) def add_field(self, model, field): super().add_field(model, field) self.run_geometry_sql() def remove_field(self, model, field): if isinstance(field, GeometryField): self.execute(self.sql_clear_geometry_field_metadata % { 'table': self.geo_quote_name(model._meta.db_table), 'column': self.geo_quote_name(field.column), }) if field.spatial_index: self.execute(self.sql_drop_spatial_index % { 'index': self.quote_name(self._create_spatial_index_name(model, field)), }) super().remove_field(model, field) def run_geometry_sql(self): for sql in self.geometry_sql: self.execute(sql) self.geometry_sql = [] def _create_spatial_index_name(self, model, field): # Oracle doesn't allow object names > 30 characters. Use this scheme # instead of self._create_index_name() for backwards compatibility. return truncate_name('%s_%s_id' % (strip_quotes(model._meta.db_table), field.column), 30)
from MySQLdb.constants import FIELD_TYPE

from django.contrib.gis.gdal import OGRGeomType
from django.db.backends.mysql.introspection import DatabaseIntrospection


class MySQLIntrospection(DatabaseIntrospection):
    # Updating the data_types_reverse dictionary with the appropriate
    # type for Geometry fields.
    data_types_reverse = DatabaseIntrospection.data_types_reverse.copy()
    data_types_reverse[FIELD_TYPE.GEOMETRY] = 'GeometryField'

    def get_geometry_type(self, table_name, description):
        with self.connection.cursor() as cursor:
            # In order to get the specific geometry type of the field,
            # we introspect on the table definition using `DESCRIBE`.
            cursor.execute('DESCRIBE %s' % self.connection.ops.quote_name(table_name))
            # Increment over description info until we get to the geometry
            # column.
            for column, typ, null, key, default, extra in cursor.fetchall():
                if column == description.name:
                    # Using OGRGeomType to convert from OGC name to Django field.
                    # MySQL does not support 3D or SRIDs, so the field params
                    # are empty.
                    field_type = OGRGeomType(typ).django
                    field_params = {}
                    break
        return field_type, field_params

    def supports_spatial_index(self, cursor, table_name):
        # Supported with MyISAM/Aria, or InnoDB on MySQL 5.7.5+/MariaDB.
        storage_engine = self.get_storage_engine(cursor, table_name)
        if storage_engine == 'InnoDB':
            if self.connection.mysql_is_mariadb:
                return True
            return self.connection.mysql_version >= (5, 7, 5)
        return storage_engine in ('MyISAM', 'Aria')
from django.contrib.gis.db import models from django.contrib.gis.db.backends.base.adapter import WKTAdapter from django.contrib.gis.db.backends.base.operations import ( BaseSpatialOperations, ) from django.contrib.gis.db.backends.utils import SpatialOperator from django.contrib.gis.geos.geometry import GEOSGeometryBase from django.contrib.gis.geos.prototypes.io import wkb_r from django.contrib.gis.measure import Distance from django.db.backends.mysql.operations import DatabaseOperations from django.utils.functional import cached_property class MySQLOperations(BaseSpatialOperations, DatabaseOperations): name = 'mysql' geom_func_prefix = 'ST_' Adapter = WKTAdapter @cached_property def mariadb(self): return self.connection.mysql_is_mariadb @cached_property def mysql(self): return not self.connection.mysql_is_mariadb @cached_property def select(self): return self.geom_func_prefix + 'AsBinary(%s)' @cached_property def from_text(self): return self.geom_func_prefix + 'GeomFromText' @cached_property def gis_operators(self): operators = { 'bbcontains': SpatialOperator(func='MBRContains'), # For consistency w/PostGIS API 'bboverlaps': SpatialOperator(func='MBROverlaps'), # ... 'contained': SpatialOperator(func='MBRWithin'), # ... 'contains': SpatialOperator(func='ST_Contains'), 'crosses': SpatialOperator(func='ST_Crosses'), 'disjoint': SpatialOperator(func='ST_Disjoint'), 'equals': SpatialOperator(func='ST_Equals'), 'exact': SpatialOperator(func='ST_Equals'), 'intersects': SpatialOperator(func='ST_Intersects'), 'overlaps': SpatialOperator(func='ST_Overlaps'), 'same_as': SpatialOperator(func='ST_Equals'), 'touches': SpatialOperator(func='ST_Touches'), 'within': SpatialOperator(func='ST_Within'), } if self.connection.mysql_is_mariadb: operators['relate'] = SpatialOperator(func='ST_Relate') return operators disallowed_aggregates = ( models.Collect, models.Extent, models.Extent3D, models.MakeLine, models.Union, ) @cached_property def unsupported_functions(self): unsupported = { 'AsGML', 'AsKML', 'AsSVG', 'Azimuth', 'BoundingCircle', 'ForcePolygonCW', 'GeometryDistance', 'LineLocatePoint', 'MakeValid', 'MemSize', 'Perimeter', 'PointOnSurface', 'Reverse', 'Scale', 'SnapToGrid', 'Transform', 'Translate', } if self.connection.mysql_is_mariadb: unsupported.remove('PointOnSurface') unsupported.update({'GeoHash', 'IsValid'}) elif self.connection.mysql_version < (5, 7, 5): unsupported.update({'AsGeoJSON', 'GeoHash', 'IsValid'}) return unsupported def geo_db_type(self, f): return f.geom_type def get_distance(self, f, value, lookup_type): value = value[0] if isinstance(value, Distance): if f.geodetic(self.connection): raise ValueError( 'Only numeric values of degree units are allowed on ' 'geodetic distance queries.' ) dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection))) else: dist_param = value return [dist_param] def get_geometry_converter(self, expression): read = wkb_r().read srid = expression.output_field.srid if srid == -1: srid = None geom_class = expression.output_field.geom_class def converter(value, expression, connection): if value is not None: geom = GEOSGeometryBase(read(memoryview(value)), geom_class) if srid: geom.srid = srid return geom return converter
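# Illustrative usage sketch -- not part of the module above. ``City`` is a
# hypothetical model whose ``point`` field uses a projected SRID (the
# get_distance() hook above rejects metric Distance values on geodetic fields);
# assumes a project configured with the MySQL GIS backend.
#
# from django.contrib.gis.geos import Point
# from django.contrib.gis.measure import D
# from myapp.models import City
#
# ref = Point(1395055, 6689805, srid=3857)      # hypothetical reference point
# nearby = City.objects.filter(point__distance_lte=(ref, D(km=50)))
# # ``select`` above wraps the column in ST_AsBinary(...); the converter from
# # get_geometry_converter() turns those bytes back into GEOS geometries.
# for city in nearby:
#     print(city.name, city.point.wkt)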
import logging from django.contrib.gis.db.models import GeometryField from django.db import OperationalError from django.db.backends.mysql.schema import DatabaseSchemaEditor logger = logging.getLogger('django.contrib.gis') class MySQLGISSchemaEditor(DatabaseSchemaEditor): sql_add_spatial_index = 'CREATE SPATIAL INDEX %(index)s ON %(table)s(%(column)s)' sql_drop_spatial_index = 'DROP INDEX %(index)s ON %(table)s' def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.geometry_sql = [] def skip_default(self, field): # Geometry fields are stored as BLOB/TEXT, for which MySQL < 8.0.13 # doesn't support defaults. if isinstance(field, GeometryField) and not self._supports_limited_data_type_defaults: return True return super().skip_default(field) def quote_value(self, value): if isinstance(value, self.connection.ops.Adapter): return super().quote_value(str(value)) return super().quote_value(value) def column_sql(self, model, field, include_default=False): column_sql = super().column_sql(model, field, include_default) # MySQL doesn't support spatial indexes on NULL columns if isinstance(field, GeometryField) and field.spatial_index and not field.null: qn = self.connection.ops.quote_name db_table = model._meta.db_table self.geometry_sql.append( self.sql_add_spatial_index % { 'index': qn(self._create_spatial_index_name(model, field)), 'table': qn(db_table), 'column': qn(field.column), } ) return column_sql def create_model(self, model): super().create_model(model) self.create_spatial_indexes() def add_field(self, model, field): super().add_field(model, field) self.create_spatial_indexes() def remove_field(self, model, field): if isinstance(field, GeometryField) and field.spatial_index: qn = self.connection.ops.quote_name sql = self.sql_drop_spatial_index % { 'index': qn(self._create_spatial_index_name(model, field)), 'table': qn(model._meta.db_table), } try: self.execute(sql) except OperationalError: logger.error( "Couldn't remove spatial index: %s (may be expected " "if your storage engine doesn't support them).", sql ) super().remove_field(model, field) def _create_spatial_index_name(self, model, field): return '%s_%s_id' % (model._meta.db_table, field.column) def create_spatial_indexes(self): for sql in self.geometry_sql: try: self.execute(sql) except OperationalError: logger.error( "Cannot create SPATIAL INDEX %s. Only MyISAM and (as of " "MySQL 5.7.5) InnoDB support them.", sql ) self.geometry_sql = []
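# Illustrative sketch -- not part of the module above. Because column_sql()
# only queues CREATE SPATIAL INDEX for non-null geometry columns, a hypothetical
# model like the one below gets a spatial index on migrate, while a null=True
# field is skipped silently.
#
# from django.contrib.gis.db import models
#
# class Shop(models.Model):
#     name = models.CharField(max_length=100)
#     location = models.PointField(spatial_index=True, null=False)
#     # => CREATE SPATIAL INDEX `<db_table>_location_id` ON `<db_table>`(`location`)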
""" This object provides quoting for GEOS geometries into PostgreSQL/PostGIS. """ from psycopg2 import Binary from psycopg2.extensions import ISQLQuote from django.contrib.gis.db.backends.postgis.pgraster import to_pgraster from django.contrib.gis.geos import GEOSGeometry class PostGISAdapter: def __init__(self, obj, geography=False): """ Initialize on the spatial object. """ self.is_geometry = isinstance(obj, (GEOSGeometry, PostGISAdapter)) # Getting the WKB (in string form, to allow easy pickling of # the adaptor) and the SRID from the geometry or raster. if self.is_geometry: self.ewkb = bytes(obj.ewkb) self._adapter = Binary(self.ewkb) else: self.ewkb = to_pgraster(obj) self.srid = obj.srid self.geography = geography def __conform__(self, proto): """Does the given protocol conform to what Psycopg2 expects?""" if proto == ISQLQuote: return self else: raise Exception('Error implementing psycopg2 protocol. Is psycopg2 installed?') def __eq__(self, other): return isinstance(other, PostGISAdapter) and self.ewkb == other.ewkb def __hash__(self): return hash(self.ewkb) def __str__(self): return self.getquoted() @classmethod def _fix_polygon(cls, poly): return poly def prepare(self, conn): """ This method allows escaping the binary in the style required by the server's `standard_conforming_string` setting. """ if self.is_geometry: self._adapter.prepare(conn) def getquoted(self): """ Return a properly quoted string for use in PostgreSQL/PostGIS. """ if self.is_geometry: # Psycopg will figure out whether to use E'\\000' or '\000'. return b'%s(%s)' % ( b'ST_GeogFromWKB' if self.geography else b'ST_GeomFromEWKB', self._adapter.getquoted() ) else: # For rasters, add explicit type cast to WKB string. return b"'%s'::raster" % self.ewkb.encode()
""" GDAL - Constant definitions """ from ctypes import ( c_double, c_float, c_int16, c_int32, c_ubyte, c_uint16, c_uint32, ) # See https://gdal.org/api/raster_c_api.html#_CPPv412GDALDataType GDAL_PIXEL_TYPES = { 0: 'GDT_Unknown', # Unknown or unspecified type 1: 'GDT_Byte', # Eight bit unsigned integer 2: 'GDT_UInt16', # Sixteen bit unsigned integer 3: 'GDT_Int16', # Sixteen bit signed integer 4: 'GDT_UInt32', # Thirty-two bit unsigned integer 5: 'GDT_Int32', # Thirty-two bit signed integer 6: 'GDT_Float32', # Thirty-two bit floating point 7: 'GDT_Float64', # Sixty-four bit floating point 8: 'GDT_CInt16', # Complex Int16 9: 'GDT_CInt32', # Complex Int32 10: 'GDT_CFloat32', # Complex Float32 11: 'GDT_CFloat64', # Complex Float64 } # A list of gdal datatypes that are integers. GDAL_INTEGER_TYPES = [1, 2, 3, 4, 5] # Lookup values to convert GDAL pixel type indices into ctypes objects. # The GDAL band-io works with ctypes arrays to hold data to be written # or to hold the space for data to be read into. The lookup below helps # selecting the right ctypes object for a given gdal pixel type. GDAL_TO_CTYPES = [ None, c_ubyte, c_uint16, c_int16, c_uint32, c_int32, c_float, c_double, None, None, None, None ] # List of resampling algorithms that can be used to warp a GDALRaster. GDAL_RESAMPLE_ALGORITHMS = { 'NearestNeighbour': 0, 'Bilinear': 1, 'Cubic': 2, 'CubicSpline': 3, 'Lanczos': 4, 'Average': 5, 'Mode': 6, } # See https://gdal.org/api/raster_c_api.html#_CPPv415GDALColorInterp GDAL_COLOR_TYPES = { 0: 'GCI_Undefined', # Undefined, default value, i.e. not known 1: 'GCI_GrayIndex', # Grayscale 2: 'GCI_PaletteIndex', # Paletted 3: 'GCI_RedBand', # Red band of RGBA image 4: 'GCI_GreenBand', # Green band of RGBA image 5: 'GCI_BlueBand', # Blue band of RGBA image 6: 'GCI_AlphaBand', # Alpha (0=transparent, 255=opaque) 7: 'GCI_HueBand', # Hue band of HLS image 8: 'GCI_SaturationBand', # Saturation band of HLS image 9: 'GCI_LightnessBand', # Lightness band of HLS image 10: 'GCI_CyanBand', # Cyan band of CMYK image 11: 'GCI_MagentaBand', # Magenta band of CMYK image 12: 'GCI_YellowBand', # Yellow band of CMYK image 13: 'GCI_BlackBand', # Black band of CMLY image 14: 'GCI_YCbCr_YBand', # Y Luminance 15: 'GCI_YCbCr_CbBand', # Cb Chroma 16: 'GCI_YCbCr_CrBand', # Cr Chroma, also GCI_Max } # GDAL virtual filesystems prefix. VSI_FILESYSTEM_PREFIX = '/vsi' # Fixed base path for buffer-based GDAL in-memory files. VSI_MEM_FILESYSTEM_BASE_PATH = '/vsimem/' # Should the memory file system take ownership of the buffer, freeing it when # the file is deleted? (No, GDALRaster.__del__() will delete the buffer.) VSI_TAKE_BUFFER_OWNERSHIP = False # Should a VSI file be removed when retrieving its buffer? VSI_DELETE_BUFFER_ON_READ = False
""" This module houses the ctypes function prototypes for GDAL DataSource (raster) related data structures. """ from ctypes import POINTER, c_bool, c_char_p, c_double, c_int, c_void_p from functools import partial from django.contrib.gis.gdal.libgdal import std_call from django.contrib.gis.gdal.prototypes.generation import ( chararray_output, const_string_output, double_output, int_output, void_output, voidptr_output, ) # For more detail about c function names and definitions see # https://gdal.org/api/raster_c_api.html # https://gdal.org/doxygen/gdalwarper_8h.html # https://gdal.org/api/gdal_utils.html # Prepare partial functions that use cpl error codes void_output = partial(void_output, cpl=True) const_string_output = partial(const_string_output, cpl=True) double_output = partial(double_output, cpl=True) # Raster Driver Routines register_all = void_output(std_call('GDALAllRegister'), [], errcheck=False) get_driver = voidptr_output(std_call('GDALGetDriver'), [c_int]) get_driver_by_name = voidptr_output(std_call('GDALGetDriverByName'), [c_char_p], errcheck=False) get_driver_count = int_output(std_call('GDALGetDriverCount'), []) get_driver_description = const_string_output(std_call('GDALGetDescription'), [c_void_p]) # Raster Data Source Routines create_ds = voidptr_output(std_call('GDALCreate'), [c_void_p, c_char_p, c_int, c_int, c_int, c_int, c_void_p]) open_ds = voidptr_output(std_call('GDALOpen'), [c_char_p, c_int]) close_ds = void_output(std_call('GDALClose'), [c_void_p], errcheck=False) flush_ds = int_output(std_call('GDALFlushCache'), [c_void_p]) copy_ds = voidptr_output( std_call('GDALCreateCopy'), [c_void_p, c_char_p, c_void_p, c_int, POINTER(c_char_p), c_void_p, c_void_p] ) add_band_ds = void_output(std_call('GDALAddBand'), [c_void_p, c_int]) get_ds_description = const_string_output(std_call('GDALGetDescription'), [c_void_p]) get_ds_driver = voidptr_output(std_call('GDALGetDatasetDriver'), [c_void_p]) get_ds_info = const_string_output(std_call('GDALInfo'), [c_void_p, c_void_p]) get_ds_xsize = int_output(std_call('GDALGetRasterXSize'), [c_void_p]) get_ds_ysize = int_output(std_call('GDALGetRasterYSize'), [c_void_p]) get_ds_raster_count = int_output(std_call('GDALGetRasterCount'), [c_void_p]) get_ds_raster_band = voidptr_output(std_call('GDALGetRasterBand'), [c_void_p, c_int]) get_ds_projection_ref = const_string_output(std_call('GDALGetProjectionRef'), [c_void_p]) set_ds_projection_ref = void_output(std_call('GDALSetProjection'), [c_void_p, c_char_p]) get_ds_geotransform = void_output(std_call('GDALGetGeoTransform'), [c_void_p, POINTER(c_double * 6)], errcheck=False) set_ds_geotransform = void_output(std_call('GDALSetGeoTransform'), [c_void_p, POINTER(c_double * 6)]) get_ds_metadata = chararray_output(std_call('GDALGetMetadata'), [c_void_p, c_char_p], errcheck=False) set_ds_metadata = void_output(std_call('GDALSetMetadata'), [c_void_p, POINTER(c_char_p), c_char_p]) get_ds_metadata_domain_list = chararray_output(std_call('GDALGetMetadataDomainList'), [c_void_p], errcheck=False) get_ds_metadata_item = const_string_output(std_call('GDALGetMetadataItem'), [c_void_p, c_char_p, c_char_p]) set_ds_metadata_item = const_string_output(std_call('GDALSetMetadataItem'), [c_void_p, c_char_p, c_char_p, c_char_p]) free_dsl = void_output(std_call('CSLDestroy'), [POINTER(c_char_p)], errcheck=False) # Raster Band Routines band_io = void_output( std_call('GDALRasterIO'), [c_void_p, c_int, c_int, c_int, c_int, c_int, c_void_p, c_int, c_int, c_int, c_int, c_int] ) get_band_xsize = 
int_output(std_call('GDALGetRasterBandXSize'), [c_void_p]) get_band_ysize = int_output(std_call('GDALGetRasterBandYSize'), [c_void_p]) get_band_index = int_output(std_call('GDALGetBandNumber'), [c_void_p]) get_band_description = const_string_output(std_call('GDALGetDescription'), [c_void_p]) get_band_ds = voidptr_output(std_call('GDALGetBandDataset'), [c_void_p]) get_band_datatype = int_output(std_call('GDALGetRasterDataType'), [c_void_p]) get_band_color_interp = int_output(std_call('GDALGetRasterColorInterpretation'), [c_void_p]) get_band_nodata_value = double_output(std_call('GDALGetRasterNoDataValue'), [c_void_p, POINTER(c_int)]) set_band_nodata_value = void_output(std_call('GDALSetRasterNoDataValue'), [c_void_p, c_double]) delete_band_nodata_value = void_output(std_call('GDALDeleteRasterNoDataValue'), [c_void_p]) get_band_statistics = void_output( std_call('GDALGetRasterStatistics'), [ c_void_p, c_int, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double), POINTER(c_double), c_void_p, c_void_p, ], ) compute_band_statistics = void_output( std_call('GDALComputeRasterStatistics'), [c_void_p, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double), POINTER(c_double), c_void_p, c_void_p], ) # Reprojection routine reproject_image = void_output( std_call('GDALReprojectImage'), [c_void_p, c_char_p, c_void_p, c_char_p, c_int, c_double, c_double, c_void_p, c_void_p, c_void_p] ) auto_create_warped_vrt = voidptr_output( std_call('GDALAutoCreateWarpedVRT'), [c_void_p, c_char_p, c_char_p, c_int, c_double, c_void_p] ) # Create VSI gdal raster files from in-memory buffers. # https://gdal.org/api/cpl.html#cpl-vsi-h create_vsi_file_from_mem_buffer = voidptr_output(std_call('VSIFileFromMemBuffer'), [c_char_p, c_void_p, c_int, c_int]) get_mem_buffer_from_vsi_file = voidptr_output(std_call('VSIGetMemFileBuffer'), [c_char_p, POINTER(c_int), c_bool]) unlink_vsi_file = int_output(std_call('VSIUnlink'), [c_char_p])
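# Illustrative usage sketch -- these prototypes back the high-level GDALRaster
# API; a tiny in-memory raster exercises create_ds, band_io and the statistics
# routines indirectly. Requires the GDAL library only.
from django.contrib.gis.gdal import GDALRaster

rst = GDALRaster({
    'srid': 4326,
    'width': 2, 'height': 2,
    'bands': [{'data': [0, 1, 2, 3]}],
})
print(rst.driver.name)        # 'MEM' -- in-memory dataset created via create_ds
print(rst.srs.srid)           # 4326  -- applied via set_ds_projection_ref
band = rst.bands[0]
print(band.min, band.max)     # 0.0 3.0 -- get_band_statistics over the band_io data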
""" This module houses the ctypes function prototypes for OGR DataSource related data structures. OGR_Dr_*, OGR_DS_*, OGR_L_*, OGR_F_*, OGR_Fld_* routines are relevant here. """ from ctypes import POINTER, c_char_p, c_double, c_int, c_long, c_void_p from django.contrib.gis.gdal.envelope import OGREnvelope from django.contrib.gis.gdal.libgdal import lgdal from django.contrib.gis.gdal.prototypes.generation import ( bool_output, const_string_output, double_output, geom_output, int64_output, int_output, srs_output, void_output, voidptr_output, ) c_int_p = POINTER(c_int) # shortcut type # Driver Routines register_all = void_output(lgdal.OGRRegisterAll, [], errcheck=False) cleanup_all = void_output(lgdal.OGRCleanupAll, [], errcheck=False) get_driver = voidptr_output(lgdal.OGRGetDriver, [c_int]) get_driver_by_name = voidptr_output(lgdal.OGRGetDriverByName, [c_char_p], errcheck=False) get_driver_count = int_output(lgdal.OGRGetDriverCount, []) get_driver_name = const_string_output(lgdal.OGR_Dr_GetName, [c_void_p], decoding='ascii') # DataSource open_ds = voidptr_output(lgdal.OGROpen, [c_char_p, c_int, POINTER(c_void_p)]) destroy_ds = void_output(lgdal.OGR_DS_Destroy, [c_void_p], errcheck=False) release_ds = void_output(lgdal.OGRReleaseDataSource, [c_void_p]) get_ds_name = const_string_output(lgdal.OGR_DS_GetName, [c_void_p]) get_layer = voidptr_output(lgdal.OGR_DS_GetLayer, [c_void_p, c_int]) get_layer_by_name = voidptr_output(lgdal.OGR_DS_GetLayerByName, [c_void_p, c_char_p]) get_layer_count = int_output(lgdal.OGR_DS_GetLayerCount, [c_void_p]) # Layer Routines get_extent = void_output(lgdal.OGR_L_GetExtent, [c_void_p, POINTER(OGREnvelope), c_int]) get_feature = voidptr_output(lgdal.OGR_L_GetFeature, [c_void_p, c_long]) get_feature_count = int_output(lgdal.OGR_L_GetFeatureCount, [c_void_p, c_int]) get_layer_defn = voidptr_output(lgdal.OGR_L_GetLayerDefn, [c_void_p]) get_layer_srs = srs_output(lgdal.OGR_L_GetSpatialRef, [c_void_p]) get_next_feature = voidptr_output(lgdal.OGR_L_GetNextFeature, [c_void_p]) reset_reading = void_output(lgdal.OGR_L_ResetReading, [c_void_p], errcheck=False) test_capability = int_output(lgdal.OGR_L_TestCapability, [c_void_p, c_char_p]) get_spatial_filter = geom_output(lgdal.OGR_L_GetSpatialFilter, [c_void_p]) set_spatial_filter = void_output(lgdal.OGR_L_SetSpatialFilter, [c_void_p, c_void_p], errcheck=False) set_spatial_filter_rect = void_output( lgdal.OGR_L_SetSpatialFilterRect, [c_void_p, c_double, c_double, c_double, c_double], errcheck=False ) # Feature Definition Routines get_fd_geom_type = int_output(lgdal.OGR_FD_GetGeomType, [c_void_p]) get_fd_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p]) get_feat_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p]) get_field_count = int_output(lgdal.OGR_FD_GetFieldCount, [c_void_p]) get_field_defn = voidptr_output(lgdal.OGR_FD_GetFieldDefn, [c_void_p, c_int]) # Feature Routines clone_feature = voidptr_output(lgdal.OGR_F_Clone, [c_void_p]) destroy_feature = void_output(lgdal.OGR_F_Destroy, [c_void_p], errcheck=False) feature_equal = int_output(lgdal.OGR_F_Equal, [c_void_p, c_void_p]) get_feat_geom_ref = geom_output(lgdal.OGR_F_GetGeometryRef, [c_void_p]) get_feat_field_count = int_output(lgdal.OGR_F_GetFieldCount, [c_void_p]) get_feat_field_defn = voidptr_output(lgdal.OGR_F_GetFieldDefnRef, [c_void_p, c_int]) get_fid = int_output(lgdal.OGR_F_GetFID, [c_void_p]) get_field_as_datetime = int_output( lgdal.OGR_F_GetFieldAsDateTime, [c_void_p, c_int, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p] ) 
get_field_as_double = double_output(lgdal.OGR_F_GetFieldAsDouble, [c_void_p, c_int]) get_field_as_integer = int_output(lgdal.OGR_F_GetFieldAsInteger, [c_void_p, c_int]) get_field_as_integer64 = int64_output(lgdal.OGR_F_GetFieldAsInteger64, [c_void_p, c_int]) is_field_set = bool_output(lgdal.OGR_F_IsFieldSetAndNotNull, [c_void_p, c_int]) get_field_as_string = const_string_output(lgdal.OGR_F_GetFieldAsString, [c_void_p, c_int]) get_field_index = int_output(lgdal.OGR_F_GetFieldIndex, [c_void_p, c_char_p]) # Field Routines get_field_name = const_string_output(lgdal.OGR_Fld_GetNameRef, [c_void_p]) get_field_precision = int_output(lgdal.OGR_Fld_GetPrecision, [c_void_p]) get_field_type = int_output(lgdal.OGR_Fld_GetType, [c_void_p]) get_field_type_name = const_string_output(lgdal.OGR_GetFieldTypeName, [c_int]) get_field_width = int_output(lgdal.OGR_Fld_GetWidth, [c_void_p])
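# Illustrative usage sketch -- these prototypes power the high-level
# DataSource/Layer/Feature wrappers; a hedged sketch of reading a hypothetical
# shapefile through them (path and field name are placeholders):
#
# from django.contrib.gis.gdal import DataSource
#
# ds = DataSource('/path/to/cities.shp')                 # open_ds / get_layer_count
# layer = ds[0]                                          # get_layer / get_layer_defn
# print(layer.name, layer.num_feat, layer.geom_type)     # OGR_L_* routines
# for feature in layer:                                  # get_next_feature / reset_reading
#     print(feature.get('NAME'), feature.geom.wkt)       # OGR_F_* / OGR_Fld_* routines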
fb2fae6576fd698c47cb045f4016535a557a2c04b1b836b69f23790e1fd9bfa6
from ctypes import POINTER, c_char_p, c_int, c_void_p from django.contrib.gis.gdal.libgdal import GDAL_VERSION, lgdal, std_call from django.contrib.gis.gdal.prototypes.generation import ( const_string_output, double_output, int_output, srs_output, string_output, void_output, ) # Shortcut generation for routines with known parameters. def srs_double(f): """ Create a function prototype for the OSR routines that take the OSRSpatialReference object and return a double value. """ return double_output(f, [c_void_p, POINTER(c_int)], errcheck=True) def units_func(f): """ Create a ctypes function prototype for OSR units functions, e.g., OSRGetAngularUnits, OSRGetLinearUnits. """ return double_output(f, [c_void_p, POINTER(c_char_p)], strarg=True) # Creation & destruction. clone_srs = srs_output(std_call('OSRClone'), [c_void_p]) new_srs = srs_output(std_call('OSRNewSpatialReference'), [c_char_p]) release_srs = void_output(lgdal.OSRRelease, [c_void_p], errcheck=False) destroy_srs = void_output(std_call('OSRDestroySpatialReference'), [c_void_p], errcheck=False) srs_validate = void_output(lgdal.OSRValidate, [c_void_p]) if GDAL_VERSION >= (3, 0): set_axis_strategy = void_output(lgdal.OSRSetAxisMappingStrategy, [c_void_p, c_int], errcheck=False) # Getting the semi_major, semi_minor, and flattening functions. semi_major = srs_double(lgdal.OSRGetSemiMajor) semi_minor = srs_double(lgdal.OSRGetSemiMinor) invflattening = srs_double(lgdal.OSRGetInvFlattening) # WKT, PROJ, EPSG, XML importation routines. from_wkt = void_output(lgdal.OSRImportFromWkt, [c_void_p, POINTER(c_char_p)]) from_proj = void_output(lgdal.OSRImportFromProj4, [c_void_p, c_char_p]) from_epsg = void_output(std_call('OSRImportFromEPSG'), [c_void_p, c_int]) from_xml = void_output(lgdal.OSRImportFromXML, [c_void_p, c_char_p]) from_user_input = void_output(std_call('OSRSetFromUserInput'), [c_void_p, c_char_p]) # Morphing to/from ESRI WKT. morph_to_esri = void_output(lgdal.OSRMorphToESRI, [c_void_p]) morph_from_esri = void_output(lgdal.OSRMorphFromESRI, [c_void_p]) # Identifying the EPSG identify_epsg = void_output(lgdal.OSRAutoIdentifyEPSG, [c_void_p]) # Getting the angular_units, linear_units functions linear_units = units_func(lgdal.OSRGetLinearUnits) angular_units = units_func(lgdal.OSRGetAngularUnits) # For exporting to WKT, PROJ, "Pretty" WKT, and XML. to_wkt = string_output(std_call('OSRExportToWkt'), [c_void_p, POINTER(c_char_p)], decoding='utf-8') to_proj = string_output(std_call('OSRExportToProj4'), [c_void_p, POINTER(c_char_p)], decoding='ascii') to_pretty_wkt = string_output( std_call('OSRExportToPrettyWkt'), [c_void_p, POINTER(c_char_p), c_int], offset=-2, decoding='utf-8' ) to_xml = string_output(lgdal.OSRExportToXML, [c_void_p, POINTER(c_char_p), c_char_p], offset=-2, decoding='utf-8') # String attribute retrieval routines. get_attr_value = const_string_output(std_call('OSRGetAttrValue'), [c_void_p, c_char_p, c_int], decoding='utf-8') get_auth_name = const_string_output(lgdal.OSRGetAuthorityName, [c_void_p, c_char_p], decoding='ascii') get_auth_code = const_string_output(lgdal.OSRGetAuthorityCode, [c_void_p, c_char_p], decoding='ascii') # SRS Properties isgeographic = int_output(lgdal.OSRIsGeographic, [c_void_p]) islocal = int_output(lgdal.OSRIsLocal, [c_void_p]) isprojected = int_output(lgdal.OSRIsProjected, [c_void_p]) # Coordinate transformation new_ct = srs_output(std_call('OCTNewCoordinateTransformation'), [c_void_p, c_void_p]) destroy_ct = void_output(std_call('OCTDestroyCoordinateTransformation'), [c_void_p], errcheck=False)
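

# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example, assuming a working GDAL install: create an empty
# spatial reference handle, import WGS 84 by EPSG code, and export it as WKT.
# These are the same low-level calls the SpatialReference wrapper makes.
if __name__ == '__main__':
    from ctypes import byref
    srs = new_srs(c_char_p(b''))            # empty OGRSpatialReferenceH
    from_epsg(srs, 4326)                    # import WGS 84 by authority code
    print(bool(isgeographic(srs)))          # True
    print(to_wkt(srs, byref(c_char_p())))   # the GEOGCS/GEOGCRS WKT string
    release_srs(srs)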
47adae552a3951e7bc445ce77c157f132e753e07600dd4ab3055d04c6321c1f1
from ctypes import POINTER, c_char_p, c_int, c_ubyte, c_uint from django.contrib.gis.geos.libgeos import CS_PTR, GEOM_PTR, GEOSFuncFactory from django.contrib.gis.geos.prototypes.errcheck import ( check_geom, check_minus_one, check_string, ) # This is the return type used by binary output (WKB, HEX) routines. c_uchar_p = POINTER(c_ubyte) # We create a simple subclass of c_char_p here because when the response # type is set to c_char_p, you get a _Python_ string and there's no way # to access the string's address inside the error checking function. # In other words, you can't free the memory allocated inside GEOS. Previously, # the return type would just be omitted and the integer address would be # used -- but this allows us to be specific in the function definition and # keeps the reference so it may be free'd. class geos_char_p(c_char_p): pass # ### ctypes factory classes ### class GeomOutput(GEOSFuncFactory): "For GEOS routines that return a geometry." restype = GEOM_PTR errcheck = staticmethod(check_geom) class IntFromGeom(GEOSFuncFactory): "Argument is a geometry, return type is an integer." argtypes = [GEOM_PTR] restype = c_int errcheck = staticmethod(check_minus_one) class StringFromGeom(GEOSFuncFactory): "Argument is a Geometry, return type is a string." argtypes = [GEOM_PTR] restype = geos_char_p errcheck = staticmethod(check_string) # ### ctypes prototypes ### # The GEOS geometry type, typeid, num_coordinates and number of geometries geos_makevalid = GeomOutput('GEOSMakeValid', argtypes=[GEOM_PTR]) geos_normalize = IntFromGeom('GEOSNormalize') geos_type = StringFromGeom('GEOSGeomType') geos_typeid = IntFromGeom('GEOSGeomTypeId') get_dims = GEOSFuncFactory('GEOSGeom_getDimensions', argtypes=[GEOM_PTR], restype=c_int) get_num_coords = IntFromGeom('GEOSGetNumCoordinates') get_num_geoms = IntFromGeom('GEOSGetNumGeometries') # Geometry creation factories create_point = GeomOutput('GEOSGeom_createPoint', argtypes=[CS_PTR]) create_linestring = GeomOutput('GEOSGeom_createLineString', argtypes=[CS_PTR]) create_linearring = GeomOutput('GEOSGeom_createLinearRing', argtypes=[CS_PTR]) # Polygon and collection creation routines need argument types defined # for compatibility with some platforms, e.g. macOS ARM64. With argtypes # defined, arrays are automatically cast and byref() calls are not needed. create_polygon = GeomOutput( 'GEOSGeom_createPolygon', argtypes=[GEOM_PTR, POINTER(GEOM_PTR), c_uint], ) create_empty_polygon = GeomOutput('GEOSGeom_createEmptyPolygon', argtypes=[]) create_collection = GeomOutput( 'GEOSGeom_createCollection', argtypes=[c_int, POINTER(GEOM_PTR), c_uint], ) # Ring routines get_extring = GeomOutput('GEOSGetExteriorRing', argtypes=[GEOM_PTR]) get_intring = GeomOutput('GEOSGetInteriorRingN', argtypes=[GEOM_PTR, c_int]) get_nrings = IntFromGeom('GEOSGetNumInteriorRings') # Collection Routines get_geomn = GeomOutput('GEOSGetGeometryN', argtypes=[GEOM_PTR, c_int]) # Cloning geom_clone = GEOSFuncFactory('GEOSGeom_clone', argtypes=[GEOM_PTR], restype=GEOM_PTR) # Destruction routine. destroy_geom = GEOSFuncFactory('GEOSGeom_destroy', argtypes=[GEOM_PTR]) # SRID routines geos_get_srid = GEOSFuncFactory('GEOSGetSRID', argtypes=[GEOM_PTR], restype=c_int) geos_set_srid = GEOSFuncFactory('GEOSSetSRID', argtypes=[GEOM_PTR, c_int])
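

# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example, assuming a working GEOS install.  It borrows the
# coordinate sequence prototypes from the sibling coordseq module, following
# the same pattern as the higher-level Point constructor: build a
# one-coordinate sequence, hand ownership of it to GEOSGeom_createPoint,
# inspect the result, and free it.
if __name__ == '__main__':
    from django.contrib.gis.geos.prototypes.coordseq import (
        create_cs, cs_setx, cs_sety,
    )
    cs = create_cs(c_uint(1), c_uint(2))    # one coordinate, two dimensions
    cs_setx(cs, 0, 1.0)
    cs_sety(cs, 0, 2.0)
    pt = create_point(cs)                   # the point now owns the sequence
    print(geos_type(pt))                    # b'Point'
    print(geos_typeid(pt))                  # 0
    destroy_geom(pt)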
52f15c467bca9fd0b3316eb3f72e9a5876e6d13a0e83e02d209c46eaa13e51de
""" This module contains all of the GEOS ctypes function prototypes. Each prototype handles the interaction between the GEOS library and Python via ctypes. """ from django.contrib.gis.geos.prototypes.coordseq import ( # NOQA create_cs, cs_clone, cs_getdims, cs_getordinate, cs_getsize, cs_getx, cs_gety, cs_getz, cs_is_ccw, cs_setordinate, cs_setx, cs_sety, cs_setz, get_cs, ) from django.contrib.gis.geos.prototypes.geom import ( # NOQA create_collection, create_empty_polygon, create_linearring, create_linestring, create_point, create_polygon, destroy_geom, geom_clone, geos_get_srid, geos_makevalid, geos_normalize, geos_set_srid, geos_type, geos_typeid, get_dims, get_extring, get_geomn, get_intring, get_nrings, get_num_coords, get_num_geoms, ) from django.contrib.gis.geos.prototypes.misc import * # NOQA from django.contrib.gis.geos.prototypes.predicates import ( # NOQA geos_contains, geos_covers, geos_crosses, geos_disjoint, geos_equals, geos_equalsexact, geos_hasz, geos_intersects, geos_isclosed, geos_isempty, geos_isring, geos_issimple, geos_isvalid, geos_overlaps, geos_relatepattern, geos_touches, geos_within, ) from django.contrib.gis.geos.prototypes.topology import * # NOQA
d8358957ca73198d9072e8facccf82d0c73c4742ad2db01fd8f1fcc7ce58cffe
""" Tests for django.core.servers. """ import errno import os import socket import threading from http.client import HTTPConnection from urllib.error import HTTPError from urllib.parse import urlencode from urllib.request import urlopen from django.conf import settings from django.core.servers.basehttp import ThreadedWSGIServer, WSGIServer from django.db import DEFAULT_DB_ALIAS, connections from django.test import LiveServerTestCase, override_settings from django.test.testcases import LiveServerThread, QuietWSGIRequestHandler from .models import Person TEST_ROOT = os.path.dirname(__file__) TEST_SETTINGS = { 'MEDIA_URL': 'media/', 'MEDIA_ROOT': os.path.join(TEST_ROOT, 'media'), 'STATIC_URL': 'static/', 'STATIC_ROOT': os.path.join(TEST_ROOT, 'static'), } @override_settings(ROOT_URLCONF='servers.urls', **TEST_SETTINGS) class LiveServerBase(LiveServerTestCase): available_apps = [ 'servers', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', ] fixtures = ['testdata.json'] def urlopen(self, url): return urlopen(self.live_server_url + url) class CloseConnectionTestServer(ThreadedWSGIServer): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # This event is set right after the first time a request closes its # database connections. self._connections_closed = threading.Event() def _close_connections(self): super()._close_connections() self._connections_closed.set() class CloseConnectionTestLiveServerThread(LiveServerThread): server_class = CloseConnectionTestServer def _create_server(self, connections_override=None): return super()._create_server(connections_override=self.connections_override) class LiveServerTestCloseConnectionTest(LiveServerBase): server_thread_class = CloseConnectionTestLiveServerThread @classmethod def _make_connections_override(cls): conn = connections[DEFAULT_DB_ALIAS] cls.conn = conn cls.old_conn_max_age = conn.settings_dict['CONN_MAX_AGE'] # Set the connection's CONN_MAX_AGE to None to simulate the # CONN_MAX_AGE setting being set to None on the server. This prevents # Django from closing the connection and allows testing that # ThreadedWSGIServer closes connections. conn.settings_dict['CONN_MAX_AGE'] = None # Pass a database connection through to the server to check it is being # closed by ThreadedWSGIServer. return {DEFAULT_DB_ALIAS: conn} @classmethod def tearDownConnectionTest(cls): cls.conn.settings_dict['CONN_MAX_AGE'] = cls.old_conn_max_age @classmethod def tearDownClass(cls): cls.tearDownConnectionTest() super().tearDownClass() def test_closes_connections(self): # The server's request thread sets this event after closing # its database connections. closed_event = self.server_thread.httpd._connections_closed conn = self.conn # Open a connection to the database. conn.connect() self.assertIsNotNone(conn.connection) with self.urlopen('/model_view/') as f: # The server can access the database. self.assertEqual(f.read().splitlines(), [b'jane', b'robert']) # Wait for the server's request thread to close the connection. # A timeout of 0.1 seconds should be more than enough. If the wait # times out, the assertion after should fail. 
closed_event.wait(timeout=0.1) self.assertIsNone(conn.connection) class FailingLiveServerThread(LiveServerThread): def _create_server(self): raise RuntimeError('Error creating server.') class LiveServerTestCaseSetupTest(LiveServerBase): server_thread_class = FailingLiveServerThread @classmethod def check_allowed_hosts(cls, expected): if settings.ALLOWED_HOSTS != expected: raise RuntimeError(f'{settings.ALLOWED_HOSTS} != {expected}') @classmethod def setUpClass(cls): cls.check_allowed_hosts(['testserver']) try: super().setUpClass() except RuntimeError: # LiveServerTestCase's change to ALLOWED_HOSTS should be reverted. cls.doClassCleanups() cls.check_allowed_hosts(['testserver']) else: raise RuntimeError('Server did not fail.') cls.set_up_called = True def test_set_up_class(self): self.assertIs(self.set_up_called, True) class LiveServerAddress(LiveServerBase): @classmethod def setUpClass(cls): super().setUpClass() # put it in a list to prevent descriptor lookups in test cls.live_server_url_test = [cls.live_server_url] def test_live_server_url_is_class_property(self): self.assertIsInstance(self.live_server_url_test[0], str) self.assertEqual(self.live_server_url_test[0], self.live_server_url) class LiveServerSingleThread(LiveServerThread): def _create_server(self): return WSGIServer((self.host, self.port), QuietWSGIRequestHandler, allow_reuse_address=False) class SingleThreadLiveServerTestCase(LiveServerTestCase): server_thread_class = LiveServerSingleThread class LiveServerViews(LiveServerBase): def test_protocol(self): """Launched server serves with HTTP 1.1.""" with self.urlopen('/example_view/') as f: self.assertEqual(f.version, 11) def test_closes_connection_without_content_length(self): """ An HTTP 1.1 server is supposed to support keep-alive. Since our development server is rather simple we support it only in cases where we can detect a content length from the response. This should be doable for all simple views and streaming responses where an iterable with length of one is passed. The latter follows as result of `set_content_length` from https://github.com/python/cpython/blob/master/Lib/wsgiref/handlers.py. If we cannot detect a content length we explicitly set the `Connection` header to `close` to notify the client that we do not actually support it. """ conn = HTTPConnection(LiveServerViews.server_thread.host, LiveServerViews.server_thread.port, timeout=1) try: conn.request('GET', '/streaming_example_view/', headers={'Connection': 'keep-alive'}) response = conn.getresponse() self.assertTrue(response.will_close) self.assertEqual(response.read(), b'Iamastream') self.assertEqual(response.status, 200) self.assertEqual(response.getheader('Connection'), 'close') conn.request('GET', '/streaming_example_view/', headers={'Connection': 'close'}) response = conn.getresponse() self.assertTrue(response.will_close) self.assertEqual(response.read(), b'Iamastream') self.assertEqual(response.status, 200) self.assertEqual(response.getheader('Connection'), 'close') finally: conn.close() def test_keep_alive_on_connection_with_content_length(self): """ See `test_closes_connection_without_content_length` for details. This is a follow up test, which ensure that we do not close the connection if not needed, hence allowing us to take advantage of keep-alive. 
""" conn = HTTPConnection(LiveServerViews.server_thread.host, LiveServerViews.server_thread.port) try: conn.request('GET', '/example_view/', headers={"Connection": "keep-alive"}) response = conn.getresponse() self.assertFalse(response.will_close) self.assertEqual(response.read(), b'example view') self.assertEqual(response.status, 200) self.assertIsNone(response.getheader('Connection')) conn.request('GET', '/example_view/', headers={"Connection": "close"}) response = conn.getresponse() self.assertFalse(response.will_close) self.assertEqual(response.read(), b'example view') self.assertEqual(response.status, 200) self.assertIsNone(response.getheader('Connection')) finally: conn.close() def test_keep_alive_connection_clears_previous_request_data(self): conn = HTTPConnection(LiveServerViews.server_thread.host, LiveServerViews.server_thread.port) try: conn.request('POST', '/method_view/', b'{}', headers={"Connection": "keep-alive"}) response = conn.getresponse() self.assertFalse(response.will_close) self.assertEqual(response.status, 200) self.assertEqual(response.read(), b'POST') conn.request('POST', '/method_view/', b'{}', headers={"Connection": "close"}) response = conn.getresponse() self.assertFalse(response.will_close) self.assertEqual(response.status, 200) self.assertEqual(response.read(), b'POST') finally: conn.close() def test_404(self): with self.assertRaises(HTTPError) as err: self.urlopen('/') err.exception.close() self.assertEqual(err.exception.code, 404, 'Expected 404 response') def test_view(self): with self.urlopen('/example_view/') as f: self.assertEqual(f.read(), b'example view') def test_static_files(self): with self.urlopen('/static/example_static_file.txt') as f: self.assertEqual(f.read().rstrip(b'\r\n'), b'example static file') def test_no_collectstatic_emulation(self): """ LiveServerTestCase reports a 404 status code when HTTP client tries to access a static file that isn't explicitly put under STATIC_ROOT. """ with self.assertRaises(HTTPError) as err: self.urlopen('/static/another_app/another_app_static_file.txt') err.exception.close() self.assertEqual(err.exception.code, 404, 'Expected 404 response') def test_media_files(self): with self.urlopen('/media/example_media_file.txt') as f: self.assertEqual(f.read().rstrip(b'\r\n'), b'example media file') def test_environ(self): with self.urlopen('/environ_view/?%s' % urlencode({'q': 'тест'})) as f: self.assertIn(b"QUERY_STRING: 'q=%D1%82%D0%B5%D1%81%D1%82'", f.read()) @override_settings(ROOT_URLCONF='servers.urls') class SingleThreadLiveServerViews(SingleThreadLiveServerTestCase): available_apps = ['servers'] def test_closes_connection_with_content_length(self): """ Contrast to LiveServerViews.test_keep_alive_on_connection_with_content_length(). Persistent connections require threading server. """ conn = HTTPConnection( SingleThreadLiveServerViews.server_thread.host, SingleThreadLiveServerViews.server_thread.port, timeout=1, ) try: conn.request('GET', '/example_view/', headers={'Connection': 'keep-alive'}) response = conn.getresponse() self.assertTrue(response.will_close) self.assertEqual(response.read(), b'example view') self.assertEqual(response.status, 200) self.assertEqual(response.getheader('Connection'), 'close') finally: conn.close() class LiveServerDatabase(LiveServerBase): def test_fixtures_loaded(self): """ Fixtures are properly loaded and visible to the live server thread. 
""" with self.urlopen('/model_view/') as f: self.assertEqual(f.read().splitlines(), [b'jane', b'robert']) def test_database_writes(self): """ Data written to the database by a view can be read. """ with self.urlopen('/create_model_instance/'): pass self.assertQuerysetEqual( Person.objects.all().order_by('pk'), ['jane', 'robert', 'emily'], lambda b: b.name ) class LiveServerPort(LiveServerBase): def test_port_bind(self): """ Each LiveServerTestCase binds to a unique port or fails to start a server thread when run concurrently (#26011). """ TestCase = type("TestCase", (LiveServerBase,), {}) try: TestCase._start_server_thread() except OSError as e: if e.errno == errno.EADDRINUSE: # We're out of ports, LiveServerTestCase correctly fails with # an OSError. return # Unexpected error. raise self.assertNotEqual( self.live_server_url, TestCase.live_server_url, f'Acquired duplicate server addresses for server threads: ' f'{self.live_server_url}', ) def test_specified_port_bind(self): """LiveServerTestCase.port customizes the server's port.""" TestCase = type('TestCase', (LiveServerBase,), {}) # Find an open port and tell TestCase to use it. s = socket.socket() s.bind(('', 0)) TestCase.port = s.getsockname()[1] s.close() TestCase._start_server_thread() self.assertEqual( TestCase.port, TestCase.server_thread.port, f'Did not use specified port for LiveServerTestCase thread: ' f'{TestCase.port}', ) class LiveServerThreadedTests(LiveServerBase): """If LiveServerTestCase isn't threaded, these tests will hang.""" def test_view_calls_subview(self): url = '/subview_calling_view/?%s' % urlencode({'url': self.live_server_url}) with self.urlopen(url) as f: self.assertEqual(f.read(), b'subview calling view: subview') def test_check_model_instance_from_subview(self): url = '/check_model_instance_from_subview/?%s' % urlencode({ 'url': self.live_server_url, }) with self.urlopen(url) as f: self.assertIn(b'emily', f.read())
4eeb9574e57338aa06e6466223cdab410a2a2846193481a126befb214996ca50
import datetime from io import StringIO from unittest import mock from django.core.management.base import OutputWrapper from django.db.migrations.questioner import ( InteractiveMigrationQuestioner, MigrationQuestioner, ) from django.db.models import NOT_PROVIDED from django.test import SimpleTestCase from django.test.utils import override_settings class QuestionerTests(SimpleTestCase): @override_settings( INSTALLED_APPS=['migrations'], MIGRATION_MODULES={'migrations': None}, ) def test_ask_initial_with_disabled_migrations(self): questioner = MigrationQuestioner() self.assertIs(False, questioner.ask_initial('migrations')) def test_ask_not_null_alteration(self): questioner = MigrationQuestioner() self.assertIsNone(questioner.ask_not_null_alteration('field_name', 'model_name')) @mock.patch('builtins.input', return_value='2') def test_ask_not_null_alteration_not_provided(self, mock): questioner = InteractiveMigrationQuestioner(prompt_output=OutputWrapper(StringIO())) question = questioner.ask_not_null_alteration('field_name', 'model_name') self.assertEqual(question, NOT_PROVIDED) class QuestionerHelperMethodsTests(SimpleTestCase): def setUp(self): self.prompt = OutputWrapper(StringIO()) self.questioner = InteractiveMigrationQuestioner(prompt_output=self.prompt) @mock.patch('builtins.input', return_value='datetime.timedelta(days=1)') def test_questioner_default_timedelta(self, mock_input): value = self.questioner._ask_default() self.assertEqual(value, datetime.timedelta(days=1)) @mock.patch('builtins.input', return_value='') def test_questioner_default_no_user_entry(self, mock_input): value = self.questioner._ask_default(default='datetime.timedelta(days=1)') self.assertEqual(value, datetime.timedelta(days=1)) @mock.patch('builtins.input', side_effect=['', 'exit']) def test_questioner_no_default_no_user_entry(self, mock_input): with self.assertRaises(SystemExit): self.questioner._ask_default() self.assertIn( "Please enter some code, or 'exit' (without quotes) to exit.", self.prompt.getvalue(), ) @mock.patch('builtins.input', side_effect=['bad code', 'exit']) def test_questioner_no_default_bad_user_entry_code(self, mock_input): with self.assertRaises(SystemExit): self.questioner._ask_default() self.assertIn('Invalid input: ', self.prompt.getvalue()) @mock.patch('builtins.input', side_effect=['', 'n']) def test_questioner_no_default_no_user_entry_boolean(self, mock_input): value = self.questioner._boolean_input('Proceed?') self.assertIs(value, False) @mock.patch('builtins.input', return_value='') def test_questioner_default_no_user_entry_boolean(self, mock_input): value = self.questioner._boolean_input('Proceed?', default=True) self.assertIs(value, True) @mock.patch('builtins.input', side_effect=[10, 'garbage', 1]) def test_questioner_bad_user_choice(self, mock_input): question = 'Make a choice:' value = self.questioner._choice_input(question, choices='abc') expected_msg = ( f'{question}\n' f' 1) a\n' f' 2) b\n' f' 3) c\n' ) self.assertIn(expected_msg, self.prompt.getvalue()) self.assertEqual(value, 1)
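

class QuestionerUsageExampleTests(SimpleTestCase):
    """
    Illustrative addition (not part of the original suite): a hedged example
    of driving _ask_default() with a plain literal, mirroring the mocked-input
    pattern used in the helper-method tests above.
    """
    @mock.patch('builtins.input', return_value='42')
    def test_questioner_literal_default(self, mock_input):
        questioner = InteractiveMigrationQuestioner(prompt_output=OutputWrapper(StringIO()))
        self.assertEqual(questioner._ask_default(), 42)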
8bd1ddf879af7c9755c9f460085d92ad922823384ca6c3620d04e9cc429035f4
from unittest import mock from django.apps.registry import apps as global_apps from django.db import DatabaseError, connection, migrations, models from django.db.migrations.exceptions import InvalidMigrationPlan from django.db.migrations.executor import MigrationExecutor from django.db.migrations.graph import MigrationGraph from django.db.migrations.recorder import MigrationRecorder from django.db.migrations.state import ProjectState from django.test import ( SimpleTestCase, modify_settings, override_settings, skipUnlessDBFeature, ) from django.test.utils import isolate_lru_cache from .test_base import MigrationTestBase @modify_settings(INSTALLED_APPS={'append': 'migrations2'}) class ExecutorTests(MigrationTestBase): """ Tests the migration executor (full end-to-end running). Bear in mind that if these are failing you should fix the other test failures first, as they may be propagating into here. """ available_apps = ["migrations", "migrations2", "django.contrib.auth", "django.contrib.contenttypes"] @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_run(self): """ Tests running a simple set of migrations. """ executor = MigrationExecutor(connection) # Let's look at the plan first and make sure it's up to scratch plan = executor.migration_plan([("migrations", "0002_second")]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), (executor.loader.graph.nodes["migrations", "0002_second"], False), ], ) # Were the tables there before? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") # Alright, let's try running it executor.migrate([("migrations", "0002_second")]) # Are the tables there now? self.assertTableExists("migrations_author") self.assertTableExists("migrations_book") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Alright, let's undo what we did plan = executor.migration_plan([("migrations", None)]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0002_second"], True), (executor.loader.graph.nodes["migrations", "0001_initial"], True), ], ) executor.migrate([("migrations", None)]) # Are the tables gone? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_run_with_squashed(self): """ Tests running a squashed migration from zero (should ignore what it replaces) """ executor = MigrationExecutor(connection) # Check our leaf node is the squashed one leaves = [key for key in executor.loader.graph.leaf_nodes() if key[0] == "migrations"] self.assertEqual(leaves, [("migrations", "0001_squashed_0002")]) # Check the plan plan = executor.migration_plan([("migrations", "0001_squashed_0002")]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_squashed_0002"], False), ], ) # Were the tables there before? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") # Alright, let's try running it executor.migrate([("migrations", "0001_squashed_0002")]) # Are the tables there now? self.assertTableExists("migrations_author") self.assertTableExists("migrations_book") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Alright, let's undo what we did. Should also just use squashed. 
plan = executor.migration_plan([("migrations", None)]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_squashed_0002"], True), ], ) executor.migrate([("migrations", None)]) # Are the tables gone? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}, ) def test_migrate_backward_to_squashed_migration(self): executor = MigrationExecutor(connection) try: self.assertTableNotExists('migrations_author') self.assertTableNotExists('migrations_book') executor.migrate([('migrations', '0001_squashed_0002')]) self.assertTableExists('migrations_author') self.assertTableExists('migrations_book') executor.loader.build_graph() # Migrate backward to a squashed migration. executor.migrate([('migrations', '0001_initial')]) self.assertTableExists('migrations_author') self.assertTableNotExists('migrations_book') finally: # Unmigrate everything. executor = MigrationExecutor(connection) executor.migrate([('migrations', None)]) self.assertTableNotExists('migrations_author') self.assertTableNotExists('migrations_book') @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}) def test_non_atomic_migration(self): """ Applying a non-atomic migration works as expected. """ executor = MigrationExecutor(connection) with self.assertRaisesMessage(RuntimeError, "Abort migration"): executor.migrate([("migrations", "0001_initial")]) self.assertTableExists("migrations_publisher") migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps Publisher = migrations_apps.get_model("migrations", "Publisher") self.assertTrue(Publisher.objects.exists()) self.assertTableNotExists("migrations_book") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_atomic_operation"}) def test_atomic_operation_in_non_atomic_migration(self): """ An atomic operation is properly rolled back inside a non-atomic migration. """ executor = MigrationExecutor(connection) with self.assertRaisesMessage(RuntimeError, "Abort migration"): executor.migrate([("migrations", "0001_initial")]) migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps Editor = migrations_apps.get_model("migrations", "Editor") self.assertFalse(Editor.objects.exists()) # Record previous migration as successful. executor.migrate([("migrations", "0001_initial")], fake=True) # Rebuild the graph to reflect the new DB state. executor.loader.build_graph() # Migrating backwards is also atomic. with self.assertRaisesMessage(RuntimeError, "Abort migration"): executor.migrate([("migrations", None)]) self.assertFalse(Editor.objects.exists()) @override_settings(MIGRATION_MODULES={ "migrations": "migrations.test_migrations", "migrations2": "migrations2.test_migrations_2", }) def test_empty_plan(self): """ Re-planning a full migration of a fully-migrated set doesn't perform spurious unmigrations and remigrations. There was previously a bug where the executor just always performed the backwards plan for applied migrations - which even for the most recent migration in an app, might include other, dependent apps, and these were being unmigrated. 
""" # Make the initial plan, check it executor = MigrationExecutor(connection) plan = executor.migration_plan([ ("migrations", "0002_second"), ("migrations2", "0001_initial"), ]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), (executor.loader.graph.nodes["migrations", "0002_second"], False), (executor.loader.graph.nodes["migrations2", "0001_initial"], False), ], ) # Fake-apply all migrations executor.migrate([ ("migrations", "0002_second"), ("migrations2", "0001_initial") ], fake=True) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Now plan a second time and make sure it's empty plan = executor.migration_plan([ ("migrations", "0002_second"), ("migrations2", "0001_initial"), ]) self.assertEqual(plan, []) # The resulting state should include applied migrations. state = executor.migrate([ ("migrations", "0002_second"), ("migrations2", "0001_initial"), ]) self.assertIn(('migrations', 'book'), state.models) self.assertIn(('migrations', 'author'), state.models) self.assertIn(('migrations2', 'otherauthor'), state.models) # Erase all the fake records executor.recorder.record_unapplied("migrations2", "0001_initial") executor.recorder.record_unapplied("migrations", "0002_second") executor.recorder.record_unapplied("migrations", "0001_initial") @override_settings(MIGRATION_MODULES={ "migrations": "migrations.test_migrations", "migrations2": "migrations2.test_migrations_2_no_deps", }) def test_mixed_plan_not_supported(self): """ Although the MigrationExecutor interfaces allows for mixed migration plans (combined forwards and backwards migrations) this is not supported. """ # Prepare for mixed plan executor = MigrationExecutor(connection) plan = executor.migration_plan([("migrations", "0002_second")]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), (executor.loader.graph.nodes["migrations", "0002_second"], False), ], ) executor.migrate(None, plan) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() self.assertIn(('migrations', '0001_initial'), executor.loader.applied_migrations) self.assertIn(('migrations', '0002_second'), executor.loader.applied_migrations) self.assertNotIn(('migrations2', '0001_initial'), executor.loader.applied_migrations) # Generate mixed plan plan = executor.migration_plan([ ("migrations", None), ("migrations2", "0001_initial"), ]) msg = ( 'Migration plans with both forwards and backwards migrations are ' 'not supported. Please split your migration process into separate ' 'plans of only forwards OR backwards migrations.' ) with self.assertRaisesMessage(InvalidMigrationPlan, msg) as cm: executor.migrate(None, plan) self.assertEqual( cm.exception.args[1], [ (executor.loader.graph.nodes["migrations", "0002_second"], True), (executor.loader.graph.nodes["migrations", "0001_initial"], True), (executor.loader.graph.nodes["migrations2", "0001_initial"], False), ], ) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() executor.migrate([ ("migrations", None), ("migrations2", None), ]) # Are the tables gone? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") self.assertTableNotExists("migrations2_otherauthor") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_soft_apply(self): """ Tests detection of initial migrations already having been applied. 
""" state = {"faked": None} def fake_storer(phase, migration=None, fake=None): state["faked"] = fake executor = MigrationExecutor(connection, progress_callback=fake_storer) # Were the tables there before? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") # Run it normally self.assertEqual( executor.migration_plan([("migrations", "0001_initial")]), [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), ], ) executor.migrate([("migrations", "0001_initial")]) # Are the tables there now? self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") # We shouldn't have faked that one self.assertIs(state["faked"], False) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Fake-reverse that executor.migrate([("migrations", None)], fake=True) # Are the tables still there? self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") # Make sure that was faked self.assertIs(state["faked"], True) # Finally, migrate forwards; this should fake-apply our initial migration executor.loader.build_graph() self.assertEqual( executor.migration_plan([("migrations", "0001_initial")]), [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), ], ) # Applying the migration should raise a database level error # because we haven't given the --fake-initial option with self.assertRaises(DatabaseError): executor.migrate([("migrations", "0001_initial")]) # Reset the faked state state = {"faked": None} # Allow faking of initial CreateModel operations executor.migrate([("migrations", "0001_initial")], fake_initial=True) self.assertIs(state["faked"], True) # And migrate back to clean up the database executor.loader.build_graph() executor.migrate([("migrations", None)]) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") @override_settings( MIGRATION_MODULES={ "migrations": "migrations.test_migrations_custom_user", "django.contrib.auth": "django.contrib.auth.migrations", }, AUTH_USER_MODEL="migrations.Author", ) def test_custom_user(self): """ Regression test for #22325 - references to a custom user model defined in the same app are not resolved correctly. """ with isolate_lru_cache(global_apps.get_swappable_settings_name): executor = MigrationExecutor(connection) self.assertTableNotExists('migrations_author') self.assertTableNotExists('migrations_tribble') # Migrate forwards executor.migrate([('migrations', '0001_initial')]) self.assertTableExists('migrations_author') self.assertTableExists('migrations_tribble') # The soft-application detection works. # Change table_names to not return auth_user during this as it # wouldn't be there in a normal run, and ensure migrations.Author # exists in the global app registry temporarily. old_table_names = connection.introspection.table_names connection.introspection.table_names = lambda c: [ x for x in old_table_names(c) if x != 'auth_user' ] migrations_apps = executor.loader.project_state( ('migrations', '0001_initial'), ).apps global_apps.get_app_config('migrations').models['author'] = ( migrations_apps.get_model('migrations', 'author') ) try: migration = executor.loader.get_migration('auth', '0001_initial') self.assertIs(executor.detect_soft_applied(None, migration)[0], True) finally: connection.introspection.table_names = old_table_names del global_apps.get_app_config('migrations').models['author'] # Migrate back to clean up the database. 
executor.loader.build_graph() executor.migrate([('migrations', None)]) self.assertTableNotExists('migrations_author') self.assertTableNotExists('migrations_tribble') @override_settings( MIGRATION_MODULES={ "migrations": "migrations.test_add_many_to_many_field_initial", }, ) def test_detect_soft_applied_add_field_manytomanyfield(self): """ executor.detect_soft_applied() detects ManyToManyField tables from an AddField operation. This checks the case of AddField in a migration with other operations (0001) and the case of AddField in its own migration (0002). """ tables = [ # from 0001 "migrations_project", "migrations_task", "migrations_project_tasks", # from 0002 "migrations_task_projects", ] executor = MigrationExecutor(connection) # Create the tables for 0001 but make it look like the migration hasn't # been applied. executor.migrate([("migrations", "0001_initial")]) executor.migrate([("migrations", None)], fake=True) for table in tables[:3]: self.assertTableExists(table) # Table detection sees 0001 is applied but not 0002. migration = executor.loader.get_migration("migrations", "0001_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], True) migration = executor.loader.get_migration("migrations", "0002_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], False) # Create the tables for both migrations but make it look like neither # has been applied. executor.loader.build_graph() executor.migrate([("migrations", "0001_initial")], fake=True) executor.migrate([("migrations", "0002_initial")]) executor.loader.build_graph() executor.migrate([("migrations", None)], fake=True) # Table detection sees 0002 is applied. migration = executor.loader.get_migration("migrations", "0002_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], True) # Leave the tables for 0001 except the many-to-many table. That missing # table should cause detect_soft_applied() to return False. with connection.schema_editor() as editor: for table in tables[2:]: editor.execute(editor.sql_delete_table % {"table": table}) migration = executor.loader.get_migration("migrations", "0001_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], False) # Cleanup by removing the remaining tables. with connection.schema_editor() as editor: for table in tables[:2]: editor.execute(editor.sql_delete_table % {"table": table}) for table in tables: self.assertTableNotExists(table) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.lookuperror_a", "migrations.migrations_test_apps.lookuperror_b", "migrations.migrations_test_apps.lookuperror_c" ] ) def test_unrelated_model_lookups_forwards(self): """ #24123 - All models of apps already applied which are unrelated to the first app being applied are part of the initial model state. 
""" try: executor = MigrationExecutor(connection) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") executor.migrate([("lookuperror_b", "0003_b3")]) self.assertTableExists("lookuperror_b_b3") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Migrate forwards -- This led to a lookup LookupErrors because # lookuperror_b.B2 is already applied executor.migrate([ ("lookuperror_a", "0004_a4"), ("lookuperror_c", "0003_c3"), ]) self.assertTableExists("lookuperror_a_a4") self.assertTableExists("lookuperror_c_c3") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() finally: # Cleanup executor.migrate([ ("lookuperror_a", None), ("lookuperror_b", None), ("lookuperror_c", None), ]) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.lookuperror_a", "migrations.migrations_test_apps.lookuperror_b", "migrations.migrations_test_apps.lookuperror_c" ] ) def test_unrelated_model_lookups_backwards(self): """ #24123 - All models of apps being unapplied which are unrelated to the first app being unapplied are part of the initial model state. """ try: executor = MigrationExecutor(connection) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") executor.migrate([ ("lookuperror_a", "0004_a4"), ("lookuperror_b", "0003_b3"), ("lookuperror_c", "0003_c3"), ]) self.assertTableExists("lookuperror_b_b3") self.assertTableExists("lookuperror_a_a4") self.assertTableExists("lookuperror_c_c3") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Migrate backwards -- This led to a lookup LookupErrors because # lookuperror_b.B2 is not in the initial state (unrelated to app c) executor.migrate([("lookuperror_a", None)]) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() finally: # Cleanup executor.migrate([ ("lookuperror_b", None), ("lookuperror_c", None) ]) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") @override_settings( INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_a', 'migrations.migrations_test_apps.mutate_state_b', ] ) def test_unrelated_applied_migrations_mutate_state(self): """ #26647 - Unrelated applied migrations should be part of the final state in both directions. """ executor = MigrationExecutor(connection) executor.migrate([ ('mutate_state_b', '0002_add_field'), ]) # Migrate forward. executor.loader.build_graph() state = executor.migrate([ ('mutate_state_a', '0001_initial'), ]) self.assertIn('added', state.models['mutate_state_b', 'b'].fields) executor.loader.build_graph() # Migrate backward. state = executor.migrate([ ('mutate_state_a', None), ]) self.assertIn('added', state.models['mutate_state_b', 'b'].fields) executor.migrate([ ('mutate_state_b', None), ]) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_process_callback(self): """ #24129 - Tests callback process """ call_args_list = [] def callback(*args): call_args_list.append(args) executor = MigrationExecutor(connection, progress_callback=callback) # Were the tables there before? 
self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") executor.migrate([ ("migrations", "0001_initial"), ("migrations", "0002_second"), ]) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() executor.migrate([ ("migrations", None), ("migrations", None), ]) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") migrations = executor.loader.graph.nodes expected = [ ("render_start",), ("render_success",), ("apply_start", migrations['migrations', '0001_initial'], False), ("apply_success", migrations['migrations', '0001_initial'], False), ("apply_start", migrations['migrations', '0002_second'], False), ("apply_success", migrations['migrations', '0002_second'], False), ("render_start",), ("render_success",), ("unapply_start", migrations['migrations', '0002_second'], False), ("unapply_success", migrations['migrations', '0002_second'], False), ("unapply_start", migrations['migrations', '0001_initial'], False), ("unapply_success", migrations['migrations', '0001_initial'], False), ] self.assertEqual(call_args_list, expected) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.alter_fk.author_app", "migrations.migrations_test_apps.alter_fk.book_app", ] ) def test_alter_id_type_with_fk(self): try: executor = MigrationExecutor(connection) self.assertTableNotExists("author_app_author") self.assertTableNotExists("book_app_book") # Apply initial migrations executor.migrate([ ("author_app", "0001_initial"), ("book_app", "0001_initial"), ]) self.assertTableExists("author_app_author") self.assertTableExists("book_app_book") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Apply PK type alteration executor.migrate([("author_app", "0002_alter_id")]) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() finally: # We can't simply unapply the migrations here because there is no # implicit cast from VARCHAR to INT on the database level. with connection.schema_editor() as editor: editor.execute(editor.sql_delete_table % {"table": "book_app_book"}) editor.execute(editor.sql_delete_table % {"table": "author_app_author"}) self.assertTableNotExists("author_app_author") self.assertTableNotExists("book_app_book") executor.migrate([("author_app", None)], fake=True) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_apply_all_replaced_marks_replacement_as_applied(self): """ Applying all replaced migrations marks replacement as applied (#24628). """ recorder = MigrationRecorder(connection) # Place the database in a state where the replaced migrations are # partially applied: 0001 is applied, 0002 is not. recorder.record_applied("migrations", "0001_initial") executor = MigrationExecutor(connection) # Use fake because we don't actually have the first migration # applied, so the second will fail. And there's no need to actually # create/modify tables here, we're just testing the # MigrationRecord, which works the same with or without fake. executor.migrate([("migrations", "0002_second")], fake=True) # Because we've now applied 0001 and 0002 both, their squashed # replacement should be marked as applied. 
self.assertIn( ("migrations", "0001_squashed_0002"), recorder.applied_migrations(), ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_marks_replacement_applied_even_if_it_did_nothing(self): """ A new squash migration will be marked as applied even if all its replaced migrations were previously already applied (#24628). """ recorder = MigrationRecorder(connection) # Record all replaced migrations as applied recorder.record_applied("migrations", "0001_initial") recorder.record_applied("migrations", "0002_second") executor = MigrationExecutor(connection) executor.migrate([("migrations", "0001_squashed_0002")]) # Because 0001 and 0002 are both applied, even though this migrate run # didn't apply anything new, their squashed replacement should be # marked as applied. self.assertIn( ("migrations", "0001_squashed_0002"), recorder.applied_migrations(), ) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_migrate_marks_replacement_unapplied(self): executor = MigrationExecutor(connection) executor.migrate([('migrations', '0001_squashed_0002')]) try: self.assertIn( ('migrations', '0001_squashed_0002'), executor.recorder.applied_migrations(), ) finally: executor.loader.build_graph() executor.migrate([('migrations', None)]) self.assertNotIn( ('migrations', '0001_squashed_0002'), executor.recorder.applied_migrations(), ) # When the feature is False, the operation and the record won't be # performed in a transaction and the test will systematically pass. @skipUnlessDBFeature('can_rollback_ddl') def test_migrations_applied_and_recorded_atomically(self): """Migrations are applied and recorded atomically.""" class Migration(migrations.Migration): operations = [ migrations.CreateModel('model', [ ('id', models.AutoField(primary_key=True)), ]), ] executor = MigrationExecutor(connection) with mock.patch('django.db.migrations.executor.MigrationExecutor.record_migration') as record_migration: record_migration.side_effect = RuntimeError('Recording migration failed.') with self.assertRaisesMessage(RuntimeError, 'Recording migration failed.'): executor.apply_migration( ProjectState(), Migration('0001_initial', 'record_migration'), ) executor.migrate([('migrations', '0001_initial')]) # The migration isn't recorded as applied since it failed. migration_recorder = MigrationRecorder(connection) self.assertIs( migration_recorder.migration_qs.filter( app='record_migration', name='0001_initial', ).exists(), False, ) self.assertTableNotExists('record_migration_model') def test_migrations_not_applied_on_deferred_sql_failure(self): """Migrations are not recorded if deferred SQL application fails.""" class DeferredSQL: def __str__(self): raise DatabaseError('Failed to apply deferred SQL') class Migration(migrations.Migration): atomic = False def apply(self, project_state, schema_editor, collect_sql=False): schema_editor.deferred_sql.append(DeferredSQL()) executor = MigrationExecutor(connection) with self.assertRaisesMessage(DatabaseError, 'Failed to apply deferred SQL'): executor.apply_migration( ProjectState(), Migration('0001_initial', 'deferred_sql'), ) # The migration isn't recorded as applied since it failed. 
migration_recorder = MigrationRecorder(connection) self.assertIs( migration_recorder.migration_qs.filter( app='deferred_sql', name='0001_initial', ).exists(), False, ) @mock.patch.object(MigrationRecorder, 'has_table', return_value=False) def test_migrate_skips_schema_creation(self, mocked_has_table): """ The django_migrations table is not created if there are no migrations to record. """ executor = MigrationExecutor(connection) # 0 queries, since the query for has_table is being mocked. with self.assertNumQueries(0): executor.migrate([], plan=[]) class FakeLoader: def __init__(self, graph, applied): self.graph = graph self.applied_migrations = applied self.replace_migrations = True class FakeMigration: """Really all we need is any object with a debug-useful repr.""" def __init__(self, name): self.name = name def __repr__(self): return 'M<%s>' % self.name class ExecutorUnitTests(SimpleTestCase): """(More) isolated unit tests for executor methods.""" def test_minimize_rollbacks(self): """ Minimize unnecessary rollbacks in connected apps. When you say "./manage.py migrate appA 0001", rather than migrating to just after appA-0001 in the linearized migration plan (which could roll back migrations in other apps that depend on appA 0001, but don't need to be rolled back since we're not rolling back appA 0001), we migrate to just before appA-0002. """ a1_impl = FakeMigration('a1') a1 = ('a', '1') a2_impl = FakeMigration('a2') a2 = ('a', '2') b1_impl = FakeMigration('b1') b1 = ('b', '1') graph = MigrationGraph() graph.add_node(a1, a1_impl) graph.add_node(a2, a2_impl) graph.add_node(b1, b1_impl) graph.add_dependency(None, b1, a1) graph.add_dependency(None, a2, a1) executor = MigrationExecutor(None) executor.loader = FakeLoader(graph, { a1: a1_impl, b1: b1_impl, a2: a2_impl, }) plan = executor.migration_plan({a1}) self.assertEqual(plan, [(a2_impl, True)]) def test_minimize_rollbacks_branchy(self): r""" Minimize rollbacks when target has multiple in-app children. a: 1 <---- 3 <--\ \ \- 2 <--- 4 \ \ b: \- 1 <--- 2 """ a1_impl = FakeMigration('a1') a1 = ('a', '1') a2_impl = FakeMigration('a2') a2 = ('a', '2') a3_impl = FakeMigration('a3') a3 = ('a', '3') a4_impl = FakeMigration('a4') a4 = ('a', '4') b1_impl = FakeMigration('b1') b1 = ('b', '1') b2_impl = FakeMigration('b2') b2 = ('b', '2') graph = MigrationGraph() graph.add_node(a1, a1_impl) graph.add_node(a2, a2_impl) graph.add_node(a3, a3_impl) graph.add_node(a4, a4_impl) graph.add_node(b1, b1_impl) graph.add_node(b2, b2_impl) graph.add_dependency(None, a2, a1) graph.add_dependency(None, a3, a1) graph.add_dependency(None, a4, a2) graph.add_dependency(None, a4, a3) graph.add_dependency(None, b2, b1) graph.add_dependency(None, b1, a1) graph.add_dependency(None, b2, a2) executor = MigrationExecutor(None) executor.loader = FakeLoader(graph, { a1: a1_impl, b1: b1_impl, a2: a2_impl, b2: b2_impl, a3: a3_impl, a4: a4_impl, }) plan = executor.migration_plan({a1}) should_be_rolled_back = [b2_impl, a4_impl, a2_impl, a3_impl] exp = [(m, True) for m in should_be_rolled_back] self.assertEqual(plan, exp) def test_backwards_nothing_to_do(self): r""" If the current state satisfies the given target, do nothing. a: 1 <--- 2 b: \- 1 c: \- 1 If a1 is applied already and a2 is not, and we're asked to migrate to a1, don't apply or unapply b1 or c1, regardless of their current state. 
""" a1_impl = FakeMigration('a1') a1 = ('a', '1') a2_impl = FakeMigration('a2') a2 = ('a', '2') b1_impl = FakeMigration('b1') b1 = ('b', '1') c1_impl = FakeMigration('c1') c1 = ('c', '1') graph = MigrationGraph() graph.add_node(a1, a1_impl) graph.add_node(a2, a2_impl) graph.add_node(b1, b1_impl) graph.add_node(c1, c1_impl) graph.add_dependency(None, a2, a1) graph.add_dependency(None, b1, a1) graph.add_dependency(None, c1, a1) executor = MigrationExecutor(None) executor.loader = FakeLoader(graph, { a1: a1_impl, b1: b1_impl, }) plan = executor.migration_plan({a1}) self.assertEqual(plan, [])
48ed6566427f90539e313ff8b3f506c9623463ca857e6270cecf4e09c9abf164
import datetime import importlib import io import os import sys from unittest import mock from django.apps import apps from django.core.management import CommandError, call_command from django.db import ( ConnectionHandler, DatabaseError, OperationalError, connection, connections, models, ) from django.db.backends.base.schema import BaseDatabaseSchemaEditor from django.db.backends.utils import truncate_name from django.db.migrations.exceptions import InconsistentMigrationHistory from django.db.migrations.recorder import MigrationRecorder from django.test import TestCase, override_settings, skipUnlessDBFeature from django.test.utils import captured_stdout from django.utils import timezone from django.utils.version import get_docs_version from .models import UnicodeModel, UnserializableModel from .routers import TestRouter from .test_base import MigrationTestBase class MigrateTests(MigrationTestBase): """ Tests running the migrate command. """ databases = {'default', 'other'} @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_migrate(self): """ Tests basic usage of the migrate command. """ # No tables are created self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableNotExists("migrations_book") # Run the migrations to 0001 only stdout = io.StringIO() call_command('migrate', 'migrations', '0001', verbosity=2, stdout=stdout, no_color=True) stdout = stdout.getvalue() self.assertIn('Target specific migration: 0001_initial, from migrations', stdout) self.assertIn('Applying migrations.0001_initial... OK', stdout) self.assertIn('Running pre-migrate handlers for application migrations', stdout) self.assertIn('Running post-migrate handlers for application migrations', stdout) # The correct tables exist self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") self.assertTableNotExists("migrations_book") # Run migrations all the way call_command("migrate", verbosity=0) # The correct tables exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableExists("migrations_book") # Unmigrate everything stdout = io.StringIO() call_command('migrate', 'migrations', 'zero', verbosity=2, stdout=stdout, no_color=True) stdout = stdout.getvalue() self.assertIn('Unapply all migrations: migrations', stdout) self.assertIn('Unapplying migrations.0002_second... OK', stdout) self.assertIn('Running pre-migrate handlers for application migrations', stdout) self.assertIn('Running post-migrate handlers for application migrations', stdout) # Tables are gone self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableNotExists("migrations_book") @override_settings(INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'migrations.migrations_test_apps.migrated_app', ]) def test_migrate_with_system_checks(self): out = io.StringIO() call_command('migrate', skip_checks=False, no_color=True, stdout=out) self.assertIn('Apply all migrations: migrated_app', out.getvalue()) @override_settings(INSTALLED_APPS=['migrations', 'migrations.migrations_test_apps.unmigrated_app_syncdb']) def test_app_without_migrations(self): msg = "App 'unmigrated_app_syncdb' does not have migrations." 
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='unmigrated_app_syncdb')

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_clashing_prefix'})
    def test_ambiguous_prefix(self):
        msg = (
            "More than one migration matches 'a' in app 'migrations'. Please "
            "be more specific."
        )
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='migrations', migration_name='a')

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
    def test_unknown_prefix(self):
        msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='migrations', migration_name='nonexistent')

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"})
    def test_migrate_initial_false(self):
        """
        `Migration.initial = False` skips fake-initial detection.
        """
        # Make sure no tables are created
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Run the migrations to 0001 only
        call_command("migrate", "migrations", "0001", verbosity=0)
        # Fake rollback
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        # Make sure fake-initial detection does not run
        with self.assertRaises(DatabaseError):
            call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0)
        call_command("migrate", "migrations", "0001", fake=True, verbosity=0)
        # Real rollback
        call_command("migrate", "migrations", "zero", verbosity=0)
        # Make sure it's all gone
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations"},
        DATABASE_ROUTERS=['migrations.routers.TestRouter'],
    )
    def test_migrate_fake_initial(self):
        """
        --fake-initial only works if all tables created in the initial
        migration of an app exist. Database routers must be obeyed when doing
        that check.
        """
        # Make sure no tables are created
        for db in self.databases:
            self.assertTableNotExists("migrations_author", using=db)
            self.assertTableNotExists("migrations_tribble", using=db)
        # Run the migrations to 0001 only
        call_command("migrate", "migrations", "0001", verbosity=0)
        call_command("migrate", "migrations", "0001", verbosity=0, database="other")
        # Make sure the right tables exist
        self.assertTableExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Also check the "other" database
        self.assertTableNotExists("migrations_author", using="other")
        self.assertTableExists("migrations_tribble", using="other")
        # Fake a roll-back
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0, database="other")
        # Make sure the tables still exist
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble", using="other")
        # Try to run initial migration
        with self.assertRaises(DatabaseError):
            call_command("migrate", "migrations", "0001", verbosity=0)
        # Run initial migration with an explicit --fake-initial
        out = io.StringIO()
        with mock.patch('django.core.management.color.supports_color', lambda *args: False):
            call_command("migrate", "migrations", "0001", fake_initial=True, stdout=out, verbosity=1)
            call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0, database="other")
        self.assertIn(
            "migrations.0001_initial... faked",
            out.getvalue().lower()
        )
        try:
            # Run migrations all the way.
            call_command('migrate', verbosity=0)
            call_command('migrate', verbosity=0, database="other")
            self.assertTableExists('migrations_author')
            self.assertTableNotExists('migrations_tribble')
            self.assertTableExists('migrations_book')
            self.assertTableNotExists('migrations_author', using='other')
            self.assertTableNotExists('migrations_tribble', using='other')
            self.assertTableNotExists('migrations_book', using='other')
            # Fake a roll-back.
            call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0)
            call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0, database='other')
            self.assertTableExists('migrations_author')
            self.assertTableNotExists('migrations_tribble')
            self.assertTableExists('migrations_book')
            # Run initial migration.
            with self.assertRaises(DatabaseError):
                call_command('migrate', 'migrations', verbosity=0)
            # Run initial migration with an explicit --fake-initial.
            with self.assertRaises(DatabaseError):
                # Fails because "migrations_tribble" does not exist but needs
                # to in order to make --fake-initial work.
                call_command('migrate', 'migrations', fake_initial=True, verbosity=0)
            # Fake an apply.
            call_command('migrate', 'migrations', fake=True, verbosity=0)
            call_command('migrate', 'migrations', fake=True, verbosity=0, database='other')
        finally:
            # Unmigrate everything.
            call_command('migrate', 'migrations', 'zero', verbosity=0)
            call_command('migrate', 'migrations', 'zero', verbosity=0, database='other')
        # Make sure it's all gone
        for db in self.databases:
            self.assertTableNotExists("migrations_author", using=db)
            self.assertTableNotExists("migrations_tribble", using=db)
            self.assertTableNotExists("migrations_book", using=db)

    @skipUnlessDBFeature('ignores_table_name_case')
    def test_migrate_fake_initial_case_insensitive(self):
        with override_settings(MIGRATION_MODULES={
            'migrations': 'migrations.test_fake_initial_case_insensitive.initial',
        }):
            call_command('migrate', 'migrations', '0001', verbosity=0)
            call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0)
        with override_settings(MIGRATION_MODULES={
            'migrations': 'migrations.test_fake_initial_case_insensitive.fake_initial',
        }):
            out = io.StringIO()
            call_command(
                'migrate', 'migrations', '0001', fake_initial=True, stdout=out,
                verbosity=1, no_color=True,
            )
            self.assertIn(
                'migrations.0001_initial... faked',
                out.getvalue().lower(),
            )

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_fake_split_initial"})
    def test_migrate_fake_split_initial(self):
        """
        Split initial migrations can be faked with --fake-initial.
        """
        try:
            call_command('migrate', 'migrations', '0002', verbosity=0)
            call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0)
            out = io.StringIO()
            with mock.patch('django.core.management.color.supports_color', lambda *args: False):
                call_command('migrate', 'migrations', '0002', fake_initial=True, stdout=out, verbosity=1)
            value = out.getvalue().lower()
            self.assertIn('migrations.0001_initial... faked', value)
            self.assertIn('migrations.0002_second... faked', value)
        finally:
            # Fake an apply.
            call_command('migrate', 'migrations', fake=True, verbosity=0)
            # Unmigrate everything.
            call_command('migrate', 'migrations', 'zero', verbosity=0)

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"})
    def test_migrate_conflict_exit(self):
        """
        migrate exits if it detects a conflict.
""" msg = ( "Conflicting migrations detected; multiple leaf nodes in the " "migration graph: (0002_conflicting_second, 0002_second in " "migrations).\n" "To fix them run 'python manage.py makemigrations --merge'" ) with self.assertRaisesMessage(CommandError, msg): call_command("migrate", "migrations") @override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_migrations', }) def test_migrate_check(self): with self.assertRaises(SystemExit): call_command('migrate', 'migrations', '0001', check_unapplied=True) self.assertTableNotExists('migrations_author') self.assertTableNotExists('migrations_tribble') self.assertTableNotExists('migrations_book') @override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_migrations_plan', }) def test_migrate_check_plan(self): out = io.StringIO() with self.assertRaises(SystemExit): call_command( 'migrate', 'migrations', '0001', check_unapplied=True, plan=True, stdout=out, no_color=True, ) self.assertEqual( 'Planned operations:\n' 'migrations.0001_initial\n' ' Create model Salamander\n' ' Raw Python operation -> Grow salamander tail.\n', out.getvalue(), ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_showmigrations_list(self): """ showmigrations --list displays migrations and whether or not they're applied. """ out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: True): call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False) self.assertEqual( '\x1b[1mmigrations\n\x1b[0m' ' [ ] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) call_command("migrate", "migrations", "0001", verbosity=0) out = io.StringIO() # Giving the explicit app_label tests for selective `show_list` in the command call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) out = io.StringIO() # Applied datetimes are displayed at verbosity 2+. call_command('showmigrations', 'migrations', stdout=out, verbosity=2, no_color=True) migration1 = MigrationRecorder(connection).migration_qs.get(app='migrations', name='0001_initial') self.assertEqual( 'migrations\n' ' [x] 0001_initial (applied at %s)\n' ' [ ] 0002_second\n' % migration1.applied.strftime('%Y-%m-%d %H:%M:%S'), out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_showmigrations_list_squashed(self): out = io.StringIO() call_command('showmigrations', format='list', stdout=out, verbosity=2, no_color=True) self.assertEqual( 'migrations\n' ' [ ] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower(), ) out = io.StringIO() call_command( 'migrate', 'migrations', '0001_squashed_0002', stdout=out, verbosity=2, no_color=True, ) try: self.assertIn( 'operations to perform:\n' ' target specific migration: 0001_squashed_0002, from migrations\n' 'running pre-migrate handlers for application migrations\n' 'running migrations:\n' ' applying migrations.0001_squashed_0002... ok (', out.getvalue().lower(), ) out = io.StringIO() call_command('showmigrations', format='list', stdout=out, verbosity=2, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower(), ) finally: # Unmigrate everything. 
call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}) def test_showmigrations_plan(self): """ Tests --plan output of showmigrations command """ out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) call_command("migrate", "migrations", "0003", verbosity=0) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_plan'}) def test_migrate_plan(self): """Tests migrate --plan output.""" out = io.StringIO() # Show the plan up to the third migration. call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0001_initial\n' ' Create model Salamander\n' ' Raw Python operation -> Grow salamander tail.\n' 'migrations.0002_second\n' ' Create model Book\n' " Raw SQL operation -> ['SELECT * FROM migrations_book']\n" 'migrations.0003_third\n' ' Create model Author\n' " Raw SQL operation -> ['SELECT * FROM migrations_author']\n", out.getvalue() ) try: # Migrate to the third migration. call_command('migrate', 'migrations', '0003', verbosity=0) out = io.StringIO() # Show the plan for when there is nothing to apply. call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' ' No planned migration operations.\n', out.getvalue() ) out = io.StringIO() # Show the plan for reverse migration back to 0001. call_command('migrate', 'migrations', '0001', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0003_third\n' ' Undo Create model Author\n' " Raw SQL operation -> ['SELECT * FROM migrations_book']\n" 'migrations.0002_second\n' ' Undo Create model Book\n' " Raw SQL operation -> ['SELECT * FROM migrations_salamand…\n", out.getvalue() ) out = io.StringIO() # Show the migration plan to fourth, with truncated details. call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0004_fourth\n' ' Raw SQL operation -> SELECT * FROM migrations_author WHE…\n', out.getvalue() ) # Show the plan when an operation is irreversible. # Migrate to the fourth migration. 
call_command('migrate', 'migrations', '0004', verbosity=0) out = io.StringIO() call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0004_fourth\n' ' Raw SQL operation -> IRREVERSIBLE\n', out.getvalue() ) out = io.StringIO() call_command('migrate', 'migrations', '0005', plan=True, stdout=out, no_color=True) # Operation is marked as irreversible only in the revert plan. self.assertEqual( 'Planned operations:\n' 'migrations.0005_fifth\n' ' Raw Python operation\n' ' Raw Python operation\n' ' Raw Python operation -> Feed salamander.\n', out.getvalue() ) call_command('migrate', 'migrations', '0005', verbosity=0) out = io.StringIO() call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0005_fifth\n' ' Raw Python operation -> IRREVERSIBLE\n' ' Raw Python operation -> IRREVERSIBLE\n' ' Raw Python operation\n', out.getvalue() ) finally: # Cleanup by unmigrating everything: fake the irreversible, then # migrate all to zero. call_command('migrate', 'migrations', '0003', fake=True, verbosity=0) call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_empty'}) def test_showmigrations_no_migrations(self): out = io.StringIO() call_command('showmigrations', stdout=out, no_color=True) self.assertEqual('migrations\n (no migrations)\n', out.getvalue().lower()) @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app']) def test_showmigrations_unmigrated_app(self): out = io.StringIO() call_command('showmigrations', 'unmigrated_app', stdout=out, no_color=True) try: self.assertEqual('unmigrated_app\n (no migrations)\n', out.getvalue().lower()) finally: # unmigrated_app.SillyModel has a foreign key to # 'migrations.Tribble', but that model is only defined in a # migration, so the global app registry never sees it and the # reference is left dangling. Remove it to avoid problems in # subsequent tests. apps._pending_operations.pop(('migrations', 'tribble'), None) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}) def test_showmigrations_plan_no_migrations(self): """ Tests --plan output of showmigrations command without migrations """ out = io.StringIO() call_command('showmigrations', format='plan', stdout=out, no_color=True) self.assertEqual('(no migrations)\n', out.getvalue().lower()) out = io.StringIO() call_command('showmigrations', format='plan', stdout=out, verbosity=2, no_color=True) self.assertEqual('(no migrations)\n', out.getvalue().lower()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}) def test_showmigrations_plan_squashed(self): """ Tests --plan output of showmigrations command with squashed migrations. """ out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[ ] migrations.1_auto\n" "[ ] migrations.2_auto\n" "[ ] migrations.3_squashed_5\n" "[ ] migrations.6_auto\n" "[ ] migrations.7_auto\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[ ] migrations.1_auto\n" "[ ] migrations.2_auto ... (migrations.1_auto)\n" "[ ] migrations.3_squashed_5 ... (migrations.2_auto)\n" "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n" "[ ] migrations.7_auto ... 
(migrations.6_auto)\n", out.getvalue().lower() ) call_command("migrate", "migrations", "3_squashed_5", verbosity=0) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[x] migrations.1_auto\n" "[x] migrations.2_auto\n" "[x] migrations.3_squashed_5\n" "[ ] migrations.6_auto\n" "[ ] migrations.7_auto\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[x] migrations.1_auto\n" "[x] migrations.2_auto ... (migrations.1_auto)\n" "[x] migrations.3_squashed_5 ... (migrations.2_auto)\n" "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n" "[ ] migrations.7_auto ... (migrations.6_auto)\n", out.getvalue().lower() ) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_b', 'migrations.migrations_test_apps.alter_fk.author_app', 'migrations.migrations_test_apps.alter_fk.book_app', ]) def test_showmigrations_plan_single_app_label(self): """ `showmigrations --plan app_label` output with a single app_label. """ # Single app with no dependencies on other apps. out = io.StringIO() call_command('showmigrations', 'mutate_state_b', format='plan', stdout=out) self.assertEqual( '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) # Single app with dependencies. out = io.StringIO() call_command('showmigrations', 'author_app', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n', out.getvalue() ) # Some migrations already applied. call_command('migrate', 'author_app', '0001', verbosity=0) out = io.StringIO() call_command('showmigrations', 'author_app', format='plan', stdout=out) self.assertEqual( '[X] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n', out.getvalue() ) # Cleanup by unmigrating author_app. call_command('migrate', 'author_app', 'zero', verbosity=0) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_b', 'migrations.migrations_test_apps.alter_fk.author_app', 'migrations.migrations_test_apps.alter_fk.book_app', ]) def test_showmigrations_plan_multiple_app_labels(self): """ `showmigrations --plan app_label` output with multiple app_labels. """ # Multiple apps: author_app depends on book_app; mutate_state_b doesn't # depend on other apps. out = io.StringIO() call_command('showmigrations', 'mutate_state_b', 'author_app', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n' '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) # Multiple apps: args order shouldn't matter (the same result is # expected as above). 
out = io.StringIO() call_command('showmigrations', 'author_app', 'mutate_state_b', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n' '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app']) def test_showmigrations_plan_app_label_no_migrations(self): out = io.StringIO() call_command('showmigrations', 'unmigrated_app', format='plan', stdout=out, no_color=True) try: self.assertEqual('(no migrations)\n', out.getvalue()) finally: # unmigrated_app.SillyModel has a foreign key to # 'migrations.Tribble', but that model is only defined in a # migration, so the global app registry never sees it and the # reference is left dangling. Remove it to avoid problems in # subsequent tests. apps._pending_operations.pop(('migrations', 'tribble'), None) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_sqlmigrate_forwards(self): """ sqlmigrate outputs forward looking SQL. """ out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_author = output.find('-- create model author') index_create_table = output.find('create table') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_unique_together = output.find('-- alter unique_together') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) if connection.features.can_rollback_ddl: self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before operation description (unique_together)" ) self.assertGreater( index_op_desc_author, index_tx_start, "Operation description (author) not found or found before transaction start" ) self.assertGreater( index_create_table, index_op_desc_author, "CREATE TABLE not found or found before operation description (author)" ) self.assertGreater( index_op_desc_tribble, index_create_table, "Operation description (tribble) not found or found before CREATE TABLE (author)" ) self.assertGreater( index_op_desc_unique_together, index_op_desc_tribble, "Operation description (unique_together) not found or found before operation description (tribble)" ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_sqlmigrate_backwards(self): """ sqlmigrate outputs reverse looking SQL. """ # Cannot generate the reverse SQL unless we've applied the migration. 
call_command("migrate", "migrations", verbosity=0) out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_unique_together = output.find('-- alter unique_together') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_author = output.find('-- create model author') index_drop_table = output.rfind('drop table') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) if connection.features.can_rollback_ddl: self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before DROP TABLE" ) self.assertGreater( index_op_desc_unique_together, index_tx_start, "Operation description (unique_together) not found or found before transaction start" ) self.assertGreater( index_op_desc_tribble, index_op_desc_unique_together, "Operation description (tribble) not found or found before operation description (unique_together)" ) self.assertGreater( index_op_desc_author, index_op_desc_tribble, "Operation description (author) not found or found before operation description (tribble)" ) self.assertGreater( index_drop_table, index_op_desc_author, "DROP TABLE not found or found before operation description (author)" ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}) def test_sqlmigrate_for_non_atomic_migration(self): """ Transaction wrappers aren't shown for non-atomic migrations. """ out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() queries = [q.strip() for q in output.splitlines()] if connection.ops.start_transaction_sql(): self.assertNotIn(connection.ops.start_transaction_sql().lower(), queries) self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_sqlmigrate_for_non_transactional_databases(self): """ Transaction wrappers aren't shown for databases that don't support transactional DDL. """ out = io.StringIO() with mock.patch.object(connection.features, 'can_rollback_ddl', False): call_command('sqlmigrate', 'migrations', '0001', stdout=out) output = out.getvalue().lower() queries = [q.strip() for q in output.splitlines()] start_transaction_sql = connection.ops.start_transaction_sql() if start_transaction_sql: self.assertNotIn(start_transaction_sql.lower(), queries) self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_sqlmigrate_ambiguous_prefix_squashed_migrations(self): msg = ( "More than one migration matches '0001' in app 'migrations'. " "Please be more specific." 
) with self.assertRaisesMessage(CommandError, msg): call_command('sqlmigrate', 'migrations', '0001') @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_sqlmigrate_squashed_migration(self): out = io.StringIO() call_command('sqlmigrate', 'migrations', '0001_squashed_0002', stdout=out) output = out.getvalue().lower() self.assertIn('-- create model author', output) self.assertIn('-- create model book', output) self.assertNotIn('-- create model tribble', output) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_sqlmigrate_replaced_migration(self): out = io.StringIO() call_command('sqlmigrate', 'migrations', '0001_initial', stdout=out) output = out.getvalue().lower() self.assertIn('-- create model author', output) self.assertIn('-- create model tribble', output) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_no_operations'}) def test_migrations_no_operations(self): err = io.StringIO() call_command('sqlmigrate', 'migrations', '0001_initial', stderr=err) self.assertEqual(err.getvalue(), 'No operations found.\n') @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.migrated_unapplied_app", "migrations.migrations_test_apps.unmigrated_app", ], ) def test_regression_22823_unmigrated_fk_to_migrated_model(self): """ Assuming you have 3 apps, `A`, `B`, and `C`, such that: * `A` has migrations * `B` has a migration we want to apply * `C` has no migrations, but has an FK to `A` When we try to migrate "B", an exception occurs because the "B" was not included in the ProjectState that is used to detect soft-applied migrations (#22823). """ call_command('migrate', 'migrated_unapplied_app', verbosity=0) # unmigrated_app.SillyModel has a foreign key to 'migrations.Tribble', # but that model is only defined in a migration, so the global app # registry never sees it and the reference is left dangling. Remove it # to avoid problems in subsequent tests. apps._pending_operations.pop(('migrations', 'tribble'), None) @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app_syncdb']) def test_migrate_syncdb_deferred_sql_executed_with_schemaeditor(self): """ For an app without migrations, editor.execute() is used for executing the syncdb deferred SQL. """ stdout = io.StringIO() with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute: call_command('migrate', run_syncdb=True, verbosity=1, stdout=stdout, no_color=True) create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)]) self.assertEqual(create_table_count, 2) # There's at least one deferred SQL for creating the foreign key # index. self.assertGreater(len(execute.mock_calls), 2) stdout = stdout.getvalue() self.assertIn('Synchronize unmigrated apps: unmigrated_app_syncdb', stdout) self.assertIn('Creating tables...', stdout) table_name = truncate_name('unmigrated_app_syncdb_classroom', connection.ops.max_name_length()) self.assertIn('Creating table %s' % table_name, stdout) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_migrate_syncdb_app_with_migrations(self): msg = "Can't use run_syncdb with app 'migrations' as it has migrations." 
with self.assertRaisesMessage(CommandError, msg): call_command('migrate', 'migrations', run_syncdb=True, verbosity=0) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.unmigrated_app_syncdb', 'migrations.migrations_test_apps.unmigrated_app_simple', ]) def test_migrate_syncdb_app_label(self): """ Running migrate --run-syncdb with an app_label only creates tables for the specified app. """ stdout = io.StringIO() with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute: call_command('migrate', 'unmigrated_app_syncdb', run_syncdb=True, stdout=stdout) create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)]) self.assertEqual(create_table_count, 2) self.assertGreater(len(execute.mock_calls), 2) self.assertIn('Synchronize unmigrated app: unmigrated_app_syncdb', stdout.getvalue()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_record_replaced(self): """ Running a single squashed migration should record all of the original replaced migrations as run. """ recorder = MigrationRecorder(connection) out = io.StringIO() call_command("migrate", "migrations", verbosity=0) call_command("showmigrations", "migrations", stdout=out, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower() ) applied_migrations = recorder.applied_migrations() self.assertIn(("migrations", "0001_initial"), applied_migrations) self.assertIn(("migrations", "0002_second"), applied_migrations) self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations) # Rollback changes call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_record_squashed(self): """ Running migrate for a squashed migration should record as run if all of the replaced migrations have been run (#25231). """ recorder = MigrationRecorder(connection) recorder.record_applied("migrations", "0001_initial") recorder.record_applied("migrations", "0002_second") out = io.StringIO() call_command('showmigrations', 'migrations', stdout=out, no_color=True) self.assertEqual( "migrations\n" " [-] 0001_squashed_0002 (2 squashed migrations) " "run 'manage.py migrate' to finish recording.\n", out.getvalue().lower(), ) out = io.StringIO() call_command("migrate", "migrations", verbosity=0) call_command("showmigrations", "migrations", stdout=out, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower() ) self.assertIn( ("migrations", "0001_squashed_0002"), recorder.applied_migrations() ) # No changes were actually applied so there is nothing to rollback def test_migrate_partially_applied_squashed_migration(self): """ Migrating to a squashed migration specified by name should succeed even if it is partially applied. """ with self.temporary_migration_module(module='migrations.test_migrations'): recorder = MigrationRecorder(connection) try: call_command('migrate', 'migrations', '0001_initial', verbosity=0) call_command( 'squashmigrations', 'migrations', '0002', interactive=False, verbosity=0, ) call_command( 'migrate', 'migrations', '0001_squashed_0002_second', verbosity=0, ) applied_migrations = recorder.applied_migrations() self.assertIn(('migrations', '0002_second'), applied_migrations) finally: # Unmigrate everything. 
call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_migrate_backward_to_squashed_migration(self): try: call_command('migrate', 'migrations', '0001_squashed_0002', verbosity=0) self.assertTableExists('migrations_author') self.assertTableExists('migrations_book') call_command('migrate', 'migrations', '0001_initial', verbosity=0) self.assertTableExists('migrations_author') self.assertTableNotExists('migrations_book') finally: # Unmigrate everything. call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_migrate_inconsistent_history(self): """ Running migrate with some migrations applied before their dependencies should not be allowed. """ recorder = MigrationRecorder(connection) recorder.record_applied("migrations", "0002_second") msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("migrate") applied_migrations = recorder.applied_migrations() self.assertNotIn(("migrations", "0001_initial"), applied_migrations) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.migrated_unapplied_app', 'migrations.migrations_test_apps.migrated_app', ]) def test_migrate_not_reflected_changes(self): class NewModel1(models.Model): class Meta(): app_label = 'migrated_app' class NewModel2(models.Model): class Meta(): app_label = 'migrated_unapplied_app' out = io.StringIO() try: call_command('migrate', verbosity=0) call_command('migrate', stdout=out, no_color=True) self.assertEqual( "operations to perform:\n" " apply all migrations: migrated_app, migrated_unapplied_app\n" "running migrations:\n" " no migrations to apply.\n" " your models in app(s): 'migrated_app', " "'migrated_unapplied_app' have changes that are not yet " "reflected in a migration, and so won't be applied.\n" " run 'manage.py makemigrations' to make new migrations, and " "then re-run 'manage.py migrate' to apply them.\n", out.getvalue().lower(), ) finally: # Unmigrate everything. call_command('migrate', 'migrated_app', 'zero', verbosity=0) call_command('migrate', 'migrated_unapplied_app', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_migrations_squashed_no_replaces', }) def test_migrate_prune(self): """ With prune=True, references to migration files deleted from the migrations module (such as after being squashed) are removed from the django_migrations table. 
""" recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0001_initial') recorder.record_applied('migrations', '0002_second') recorder.record_applied('migrations', '0001_squashed_0002') out = io.StringIO() try: call_command('migrate', 'migrations', prune=True, stdout=out, no_color=True) self.assertEqual( out.getvalue(), 'Pruning migrations:\n' ' Pruning migrations.0001_initial OK\n' ' Pruning migrations.0002_second OK\n', ) applied_migrations = [ migration for migration in recorder.applied_migrations() if migration[0] == 'migrations' ] self.assertEqual(applied_migrations, [('migrations', '0001_squashed_0002')]) finally: recorder.record_unapplied('migrations', '0001_initial') recorder.record_unapplied('migrations', '0001_second') recorder.record_unapplied('migrations', '0001_squashed_0002') @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_prune_deleted_squashed_migrations_in_replaces(self): out = io.StringIO() with self.temporary_migration_module( module='migrations.test_migrations_squashed' ) as migration_dir: try: call_command('migrate', 'migrations', verbosity=0) # Delete the replaced migrations. os.remove(os.path.join(migration_dir, '0001_initial.py')) os.remove(os.path.join(migration_dir, '0002_second.py')) # --prune cannot be used before removing the "replaces" # attribute. call_command( 'migrate', 'migrations', prune=True, stdout=out, no_color=True, ) self.assertEqual( out.getvalue(), "Pruning migrations:\n" " Cannot use --prune because the following squashed " "migrations have their 'replaces' attributes and may not " "be recorded as applied:\n" " migrations.0001_squashed_0002\n" " Re-run 'manage.py migrate' if they are not marked as " "applied, and remove 'replaces' attributes in their " "Migration classes.\n" ) finally: # Unmigrate everything. call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'} ) def test_prune_no_migrations_to_prune(self): out = io.StringIO() call_command('migrate', 'migrations', prune=True, stdout=out, no_color=True) self.assertEqual( out.getvalue(), 'Pruning migrations:\n' ' No migrations to prune.\n', ) out = io.StringIO() call_command( 'migrate', 'migrations', prune=True, stdout=out, no_color=True, verbosity=0, ) self.assertEqual(out.getvalue(), '') def test_prune_no_app_label(self): msg = 'Migrations can be pruned only when an app is specified.' with self.assertRaisesMessage(CommandError, msg): call_command('migrate', prune=True) class MakeMigrationsTests(MigrationTestBase): """ Tests running the makemigrations command. 
""" def setUp(self): super().setUp() self._old_models = apps.app_configs['migrations'].models.copy() def tearDown(self): apps.app_configs['migrations'].models = self._old_models apps.all_models['migrations'] = self._old_models apps.clear_cache() super().tearDown() def test_files_content(self): self.assertTableNotExists("migrations_unicodemodel") apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) # Check for empty __init__.py file in migrations folder init_file = os.path.join(migration_dir, "__init__.py") self.assertTrue(os.path.exists(init_file)) with open(init_file) as fp: content = fp.read() self.assertEqual(content, '') # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with open(initial_file, encoding='utf-8') as fp: content = fp.read() self.assertIn('migrations.CreateModel', content) self.assertIn('initial = True', content) self.assertIn('úñí©óðé µóðéø', content) # Meta.verbose_name self.assertIn('úñí©óðé µóðéøß', content) # Meta.verbose_name_plural self.assertIn('ÚÑÍ¢ÓÐÉ', content) # title.verbose_name self.assertIn('“Ðjáñgó”', content) # title.default def test_makemigrations_order(self): """ makemigrations should recognize number-only migrations (0001.py). """ module = 'migrations.test_migrations_order' with self.temporary_migration_module(module=module) as migration_dir: if hasattr(importlib, 'invalidate_caches'): # importlib caches os.listdir() on some platforms like macOS # (#23850). importlib.invalidate_caches() call_command('makemigrations', 'migrations', '--empty', '-n', 'a', '-v', '0') self.assertTrue(os.path.exists(os.path.join(migration_dir, '0002_a.py'))) def test_makemigrations_empty_connections(self): empty_connections = ConnectionHandler({'default': {}}) with mock.patch('django.core.management.commands.makemigrations.connections', new=empty_connections): # with no apps out = io.StringIO() call_command('makemigrations', stdout=out) self.assertIn('No changes detected', out.getvalue()) # with an app with self.temporary_migration_module() as migration_dir: call_command('makemigrations', 'migrations', verbosity=0) init_file = os.path.join(migration_dir, '__init__.py') self.assertTrue(os.path.exists(init_file)) @override_settings(INSTALLED_APPS=['migrations', 'migrations2']) def test_makemigrations_consistency_checks_respect_routers(self): """ The history consistency checks in makemigrations respect settings.DATABASE_ROUTERS. """ def patched_has_table(migration_recorder): if migration_recorder.connection is connections['other']: raise Exception('Other connection') else: return mock.DEFAULT self.assertTableNotExists('migrations_unicodemodel') apps.register_model('migrations', UnicodeModel) with mock.patch.object( MigrationRecorder, 'has_table', autospec=True, side_effect=patched_has_table) as has_table: with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) self.assertEqual(has_table.call_count, 1) # 'default' is checked # Router says not to migrate 'other' so consistency shouldn't # be checked. 
with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']): call_command('makemigrations', 'migrations', verbosity=0) self.assertEqual(has_table.call_count, 2) # 'default' again # With a router that doesn't prohibit migrating 'other', # consistency is checked. with self.settings(DATABASE_ROUTERS=['migrations.routers.DefaultOtherRouter']): with self.assertRaisesMessage(Exception, 'Other connection'): call_command('makemigrations', 'migrations', verbosity=0) self.assertEqual(has_table.call_count, 4) # 'default' and 'other' # With a router that doesn't allow migrating on any database, # no consistency checks are made. with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']): with mock.patch.object(TestRouter, 'allow_migrate', return_value=False) as allow_migrate: call_command('makemigrations', 'migrations', verbosity=0) allow_migrate.assert_any_call('other', 'migrations', model_name='UnicodeModel') # allow_migrate() is called with the correct arguments. self.assertGreater(len(allow_migrate.mock_calls), 0) called_aliases = set() for mock_call in allow_migrate.mock_calls: _, call_args, call_kwargs = mock_call connection_alias, app_name = call_args called_aliases.add(connection_alias) # Raises an error if invalid app_name/model_name occurs. apps.get_app_config(app_name).get_model(call_kwargs['model_name']) self.assertEqual(called_aliases, set(connections)) self.assertEqual(has_table.call_count, 4) def test_failing_migration(self): # If a migration fails to serialize, it shouldn't generate an empty file. #21280 apps.register_model('migrations', UnserializableModel) with self.temporary_migration_module() as migration_dir: with self.assertRaisesMessage(ValueError, 'Cannot serialize'): call_command("makemigrations", "migrations", verbosity=0) initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertFalse(os.path.exists(initial_file)) def test_makemigrations_conflict_exit(self): """ makemigrations exits if it detects a conflict. """ with self.temporary_migration_module(module="migrations.test_migrations_conflict"): with self.assertRaises(CommandError) as context: call_command("makemigrations") self.assertEqual( str(context.exception), "Conflicting migrations detected; multiple leaf nodes in the " "migration graph: (0002_conflicting_second, 0002_second in " "migrations).\n" "To fix them run 'python manage.py makemigrations --merge'" ) def test_makemigrations_merge_no_conflict(self): """ makemigrations exits if in merge mode with no conflicts. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("makemigrations", merge=True, stdout=out) self.assertIn("No conflicts detected to merge.", out.getvalue()) def test_makemigrations_empty_no_app_specified(self): """ makemigrations exits if no app is specified with 'empty' mode. """ msg = 'You must supply at least one app label when using --empty.' with self.assertRaisesMessage(CommandError, msg): call_command("makemigrations", empty=True) def test_makemigrations_empty_migration(self): """ makemigrations properly constructs an empty migration. 
""" with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", empty=True, verbosity=0) # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with open(initial_file, encoding='utf-8') as fp: content = fp.read() # Remove all whitespace to check for empty dependencies and operations content = content.replace(' ', '') self.assertIn('dependencies=[\n]', content) self.assertIn('operations=[\n]', content) @override_settings(MIGRATION_MODULES={"migrations": None}) def test_makemigrations_disabled_migrations_for_app(self): """ makemigrations raises a nice error when migrations are disabled for an app. """ msg = ( "Django can't create migrations for app 'migrations' because migrations " "have been disabled via the MIGRATION_MODULES setting." ) with self.assertRaisesMessage(ValueError, msg): call_command("makemigrations", "migrations", empty=True, verbosity=0) def test_makemigrations_no_changes_no_apps(self): """ makemigrations exits when there are no changes and no apps are specified. """ out = io.StringIO() call_command("makemigrations", stdout=out) self.assertIn("No changes detected", out.getvalue()) def test_makemigrations_no_changes(self): """ makemigrations exits when there are no changes to an app. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", stdout=out) self.assertIn("No changes detected in app 'migrations'", out.getvalue()) def test_makemigrations_no_apps_initial(self): """ makemigrations should detect initial is needed on empty migration modules if no app provided. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_empty"): call_command("makemigrations", stdout=out) self.assertIn("0001_initial.py", out.getvalue()) def test_makemigrations_no_init(self): """Migration directories without an __init__.py file are allowed.""" out = io.StringIO() with self.temporary_migration_module(module='migrations.test_migrations_no_init'): call_command('makemigrations', stdout=out) self.assertIn('0001_initial.py', out.getvalue()) def test_makemigrations_migrations_announce(self): """ makemigrations announces the migration at the default verbosity level. """ out = io.StringIO() with self.temporary_migration_module(): call_command("makemigrations", "migrations", stdout=out) self.assertIn("Migrations for 'migrations'", out.getvalue()) def test_makemigrations_no_common_ancestor(self): """ makemigrations fails to merge migrations with no common ancestor. """ with self.assertRaises(ValueError) as context: with self.temporary_migration_module(module="migrations.test_migrations_no_ancestor"): call_command("makemigrations", "migrations", merge=True) exception_message = str(context.exception) self.assertIn("Could not find common ancestor of", exception_message) self.assertIn("0002_second", exception_message) self.assertIn("0002_conflicting_second", exception_message) def test_makemigrations_interactive_reject(self): """ makemigrations enters and exits interactive mode properly. 
""" # Monkeypatch interactive questioner to auto reject with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: with captured_stdout(): call_command( 'makemigrations', 'migrations', name='merge', merge=True, interactive=True, verbosity=0, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) def test_makemigrations_interactive_accept(self): """ makemigrations enters interactive mode and merges properly. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) def test_makemigrations_default_merge_name(self): out = io.StringIO() with self.temporary_migration_module( module='migrations.test_migrations_conflict' ) as migration_dir: call_command('makemigrations', 'migrations', merge=True, interactive=False, stdout=out) merge_file = os.path.join( migration_dir, '0003_merge_0002_conflicting_second_0002_second.py', ) self.assertIs(os.path.exists(merge_file), True) self.assertIn('Created new merge migration %s' % merge_file, out.getvalue()) @mock.patch('django.db.migrations.utils.datetime') def test_makemigrations_auto_merge_name(self, mock_datetime): mock_datetime.datetime.now.return_value = datetime.datetime(2016, 1, 2, 3, 4) with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module( module='migrations.test_migrations_conflict_long_name' ) as migration_dir: call_command("makemigrations", "migrations", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge_20160102_0304.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) def test_makemigrations_non_interactive_not_null_addition(self): """ Non-interactive makemigrations fails when a default is missing on a new not-null field. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_int = models.IntegerField() class Meta: app_label = "migrations" with self.assertRaises(SystemExit): with self.temporary_migration_module(module="migrations.test_migrations_no_default"): with captured_stdout() as out: call_command('makemigrations', 'migrations', interactive=False) self.assertIn( "Field 'silly_int' on model 'sillymodel' not migrated: it is " "impossible to add a non-nullable field without specifying a " "default.", out.getvalue(), ) def test_makemigrations_interactive_not_null_addition(self): """ makemigrations messages when adding a NOT NULL field in interactive mode. """ class Author(models.Model): silly_field = models.BooleanField(null=False) class Meta: app_label = 'migrations' input_msg = ( "It is impossible to add a non-nullable field 'silly_field' to " "author without specifying a default. This is because the " "database needs something to populate existing rows.\n" "Please select a fix:\n" " 1) Provide a one-off default now (will be set on all existing " "rows with a null value for this column)\n" " 2) Quit and manually define a default value in models.py." 
) with self.temporary_migration_module(module='migrations.test_migrations'): # 2 - quit. with mock.patch('builtins.input', return_value='2'): with captured_stdout() as out, self.assertRaises(SystemExit): call_command('makemigrations', 'migrations', interactive=True) self.assertIn(input_msg, out.getvalue()) # 1 - provide a default. with mock.patch('builtins.input', return_value='1'): with captured_stdout() as out: call_command('makemigrations', 'migrations', interactive=True) output = out.getvalue() self.assertIn(input_msg, output) self.assertIn('Please enter the default value as valid Python.', output) self.assertIn( 'The datetime and django.utils.timezone modules are ' 'available, so it is possible to provide e.g. timezone.now as ' 'a value', output, ) self.assertIn("Type 'exit' to exit this prompt", output) def test_makemigrations_non_interactive_not_null_alteration(self): """ Non-interactive makemigrations fails when a default is missing on a field changed to not-null. """ class Author(models.Model): name = models.CharField(max_length=255) slug = models.SlugField() age = models.IntegerField(default=0) class Meta: app_label = "migrations" with self.temporary_migration_module(module="migrations.test_migrations"): with captured_stdout() as out: call_command('makemigrations', 'migrations', interactive=False) self.assertIn("Alter field slug on author", out.getvalue()) self.assertIn( "Field 'slug' on model 'author' given a default of NOT PROVIDED " "and must be corrected.", out.getvalue(), ) def test_makemigrations_interactive_not_null_alteration(self): """ makemigrations messages when changing a NULL field to NOT NULL in interactive mode. """ class Author(models.Model): slug = models.SlugField(null=False) class Meta: app_label = 'migrations' input_msg = ( "It is impossible to change a nullable field 'slug' on author to " "non-nullable without providing a default. This is because the " "database needs something to populate existing rows.\n" "Please select a fix:\n" " 1) Provide a one-off default now (will be set on all existing " "rows with a null value for this column)\n" " 2) Ignore for now. Existing rows that contain NULL values will " "have to be handled manually, for example with a RunPython or " "RunSQL operation.\n" " 3) Quit and manually define a default value in models.py." ) with self.temporary_migration_module(module='migrations.test_migrations'): # No message appears if --dry-run. with captured_stdout() as out: call_command( 'makemigrations', 'migrations', interactive=True, dry_run=True, ) self.assertNotIn(input_msg, out.getvalue()) # 3 - quit. with mock.patch('builtins.input', return_value='3'): with captured_stdout() as out, self.assertRaises(SystemExit): call_command('makemigrations', 'migrations', interactive=True) self.assertIn(input_msg, out.getvalue()) # 1 - provide a default. with mock.patch('builtins.input', return_value='1'): with captured_stdout() as out: call_command('makemigrations', 'migrations', interactive=True) output = out.getvalue() self.assertIn(input_msg, output) self.assertIn('Please enter the default value as valid Python.', output) self.assertIn( 'The datetime and django.utils.timezone modules are ' 'available, so it is possible to provide e.g. timezone.now as ' 'a value', output, ) self.assertIn("Type 'exit' to exit this prompt", output) def test_makemigrations_non_interactive_no_model_rename(self): """ makemigrations adds and removes a possible model rename in non-interactive mode. 
""" class RenamedModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Delete model SillyModel", out.getvalue()) self.assertIn("Create model RenamedModel", out.getvalue()) def test_makemigrations_non_interactive_no_field_rename(self): """ makemigrations adds and removes a possible field rename in non-interactive mode. """ class SillyModel(models.Model): silly_rename = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Remove field silly_field from sillymodel", out.getvalue()) self.assertIn("Add field silly_rename to sillymodel", out.getvalue()) @mock.patch('builtins.input', return_value='Y') def test_makemigrations_model_rename_interactive(self, mock_input): class RenamedModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = 'migrations' with self.temporary_migration_module( module='migrations.test_migrations_no_default', ): with captured_stdout() as out: call_command('makemigrations', 'migrations', interactive=True) self.assertIn('Rename model SillyModel to RenamedModel', out.getvalue()) @mock.patch('builtins.input', return_value='Y') def test_makemigrations_field_rename_interactive(self, mock_input): class SillyModel(models.Model): silly_rename = models.BooleanField(default=False) class Meta: app_label = 'migrations' with self.temporary_migration_module( module='migrations.test_migrations_no_default', ): with captured_stdout() as out: call_command('makemigrations', 'migrations', interactive=True) self.assertIn( 'Rename field silly_field on sillymodel to silly_rename', out.getvalue(), ) def test_makemigrations_handle_merge(self): """ makemigrations properly merges the conflicting migrations with --noinput. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=False, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertIn("Created new merge migration", output) def test_makemigration_merge_dry_run(self): """ makemigrations respects --dry-run option when fixing migration conflicts (#24427). 
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) def test_makemigration_merge_dry_run_verbosity_3(self): """ `makemigrations --merge --dry-run` writes the merge migration file to stdout with `verbosity == 3` (#24427). """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, verbosity=3, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) # Additional output caused by verbosity 3 # The complete merge migration file that would be written self.assertIn("class Migration(migrations.Migration):", output) self.assertIn("dependencies = [", output) self.assertIn("('migrations', '0002_second')", output) self.assertIn("('migrations', '0002_conflicting_second')", output) self.assertIn("operations = [", output) self.assertIn("]", output) def test_makemigrations_dry_run(self): """ `makemigrations --dry-run` should not ask for defaults. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_date = models.DateField() # Added field without a default silly_auto_now = models.DateTimeField(auto_now_add=True) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out) # Output the expected changes directly, without asking for defaults self.assertIn("Add field silly_date to sillymodel", out.getvalue()) def test_makemigrations_dry_run_verbosity_3(self): """ Allow `makemigrations --dry-run` to output the migrations file to stdout (with verbosity == 3). 
""" class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_char = models.CharField(default="") class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3) # Normal --dry-run output self.assertIn("- Add field silly_char to sillymodel", out.getvalue()) # Additional output caused by verbosity 3 # The complete migrations file that would be written self.assertIn("class Migration(migrations.Migration):", out.getvalue()) self.assertIn("dependencies = [", out.getvalue()) self.assertIn("('migrations', '0001_initial'),", out.getvalue()) self.assertIn("migrations.AddField(", out.getvalue()) self.assertIn("model_name='sillymodel',", out.getvalue()) self.assertIn("name='silly_char',", out.getvalue()) def test_makemigrations_scriptable(self): """ With scriptable=True, log output is diverted to stderr, and only the paths of generated migration files are written to stdout. """ out = io.StringIO() err = io.StringIO() with self.temporary_migration_module( module='migrations.migrations.test_migrations', ) as migration_dir: call_command( 'makemigrations', 'migrations', scriptable=True, stdout=out, stderr=err, ) initial_file = os.path.join(migration_dir, '0001_initial.py') self.assertEqual(out.getvalue(), f'{initial_file}\n') self.assertIn(' - Create model ModelWithCustomBase\n', err.getvalue()) @mock.patch('builtins.input', return_value='Y') def test_makemigrations_scriptable_merge(self, mock_input): out = io.StringIO() err = io.StringIO() with self.temporary_migration_module( module='migrations.test_migrations_conflict', ) as migration_dir: call_command( 'makemigrations', 'migrations', merge=True, name='merge', scriptable=True, stdout=out, stderr=err, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertEqual(out.getvalue(), f'{merge_file}\n') self.assertIn(f'Created new merge migration {merge_file}', err.getvalue()) def test_makemigrations_migrations_modules_path_not_exist(self): """ makemigrations creates migrations when specifying a custom location for migration files using MIGRATION_MODULES if the custom path doesn't already exist. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar" with self.temporary_migration_module(module=migration_module) as migration_dir: call_command("makemigrations", "migrations", stdout=out) # Migrations file is actually created in the expected path. initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) # Command output indicates the migration is created. self.assertIn(" - Create model SillyModel", out.getvalue()) @override_settings(MIGRATION_MODULES={'migrations': 'some.nonexistent.path'}) def test_makemigrations_migrations_modules_nonexistent_toplevel_package(self): msg = ( 'Could not locate an appropriate location to create migrations ' 'package some.nonexistent.path. Make sure the toplevel package ' 'exists and can be imported.' ) with self.assertRaisesMessage(ValueError, msg): call_command('makemigrations', 'migrations', empty=True, verbosity=0) def test_makemigrations_interactive_by_default(self): """ The user is prompted to merge by default if there are conflicts and merge is True. 
Answer negative to differentiate it from behavior when --noinput is specified. """ # Monkeypatch interactive questioner to auto reject out = io.StringIO() with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') # This will fail if interactive is False by default self.assertFalse(os.path.exists(merge_file)) self.assertNotIn("Created new merge migration", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_no_merge(self): """ makemigrations does not raise a CommandError when an unspecified app has conflicting migrations. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", merge=False, verbosity=0) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_merge(self): """ makemigrations does not create a merge for an unspecified app even if it has conflicting migrations. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(app_label="migrated_app") as migration_dir: call_command("makemigrations", "migrated_app", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) self.assertIn("No conflicts detected to merge.", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.conflicting_app_with_dependencies"]) def test_makemigrations_merge_dont_output_dependency_operations(self): """ makemigrations --merge does not output any operations from apps that don't belong to a given app. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='N')): out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command( "makemigrations", "conflicting_app_with_dependencies", merge=True, interactive=True, stdout=out ) self.assertEqual( out.getvalue().lower(), 'merging conflicting_app_with_dependencies\n' ' branch 0002_conflicting_second\n' ' - create model something\n' ' branch 0002_second\n' ' - delete model tribble\n' ' - remove field silly_field from author\n' ' - add field rating to author\n' ' - create model book\n' '\n' 'merging will only work if the operations printed above do not conflict\n' 'with each other (working on different fields or models)\n' 'should these migration branches be merged? [y/n] ' ) def test_makemigrations_with_custom_name(self): """ makemigrations --name generate a custom migration name. 
""" with self.temporary_migration_module() as migration_dir: def cmd(migration_count, migration_name, *args): call_command("makemigrations", "migrations", "--verbosity", "0", "--name", migration_name, *args) migration_file = os.path.join(migration_dir, "%s_%s.py" % (migration_count, migration_name)) # Check for existing migration file in migration folder self.assertTrue(os.path.exists(migration_file)) with open(migration_file, encoding='utf-8') as fp: content = fp.read() content = content.replace(" ", "") return content # generate an initial migration migration_name_0001 = "my_initial_migration" content = cmd("0001", migration_name_0001) self.assertIn("dependencies=[\n]", content) # importlib caches os.listdir() on some platforms like macOS # (#23850). if hasattr(importlib, 'invalidate_caches'): importlib.invalidate_caches() # generate an empty migration migration_name_0002 = "my_custom_migration" content = cmd("0002", migration_name_0002, "--empty") self.assertIn("dependencies=[\n('migrations','0001_%s'),\n]" % migration_name_0001, content) self.assertIn("operations=[\n]", content) def test_makemigrations_with_invalid_custom_name(self): msg = 'The migration name must be a valid Python identifier.' with self.assertRaisesMessage(CommandError, msg): call_command('makemigrations', 'migrations', '--name', 'invalid name', '--empty') def test_makemigrations_check(self): """ makemigrations --check should exit with a non-zero status when there are changes to an app requiring migrations. """ with self.temporary_migration_module(): with self.assertRaises(SystemExit): call_command("makemigrations", "--check", "migrations", verbosity=0) with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "--check", "migrations", verbosity=0) def test_makemigrations_migration_path_output(self): """ makemigrations should print the relative paths to the migrations unless they are outside of the current tree, in which case the absolute path should be shown. """ out = io.StringIO() apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_migration_path_output_valueerror(self): """ makemigrations prints the absolute path if os.path.relpath() raises a ValueError when it's impossible to obtain a relative path, e.g. on Windows if Django is installed on a different drive than where the migration files are created. """ out = io.StringIO() with self.temporary_migration_module() as migration_dir: with mock.patch('os.path.relpath', side_effect=ValueError): call_command('makemigrations', 'migrations', stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_inconsistent_history(self): """ makemigrations should raise InconsistentMigrationHistory exception if there are some migrations applied before their dependencies. 
""" recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0002_second') msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.temporary_migration_module(module="migrations.test_migrations"): with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("makemigrations") def test_makemigrations_inconsistent_history_db_failure(self): msg = ( "Got an error checking a consistent migration history performed " "for database connection 'default': could not connect to server" ) with mock.patch( 'django.db.migrations.loader.MigrationLoader.check_consistent_history', side_effect=OperationalError('could not connect to server'), ): with self.temporary_migration_module(): with self.assertWarns(RuntimeWarning) as cm: call_command('makemigrations', verbosity=0) self.assertEqual(str(cm.warning), msg) @mock.patch('builtins.input', return_value='1') @mock.patch('django.db.migrations.questioner.sys.stdin', mock.MagicMock(encoding=sys.getdefaultencoding())) def test_makemigrations_auto_now_add_interactive(self, *args): """ makemigrations prompts the user when adding auto_now_add to an existing model. """ class Entry(models.Model): title = models.CharField(max_length=255) creation_date = models.DateTimeField(auto_now_add=True) class Meta: app_label = 'migrations' input_msg = ( "It is impossible to add the field 'creation_date' with " "'auto_now_add=True' to entry without providing a default. This " "is because the database needs something to populate existing " "rows.\n" " 1) Provide a one-off default now which will be set on all " "existing rows\n" " 2) Quit and manually define a default value in models.py." ) # Monkeypatch interactive questioner to auto accept prompt_stdout = io.StringIO() with self.temporary_migration_module(module='migrations.test_auto_now_add'): call_command('makemigrations', 'migrations', interactive=True, stdout=prompt_stdout) prompt_output = prompt_stdout.getvalue() self.assertIn(input_msg, prompt_output) self.assertIn('Please enter the default value as valid Python.', prompt_output) self.assertIn( "Accept the default 'timezone.now' by pressing 'Enter' or provide " "another value.", prompt_output, ) self.assertIn("Type 'exit' to exit this prompt", prompt_output) self.assertIn("Add field creation_date to entry", prompt_output) @mock.patch('builtins.input', return_value='2') def test_makemigrations_auto_now_add_interactive_quit(self, mock_input): class Author(models.Model): publishing_date = models.DateField(auto_now_add=True) class Meta: app_label = 'migrations' with self.temporary_migration_module(module='migrations.test_migrations'): with captured_stdout(): with self.assertRaises(SystemExit): call_command('makemigrations', 'migrations', interactive=True) def test_makemigrations_non_interactive_auto_now_add_addition(self): """ Non-interactive makemigrations fails when a default is missing on a new field when auto_now_add=True. 
""" class Entry(models.Model): creation_date = models.DateTimeField(auto_now_add=True) class Meta: app_label = 'migrations' with self.temporary_migration_module(module='migrations.test_auto_now_add'): with self.assertRaises(SystemExit), captured_stdout() as out: call_command('makemigrations', 'migrations', interactive=False) self.assertIn( "Field 'creation_date' on model 'entry' not migrated: it is " "impossible to add a field with 'auto_now_add=True' without " "specifying a default.", out.getvalue(), ) def test_makemigrations_interactive_unique_callable_default_addition(self): """ makemigrations prompts the user when adding a unique field with a callable default. """ class Book(models.Model): created = models.DateTimeField(unique=True, default=timezone.now) class Meta: app_label = 'migrations' version = get_docs_version() input_msg = ( f'Callable default on unique field book.created will not generate ' f'unique values upon migrating.\n' f'Please choose how to proceed:\n' f' 1) Continue making this migration as the first step in writing ' f'a manual migration to generate unique values described here: ' f'https://docs.djangoproject.com/en/{version}/howto/' f'writing-migrations/#migrations-that-add-unique-fields.\n' f' 2) Quit and edit field options in models.py.\n' ) with self.temporary_migration_module(module='migrations.test_migrations'): # 2 - quit. with mock.patch('builtins.input', return_value='2'): with captured_stdout() as out, self.assertRaises(SystemExit): call_command('makemigrations', 'migrations', interactive=True) out_value = out.getvalue() self.assertIn(input_msg, out_value) self.assertNotIn('Add field created to book', out_value) # 1 - continue. with mock.patch('builtins.input', return_value='1'): with captured_stdout() as out: call_command('makemigrations', 'migrations', interactive=True) out_value = out.getvalue() self.assertIn(input_msg, out_value) self.assertIn('Add field created to book', out_value) def test_makemigrations_non_interactive_unique_callable_default_addition(self): class Book(models.Model): created = models.DateTimeField(unique=True, default=timezone.now) class Meta: app_label = 'migrations' with self.temporary_migration_module(module='migrations.test_migrations'): with captured_stdout() as out: call_command('makemigrations', 'migrations', interactive=False) out_value = out.getvalue() self.assertIn('Add field created to book', out_value) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}, ) def test_makemigrations_continues_number_sequence_after_squash(self): with self.temporary_migration_module(module='migrations.test_migrations_squashed'): with captured_stdout() as out: call_command( 'makemigrations', 'migrations', interactive=False, empty=True, ) out_value = out.getvalue() self.assertIn('0003_auto', out_value) class SquashMigrationsTests(MigrationTestBase): """ Tests running the squashmigrations command. """ def test_squashmigrations_squashes(self): """ squashmigrations squashes migrations. 
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command('squashmigrations', 'migrations', '0002', interactive=False, stdout=out, no_color=True) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") self.assertTrue(os.path.exists(squashed_migration_file)) self.assertEqual( out.getvalue(), 'Will squash the following migrations:\n' ' - 0001_initial\n' ' - 0002_second\n' 'Optimizing...\n' ' Optimized from 8 operations to 2 operations.\n' 'Created new squashed migration %s\n' ' You should commit this migration but leave the old ones in place;\n' ' the new migration will be used for new installs. Once you are sure\n' ' all instances of the codebase have applied the migrations you squashed,\n' ' you can delete them.\n' % squashed_migration_file ) def test_squashmigrations_initial_attribute(self): with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn("initial = True", content) def test_squashmigrations_optimizes(self): """ squashmigrations optimizes operations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, stdout=out) self.assertIn("Optimized from 8 operations to 2 operations.", out.getvalue()) def test_ticket_23799_squashmigrations_no_optimize(self): """ squashmigrations --no-optimize doesn't optimize operations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, no_optimize=True, stdout=out) self.assertIn("Skipping optimization", out.getvalue()) def test_squashmigrations_valid_start(self): """ squashmigrations accepts a starting migration. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes") as migration_dir: call_command("squashmigrations", "migrations", "0002", "0003", interactive=False, verbosity=1, stdout=out) squashed_migration_file = os.path.join(migration_dir, "0002_second_squashed_0003_third.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn(" ('migrations', '0001_initial')", content) self.assertNotIn("initial = True", content) out = out.getvalue() self.assertNotIn(" - 0001_initial", out) self.assertIn(" - 0002_second", out) self.assertIn(" - 0003_third", out) def test_squashmigrations_invalid_start(self): """ squashmigrations doesn't accept a starting migration after the ending migration. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): msg = ( "The migration 'migrations.0003_third' cannot be found. 
Maybe " "it comes after the migration 'migrations.0002_second'" ) with self.assertRaisesMessage(CommandError, msg): call_command("squashmigrations", "migrations", "0003", "0002", interactive=False, verbosity=0) def test_squashed_name_with_start_migration_name(self): """--squashed-name specifies the new migration's name.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', '0002', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) def test_squashed_name_without_start_migration_name(self): """--squashed-name also works if a start migration is omitted.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) def test_squashed_name_exists(self): msg = 'Migration 0001_initial already exists. Use a different name.' with self.temporary_migration_module(module='migrations.test_migrations'): with self.assertRaisesMessage(CommandError, msg): call_command( 'squashmigrations', 'migrations', '0001', '0002', squashed_name='initial', interactive=False, verbosity=0, ) def test_squashmigrations_manual_porting(self): out = io.StringIO() with self.temporary_migration_module( module='migrations.test_migrations_manual_porting', ) as migration_dir: call_command( 'squashmigrations', 'migrations', '0002', interactive=False, stdout=out, no_color=True, ) squashed_migration_file = os.path.join( migration_dir, '0001_squashed_0002_second.py', ) self.assertTrue(os.path.exists(squashed_migration_file)) self.assertEqual( out.getvalue(), f'Will squash the following migrations:\n' f' - 0001_initial\n' f' - 0002_second\n' f'Optimizing...\n' f' No optimizations possible.\n' f'Created new squashed migration {squashed_migration_file}\n' f' You should commit this migration but leave the old ones in place;\n' f' the new migration will be used for new installs. Once you are sure\n' f' all instances of the codebase have applied the migrations you squashed,\n' f' you can delete them.\n' f'Manual porting required\n' f' Your migrations contained functions that must be manually copied over,\n' f' as we could not safely copy their implementation.\n' f' See the comment at the top of the squashed migration for details.\n' ) class AppLabelErrorTests(TestCase): """ This class inherits TestCase because MigrationTestBase uses `available_apps = ['migrations']` which means that it's the only installed app. 'django.contrib.auth' must be in INSTALLED_APPS for some of these tests. """ nonexistent_app_error = "No installed app with label 'nonexistent_app'." did_you_mean_auth_error = ( "No installed app with label 'django.contrib.auth'. Did you mean " "'auth'?" 
) def test_makemigrations_nonexistent_app_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('makemigrations', 'nonexistent_app', stderr=err) self.assertIn(self.nonexistent_app_error, err.getvalue()) def test_makemigrations_app_name_specified_as_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('makemigrations', 'django.contrib.auth', stderr=err) self.assertIn(self.did_you_mean_auth_error, err.getvalue()) def test_migrate_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('migrate', 'nonexistent_app') def test_migrate_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('migrate', 'django.contrib.auth') def test_showmigrations_nonexistent_app_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('showmigrations', 'nonexistent_app', stderr=err) self.assertIn(self.nonexistent_app_error, err.getvalue()) def test_showmigrations_app_name_specified_as_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('showmigrations', 'django.contrib.auth', stderr=err) self.assertIn(self.did_you_mean_auth_error, err.getvalue()) def test_sqlmigrate_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('sqlmigrate', 'nonexistent_app', '0002') def test_sqlmigrate_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('sqlmigrate', 'django.contrib.auth', '0002') def test_squashmigrations_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('squashmigrations', 'nonexistent_app', '0002') def test_squashmigrations_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('squashmigrations', 'django.contrib.auth', '0002')
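

# Hedged, illustrative sketch (not part of the upstream test suite): the
# command tests above all follow the same pattern -- invoke a management
# command programmatically with call_command() and capture its report text
# in an io.StringIO() for assertions. The helper name below is made up for
# illustration; it relies on this module's existing imports (io, call_command)
# and a configured test environment.
def _example_capture_makemigrations_output():
    """Illustrative only: run makemigrations for the test app in dry-run mode."""
    out = io.StringIO()
    call_command(
        'makemigrations', 'migrations',
        dry_run=True, interactive=False, stdout=out,
    )
    return out.getvalue()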
import datetime import decimal import enum import functools import math import os import pathlib import re import sys import uuid from unittest import mock try: import zoneinfo except ImportError: from backports import zoneinfo try: import pytz except ImportError: pytz = None import custom_migration_operations.more_operations import custom_migration_operations.operations from django import get_version from django.conf import SettingsReference, settings from django.core.validators import EmailValidator, RegexValidator from django.db import migrations, models from django.db.migrations.serializer import BaseSerializer from django.db.migrations.writer import MigrationWriter, OperationWriter from django.test import SimpleTestCase from django.utils.deconstruct import deconstructible from django.utils.functional import SimpleLazyObject from django.utils.timezone import get_default_timezone, get_fixed_timezone, utc from django.utils.translation import gettext_lazy as _ from .models import FoodManager, FoodQuerySet class DeconstructibleInstances: def deconstruct(self): return ('DeconstructibleInstances', [], {}) class Money(decimal.Decimal): def deconstruct(self): return ( '%s.%s' % (self.__class__.__module__, self.__class__.__name__), [str(self)], {} ) class TestModel1: def upload_to(self): return '/somewhere/dynamic/' thing = models.FileField(upload_to=upload_to) class TextEnum(enum.Enum): A = 'a-value' B = 'value-b' class TextTranslatedEnum(enum.Enum): A = _('a-value') B = _('value-b') class BinaryEnum(enum.Enum): A = b'a-value' B = b'value-b' class IntEnum(enum.IntEnum): A = 1 B = 2 class OperationWriterTests(SimpleTestCase): def test_empty_signature(self): operation = custom_migration_operations.operations.TestOperation() buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.TestOperation(\n' '),' ) def test_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation(1, 2) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' '),' ) def test_kwargs_signature(self): operation = custom_migration_operations.operations.KwargsOperation(kwarg1=1) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' '),' ) def test_args_kwargs_signature(self): operation = custom_migration_operations.operations.ArgsKwargsOperation(1, 2, kwarg2=4) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsKwargsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' ' kwarg2=4,\n' '),' ) def test_nested_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation( custom_migration_operations.operations.ArgsOperation(1, 2), custom_migration_operations.operations.KwargsOperation(kwarg1=3, kwarg2=4) ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 
'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' ' ),\n' ' arg2=custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=3,\n' ' kwarg2=4,\n' ' ),\n' '),' ) def test_multiline_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation("test\n arg1", "test\narg2") buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, "custom_migration_operations.operations.ArgsOperation(\n" " arg1='test\\n arg1',\n" " arg2='test\\narg2',\n" ")," ) def test_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation([1, 2]) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' 1,\n' ' 2,\n' ' ],\n' '),' ) def test_nested_operation_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation( arg=[ custom_migration_operations.operations.KwargsOperation( kwarg1=1, kwarg2=2, ), ] ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' ' kwarg2=2,\n' ' ),\n' ' ],\n' '),' ) class WriterTests(SimpleTestCase): """ Tests the migration writer (makes migration files from Migration instances) """ class NestedEnum(enum.IntEnum): A = 1 B = 2 class NestedChoices(models.TextChoices): X = 'X', 'X value' Y = 'Y', 'Y value' def safe_exec(self, string, value=None): d = {} try: exec(string, globals(), d) except Exception as e: if value: self.fail("Could not exec %r (from value %r): %s" % (string.strip(), value, e)) else: self.fail("Could not exec %r: %s" % (string.strip(), e)) return d def serialize_round_trip(self, value): string, imports = MigrationWriter.serialize(value) return self.safe_exec("%s\ntest_value_result = %s" % ("\n".join(imports), string), value)['test_value_result'] def assertSerializedEqual(self, value): self.assertEqual(self.serialize_round_trip(value), value) def assertSerializedResultEqual(self, value, target): self.assertEqual(MigrationWriter.serialize(value), target) def assertSerializedFieldEqual(self, value): new_value = self.serialize_round_trip(value) self.assertEqual(value.__class__, new_value.__class__) self.assertEqual(value.max_length, new_value.max_length) self.assertEqual(value.null, new_value.null) self.assertEqual(value.unique, new_value.unique) def test_serialize_numbers(self): self.assertSerializedEqual(1) self.assertSerializedEqual(1.2) self.assertTrue(math.isinf(self.serialize_round_trip(float("inf")))) self.assertTrue(math.isinf(self.serialize_round_trip(float("-inf")))) self.assertTrue(math.isnan(self.serialize_round_trip(float("nan")))) self.assertSerializedEqual(decimal.Decimal('1.3')) self.assertSerializedResultEqual( decimal.Decimal('1.3'), ("Decimal('1.3')", {'from decimal import Decimal'}) ) self.assertSerializedEqual(Money('1.3')) self.assertSerializedResultEqual( Money('1.3'), ("migrations.test_writer.Money('1.3')", {'import migrations.test_writer'}) ) def test_serialize_constants(self): 
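        # These singletons round-trip as their literal names ('None', 'True',
        # 'False') and need no extra imports in the generated migration.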
self.assertSerializedEqual(None) self.assertSerializedEqual(True) self.assertSerializedEqual(False) def test_serialize_strings(self): self.assertSerializedEqual(b"foobar") string, imports = MigrationWriter.serialize(b"foobar") self.assertEqual(string, "b'foobar'") self.assertSerializedEqual("föobár") string, imports = MigrationWriter.serialize("foobar") self.assertEqual(string, "'foobar'") def test_serialize_multiline_strings(self): self.assertSerializedEqual(b"foo\nbar") string, imports = MigrationWriter.serialize(b"foo\nbar") self.assertEqual(string, "b'foo\\nbar'") self.assertSerializedEqual("föo\nbár") string, imports = MigrationWriter.serialize("foo\nbar") self.assertEqual(string, "'foo\\nbar'") def test_serialize_collections(self): self.assertSerializedEqual({1: 2}) self.assertSerializedEqual(["a", 2, True, None]) self.assertSerializedEqual({2, 3, "eighty"}) self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]}) self.assertSerializedEqual(_('Hello')) def test_serialize_builtin_types(self): self.assertSerializedEqual([list, tuple, dict, set, frozenset]) self.assertSerializedResultEqual( [list, tuple, dict, set, frozenset], ("[list, tuple, dict, set, frozenset]", set()) ) def test_serialize_lazy_objects(self): pattern = re.compile(r'^foo$') lazy_pattern = SimpleLazyObject(lambda: pattern) self.assertEqual(self.serialize_round_trip(lazy_pattern), pattern) def test_serialize_enums(self): self.assertSerializedResultEqual( TextEnum.A, ("migrations.test_writer.TextEnum['A']", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( TextTranslatedEnum.A, ("migrations.test_writer.TextTranslatedEnum['A']", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( BinaryEnum.A, ("migrations.test_writer.BinaryEnum['A']", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( IntEnum.B, ("migrations.test_writer.IntEnum['B']", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( self.NestedEnum.A, ( "migrations.test_writer.WriterTests.NestedEnum['A']", {'import migrations.test_writer'}, ), ) self.assertSerializedEqual(self.NestedEnum.A) field = models.CharField(default=TextEnum.B, choices=[(m.value, m) for m in TextEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[" "('a-value', migrations.test_writer.TextEnum['A']), " "('value-b', migrations.test_writer.TextEnum['B'])], " "default=migrations.test_writer.TextEnum['B'])" ) field = models.CharField( default=TextTranslatedEnum.A, choices=[(m.value, m) for m in TextTranslatedEnum], ) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[" "('a-value', migrations.test_writer.TextTranslatedEnum['A']), " "('value-b', migrations.test_writer.TextTranslatedEnum['B'])], " "default=migrations.test_writer.TextTranslatedEnum['A'])" ) field = models.CharField(default=BinaryEnum.B, choices=[(m.value, m) for m in BinaryEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[" "(b'a-value', migrations.test_writer.BinaryEnum['A']), " "(b'value-b', migrations.test_writer.BinaryEnum['B'])], " "default=migrations.test_writer.BinaryEnum['B'])" ) field = models.IntegerField(default=IntEnum.A, choices=[(m.value, m) for m in IntEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.IntegerField(choices=[" "(1, migrations.test_writer.IntEnum['A']), " "(2, migrations.test_writer.IntEnum['B'])], " 
"default=migrations.test_writer.IntEnum['A'])" ) def test_serialize_choices(self): class TextChoices(models.TextChoices): A = 'A', 'A value' B = 'B', 'B value' class IntegerChoices(models.IntegerChoices): A = 1, 'One' B = 2, 'Two' class DateChoices(datetime.date, models.Choices): DATE_1 = 1969, 7, 20, 'First date' DATE_2 = 1969, 11, 19, 'Second date' self.assertSerializedResultEqual(TextChoices.A, ("'A'", set())) self.assertSerializedResultEqual(IntegerChoices.A, ('1', set())) self.assertSerializedResultEqual( DateChoices.DATE_1, ('datetime.date(1969, 7, 20)', {'import datetime'}), ) field = models.CharField(default=TextChoices.B, choices=TextChoices.choices) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[('A', 'A value'), ('B', 'B value')], " "default='B')", ) field = models.IntegerField(default=IntegerChoices.B, choices=IntegerChoices.choices) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.IntegerField(choices=[(1, 'One'), (2, 'Two')], default=2)", ) field = models.DateField(default=DateChoices.DATE_2, choices=DateChoices.choices) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.DateField(choices=[" "(datetime.date(1969, 7, 20), 'First date'), " "(datetime.date(1969, 11, 19), 'Second date')], " "default=datetime.date(1969, 11, 19))" ) def test_serialize_nested_class(self): for nested_cls in [self.NestedEnum, self.NestedChoices]: cls_name = nested_cls.__name__ with self.subTest(cls_name): self.assertSerializedResultEqual( nested_cls, ( "migrations.test_writer.WriterTests.%s" % cls_name, {'import migrations.test_writer'}, ), ) def test_serialize_uuid(self): self.assertSerializedEqual(uuid.uuid1()) self.assertSerializedEqual(uuid.uuid4()) uuid_a = uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8') uuid_b = uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2') self.assertSerializedResultEqual( uuid_a, ("uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8')", {'import uuid'}) ) self.assertSerializedResultEqual( uuid_b, ("uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2')", {'import uuid'}) ) field = models.UUIDField(choices=((uuid_a, 'UUID A'), (uuid_b, 'UUID B')), default=uuid_a) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.UUIDField(choices=[" "(uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'), 'UUID A'), " "(uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2'), 'UUID B')], " "default=uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'))" ) def test_serialize_pathlib(self): # Pure path objects work in all platforms. self.assertSerializedEqual(pathlib.PurePosixPath()) self.assertSerializedEqual(pathlib.PureWindowsPath()) path = pathlib.PurePosixPath('/path/file.txt') expected = ("pathlib.PurePosixPath('/path/file.txt')", {'import pathlib'}) self.assertSerializedResultEqual(path, expected) path = pathlib.PureWindowsPath('A:\\File.txt') expected = ("pathlib.PureWindowsPath('A:/File.txt')", {'import pathlib'}) self.assertSerializedResultEqual(path, expected) # Concrete path objects work on supported platforms. 
if sys.platform == 'win32': self.assertSerializedEqual(pathlib.WindowsPath.cwd()) path = pathlib.WindowsPath('A:\\File.txt') expected = ("pathlib.PureWindowsPath('A:/File.txt')", {'import pathlib'}) self.assertSerializedResultEqual(path, expected) else: self.assertSerializedEqual(pathlib.PosixPath.cwd()) path = pathlib.PosixPath('/path/file.txt') expected = ("pathlib.PurePosixPath('/path/file.txt')", {'import pathlib'}) self.assertSerializedResultEqual(path, expected) field = models.FilePathField(path=pathlib.PurePosixPath('/home/user')) string, imports = MigrationWriter.serialize(field) self.assertEqual( string, "models.FilePathField(path=pathlib.PurePosixPath('/home/user'))", ) self.assertIn('import pathlib', imports) def test_serialize_path_like(self): with os.scandir(os.path.dirname(__file__)) as entries: path_like = list(entries)[0] expected = (repr(path_like.path), {}) self.assertSerializedResultEqual(path_like, expected) field = models.FilePathField(path=path_like) string = MigrationWriter.serialize(field)[0] self.assertEqual(string, 'models.FilePathField(path=%r)' % path_like.path) def test_serialize_functions(self): with self.assertRaisesMessage(ValueError, 'Cannot serialize function: lambda'): self.assertSerializedEqual(lambda x: 42) self.assertSerializedEqual(models.SET_NULL) string, imports = MigrationWriter.serialize(models.SET(42)) self.assertEqual(string, 'models.SET(42)') self.serialize_round_trip(models.SET(42)) def test_serialize_datetime(self): self.assertSerializedEqual(datetime.datetime.now()) self.assertSerializedEqual(datetime.datetime.now) self.assertSerializedEqual(datetime.datetime.today()) self.assertSerializedEqual(datetime.datetime.today) self.assertSerializedEqual(datetime.date.today()) self.assertSerializedEqual(datetime.date.today) self.assertSerializedEqual(datetime.datetime.now().time()) self.assertSerializedEqual(datetime.datetime(2014, 1, 1, 1, 1, tzinfo=get_default_timezone())) self.assertSerializedEqual(datetime.datetime(2013, 12, 31, 22, 1, tzinfo=get_fixed_timezone(180))) self.assertSerializedResultEqual( datetime.datetime(2014, 1, 1, 1, 1), ("datetime.datetime(2014, 1, 1, 1, 1)", {'import datetime'}) ) for tzinfo in (utc, datetime.timezone.utc): with self.subTest(tzinfo=tzinfo): self.assertSerializedResultEqual( datetime.datetime(2012, 1, 1, 1, 1, tzinfo=tzinfo), ( "datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)", {'import datetime', 'from django.utils.timezone import utc'}, ) ) self.assertSerializedResultEqual( datetime.datetime(2012, 1, 1, 2, 1, tzinfo=zoneinfo.ZoneInfo('Europe/Paris')), ( "datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)", {'import datetime', 'from django.utils.timezone import utc'}, ) ) if pytz: self.assertSerializedResultEqual( pytz.timezone('Europe/Paris').localize(datetime.datetime(2012, 1, 1, 2, 1)), ( "datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)", {'import datetime', 'from django.utils.timezone import utc'}, ) ) def test_serialize_fields(self): self.assertSerializedFieldEqual(models.CharField(max_length=255)) self.assertSerializedResultEqual( models.CharField(max_length=255), ("models.CharField(max_length=255)", {"from django.db import models"}) ) self.assertSerializedFieldEqual(models.TextField(null=True, blank=True)) self.assertSerializedResultEqual( models.TextField(null=True, blank=True), ("models.TextField(blank=True, null=True)", {'from django.db import models'}) ) def test_serialize_settings(self): self.assertSerializedEqual(SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL")) 
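        # A SettingsReference is written out as a reference to the named
        # setting (settings.AUTH_USER_MODEL) rather than its current value,
        # as the assertion below shows.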
self.assertSerializedResultEqual( SettingsReference("someapp.model", "AUTH_USER_MODEL"), ("settings.AUTH_USER_MODEL", {"from django.conf import settings"}) ) def test_serialize_iterators(self): self.assertSerializedResultEqual( ((x, x * x) for x in range(3)), ("((0, 0), (1, 1), (2, 4))", set()) ) def test_serialize_compiled_regex(self): """ Make sure compiled regex can be serialized. """ regex = re.compile(r'^\w+$') self.assertSerializedEqual(regex) def test_serialize_class_based_validators(self): """ Ticket #22943: Test serialization of class-based validators, including compiled regexes. """ validator = RegexValidator(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(message='hello')") self.serialize_round_trip(validator) # Test with a compiled regex. validator = RegexValidator(regex=re.compile(r'^\w+$')) string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(regex=re.compile('^\\\\w+$'))") self.serialize_round_trip(validator) # Test a string regex with flag validator = RegexValidator(r'^[0-9]+$', flags=re.S) string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator('^[0-9]+$', flags=re.RegexFlag['DOTALL'])") self.serialize_round_trip(validator) # Test message and code validator = RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid') string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid')") self.serialize_round_trip(validator) # Test with a subclass. validator = EmailValidator(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.EmailValidator(message='hello')") self.serialize_round_trip(validator) validator = deconstructible(path="migrations.test_writer.EmailValidator")(EmailValidator)(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "migrations.test_writer.EmailValidator(message='hello')") validator = deconstructible(path="custom.EmailValidator")(EmailValidator)(message="hello") with self.assertRaisesMessage(ImportError, "No module named 'custom'"): MigrationWriter.serialize(validator) validator = deconstructible(path="django.core.validators.EmailValidator2")(EmailValidator)(message="hello") with self.assertRaisesMessage(ValueError, "Could not find object EmailValidator2 in django.core.validators."): MigrationWriter.serialize(validator) def test_serialize_complex_func_index(self): index = models.Index( models.Func('rating', function='ABS'), models.Case( models.When(name='special', then=models.Value('X')), default=models.Value('other'), ), models.ExpressionWrapper( models.F('pages'), output_field=models.IntegerField(), ), models.OrderBy(models.F('name').desc()), name='complex_func_index', ) string, imports = MigrationWriter.serialize(index) self.assertEqual( string, "models.Index(models.Func('rating', function='ABS'), " "models.Case(models.When(name='special', then=models.Value('X')), " "default=models.Value('other')), " "models.ExpressionWrapper(" "models.F('pages'), output_field=models.IntegerField()), " "models.OrderBy(models.OrderBy(models.F('name'), descending=True)), " "name='complex_func_index')" ) self.assertEqual(imports, {'from django.db import models'}) def test_serialize_empty_nonempty_tuple(self): """ Ticket #22679: makemigrations generates invalid code for (an empty tuple) 
default_permissions = () """ empty_tuple = () one_item_tuple = ('a',) many_items_tuple = ('a', 'b', 'c') self.assertSerializedEqual(empty_tuple) self.assertSerializedEqual(one_item_tuple) self.assertSerializedEqual(many_items_tuple) def test_serialize_range(self): string, imports = MigrationWriter.serialize(range(1, 5)) self.assertEqual(string, 'range(1, 5)') self.assertEqual(imports, set()) def test_serialize_builtins(self): string, imports = MigrationWriter.serialize(range) self.assertEqual(string, 'range') self.assertEqual(imports, set()) def test_serialize_unbound_method_reference(self): """An unbound method used within a class body can be serialized.""" self.serialize_round_trip(TestModel1.thing) def test_serialize_local_function_reference(self): """A reference in a local scope can't be serialized.""" class TestModel2: def upload_to(self): return "somewhere dynamic" thing = models.FileField(upload_to=upload_to) with self.assertRaisesMessage(ValueError, 'Could not find function upload_to in migrations.test_writer'): self.serialize_round_trip(TestModel2.thing) def test_serialize_managers(self): self.assertSerializedEqual(models.Manager()) self.assertSerializedResultEqual( FoodQuerySet.as_manager(), ('migrations.models.FoodQuerySet.as_manager()', {'import migrations.models'}) ) self.assertSerializedEqual(FoodManager('a', 'b')) self.assertSerializedEqual(FoodManager('x', 'y', c=3, d=4)) def test_serialize_frozensets(self): self.assertSerializedEqual(frozenset()) self.assertSerializedEqual(frozenset("let it go")) def test_serialize_set(self): self.assertSerializedEqual(set()) self.assertSerializedResultEqual(set(), ('set()', set())) self.assertSerializedEqual({'a'}) self.assertSerializedResultEqual({'a'}, ("{'a'}", set())) def test_serialize_timedelta(self): self.assertSerializedEqual(datetime.timedelta()) self.assertSerializedEqual(datetime.timedelta(minutes=42)) def test_serialize_functools_partial(self): value = functools.partial(datetime.timedelta, 1, seconds=2) result = self.serialize_round_trip(value) self.assertEqual(result.func, value.func) self.assertEqual(result.args, value.args) self.assertEqual(result.keywords, value.keywords) def test_serialize_functools_partialmethod(self): value = functools.partialmethod(datetime.timedelta, 1, seconds=2) result = self.serialize_round_trip(value) self.assertIsInstance(result, functools.partialmethod) self.assertEqual(result.func, value.func) self.assertEqual(result.args, value.args) self.assertEqual(result.keywords, value.keywords) def test_serialize_type_none(self): self.assertSerializedEqual(type(None)) def test_serialize_type_model(self): self.assertSerializedEqual(models.Model) self.assertSerializedResultEqual( MigrationWriter.serialize(models.Model), ("('models.Model', {'from django.db import models'})", set()), ) def test_simple_migration(self): """ Tests serializing a simple migration. 
""" fields = { 'charfield': models.DateTimeField(default=datetime.datetime.now), 'datetimefield': models.DateTimeField(default=datetime.datetime.now), } options = { 'verbose_name': 'My model', 'verbose_name_plural': 'My models', } migration = type("Migration", (migrations.Migration,), { "operations": [ migrations.CreateModel("MyModel", tuple(fields.items()), options, (models.Model,)), migrations.CreateModel("MyModel2", tuple(fields.items()), bases=(models.Model,)), migrations.CreateModel( name="MyModel3", fields=tuple(fields.items()), options=options, bases=(models.Model,) ), migrations.DeleteModel("MyModel"), migrations.AddField("OtherModel", "datetimefield", fields["datetimefield"]), ], "dependencies": [("testapp", "some_other_one")], }) writer = MigrationWriter(migration) output = writer.as_string() # We don't test the output formatting - that's too fragile. # Just make sure it runs for now, and that things look alright. result = self.safe_exec(output) self.assertIn("Migration", result) def test_migration_path(self): test_apps = [ 'migrations.migrations_test_apps.normal', 'migrations.migrations_test_apps.with_package_model', 'migrations.migrations_test_apps.without_init_file', ] base_dir = os.path.dirname(os.path.dirname(__file__)) for app in test_apps: with self.modify_settings(INSTALLED_APPS={'append': app}): migration = migrations.Migration('0001_initial', app.split('.')[-1]) expected_path = os.path.join(base_dir, *(app.split('.') + ['migrations', '0001_initial.py'])) writer = MigrationWriter(migration) self.assertEqual(writer.path, expected_path) def test_custom_operation(self): migration = type("Migration", (migrations.Migration,), { "operations": [ custom_migration_operations.operations.TestOperation(), custom_migration_operations.operations.CreateModel(), migrations.CreateModel("MyModel", (), {}, (models.Model,)), custom_migration_operations.more_operations.TestOperation() ], "dependencies": [] }) writer = MigrationWriter(migration) output = writer.as_string() result = self.safe_exec(output) self.assertIn("custom_migration_operations", result) self.assertNotEqual( result['custom_migration_operations'].operations.TestOperation, result['custom_migration_operations'].more_operations.TestOperation ) def test_sorted_imports(self): """ #24155 - Tests ordering of imports. """ migration = type("Migration", (migrations.Migration,), { "operations": [ migrations.AddField("mymodel", "myfield", models.DateTimeField( default=datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc), )), ] }) writer = MigrationWriter(migration) output = writer.as_string() self.assertIn( "import datetime\n" "from django.db import migrations, models\n" "from django.utils.timezone import utc\n", output ) def test_migration_file_header_comments(self): """ Test comments at top of file. 
""" migration = type("Migration", (migrations.Migration,), { "operations": [] }) dt = datetime.datetime(2015, 7, 31, 4, 40, 0, 0, tzinfo=utc) with mock.patch('django.db.migrations.writer.now', lambda: dt): for include_header in (True, False): with self.subTest(include_header=include_header): writer = MigrationWriter(migration, include_header) output = writer.as_string() self.assertEqual( include_header, output.startswith( "# Generated by Django %s on 2015-07-31 04:40\n\n" % get_version() ) ) if not include_header: # Make sure the output starts with something that's not # a comment or indentation or blank line self.assertRegex(output.splitlines(keepends=True)[0], r"^[^#\s]+") def test_models_import_omitted(self): """ django.db.models shouldn't be imported if unused. """ migration = type("Migration", (migrations.Migration,), { "operations": [ migrations.AlterModelOptions( name='model', options={'verbose_name': 'model', 'verbose_name_plural': 'models'}, ), ] }) writer = MigrationWriter(migration) output = writer.as_string() self.assertIn("from django.db import migrations\n", output) def test_deconstruct_class_arguments(self): # Yes, it doesn't make sense to use a class as a default for a # CharField. It does make sense for custom fields though, for example # an enumfield that takes the enum class as an argument. string = MigrationWriter.serialize(models.CharField(default=DeconstructibleInstances))[0] self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructibleInstances)") def test_register_serializer(self): class ComplexSerializer(BaseSerializer): def serialize(self): return 'complex(%r)' % self.value, {} MigrationWriter.register_serializer(complex, ComplexSerializer) self.assertSerializedEqual(complex(1, 2)) MigrationWriter.unregister_serializer(complex) with self.assertRaisesMessage(ValueError, 'Cannot serialize: (1+2j)'): self.assertSerializedEqual(complex(1, 2)) def test_register_non_serializer(self): with self.assertRaisesMessage(ValueError, "'TestModel1' must inherit from 'BaseSerializer'."): MigrationWriter.register_serializer(complex, TestModel1)
import compileall import os from importlib import import_module from django.db import connection, connections from django.db.migrations.exceptions import ( AmbiguityError, InconsistentMigrationHistory, NodeNotFoundError, ) from django.db.migrations.loader import MigrationLoader from django.db.migrations.recorder import MigrationRecorder from django.test import TestCase, modify_settings, override_settings from .test_base import MigrationTestBase class RecorderTests(TestCase): """ Tests recording migrations as applied or not. """ databases = {'default', 'other'} def test_apply(self): """ Tests marking migrations as applied/unapplied. """ recorder = MigrationRecorder(connection) self.assertEqual( {(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"}, set(), ) recorder.record_applied("myapp", "0432_ponies") self.assertEqual( {(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"}, {("myapp", "0432_ponies")}, ) # That should not affect records of another database recorder_other = MigrationRecorder(connections['other']) self.assertEqual( {(x, y) for (x, y) in recorder_other.applied_migrations() if x == "myapp"}, set(), ) recorder.record_unapplied("myapp", "0432_ponies") self.assertEqual( {(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"}, set(), ) class LoaderTests(TestCase): """ Tests the disk and database loader, and running through migrations in memory. """ def setUp(self): self.applied_records = [] def tearDown(self): # Unapply records on databases that don't roll back changes after each # test method. if not connection.features.supports_transactions: for recorder, app, name in self.applied_records: recorder.record_unapplied(app, name) def record_applied(self, recorder, app, name): recorder.record_applied(app, name) self.applied_records.append((recorder, app, name)) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) @modify_settings(INSTALLED_APPS={'append': 'basic'}) def test_load(self): """ Makes sure the loader can load the migrations for the test apps, and then render them out to a new Apps. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0002_second")), [ ("migrations", "0001_initial"), ("migrations", "0002_second"), ], ) # Now render it out! project_state = migration_loader.project_state(("migrations", "0002_second")) self.assertEqual(len(project_state.models), 2) author_state = project_state.models["migrations", "author"] self.assertEqual( list(author_state.fields), ["id", "name", "slug", "age", "rating"] ) book_state = project_state.models["migrations", "book"] self.assertEqual(list(book_state.fields), ['id', 'author']) # Ensure we've included unmigrated apps in there too self.assertIn("basic", project_state.real_apps) @override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_migrations', 'migrations2': 'migrations2.test_migrations_2', }) @modify_settings(INSTALLED_APPS={'append': 'migrations2'}) def test_plan_handles_repeated_migrations(self): """ _generate_plan() doesn't readd migrations already in the plan (#29180). 
""" migration_loader = MigrationLoader(connection) nodes = [('migrations', '0002_second'), ('migrations2', '0001_initial')] self.assertEqual( migration_loader.graph._generate_plan(nodes, at_end=True), [('migrations', '0001_initial'), ('migrations', '0002_second'), ('migrations2', '0001_initial')] ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_unmigdep"}) def test_load_unmigrated_dependency(self): """ Makes sure the loader can load migrations with a dependency on an unmigrated app. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0001_initial")), [ ('contenttypes', '0001_initial'), ('auth', '0001_initial'), ("migrations", "0001_initial"), ], ) # Now render it out! project_state = migration_loader.project_state(("migrations", "0001_initial")) self.assertEqual(len([m for a, m in project_state.models if a == "migrations"]), 1) book_state = project_state.models["migrations", "book"] self.assertEqual(list(book_state.fields), ['id', 'user']) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}) def test_run_before(self): """ Makes sure the loader uses Migration.run_before. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0002_second")), [ ("migrations", "0001_initial"), ("migrations", "0003_third"), ("migrations", "0002_second"), ], ) @override_settings(MIGRATION_MODULES={ "migrations": "migrations.test_migrations_first", "migrations2": "migrations2.test_migrations_2_first", }) @modify_settings(INSTALLED_APPS={'append': 'migrations2'}) def test_first(self): """ Makes sure the '__first__' migrations build correctly. """ migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "second")), [ ("migrations", "thefirst"), ("migrations2", "0001_initial"), ("migrations2", "0002_second"), ("migrations", "second"), ], ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_name_match(self): "Tests prefix name matching" migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.get_migration_by_prefix("migrations", "0001").name, "0001_initial", ) msg = "There is more than one migration for 'migrations' with the prefix '0'" with self.assertRaisesMessage(AmbiguityError, msg): migration_loader.get_migration_by_prefix("migrations", "0") msg = "There is no migration for 'migrations' with the prefix 'blarg'" with self.assertRaisesMessage(KeyError, msg): migration_loader.get_migration_by_prefix("migrations", "blarg") def test_load_import_error(self): with override_settings(MIGRATION_MODULES={"migrations": "import_error_package"}): with self.assertRaises(ImportError): MigrationLoader(connection) def test_load_module_file(self): with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.file"}): loader = MigrationLoader(connection) self.assertIn( "migrations", loader.unmigrated_apps, "App with migrations module file not in unmigrated apps." ) def test_load_empty_dir(self): with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.namespace"}): loader = MigrationLoader(connection) self.assertIn( "migrations", loader.unmigrated_apps, "App missing __init__.py in migrations module not in unmigrated apps." 
) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], ) def test_marked_as_migrated(self): """ Undefined MIGRATION_MODULES implies default migration module. """ migration_loader = MigrationLoader(connection) self.assertEqual(migration_loader.migrated_apps, {'migrated_app'}) self.assertEqual(migration_loader.unmigrated_apps, set()) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], MIGRATION_MODULES={"migrated_app": None}, ) def test_marked_as_unmigrated(self): """ MIGRATION_MODULES allows disabling of migrations for a particular app. """ migration_loader = MigrationLoader(connection) self.assertEqual(migration_loader.migrated_apps, set()) self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'}) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], MIGRATION_MODULES={'migrated_app': 'missing-module'}, ) def test_explicit_missing_module(self): """ If a MIGRATION_MODULES override points to a missing module, the error raised during the importation attempt should be propagated unless `ignore_no_migrations=True`. """ with self.assertRaisesMessage(ImportError, 'missing-module'): migration_loader = MigrationLoader(connection) migration_loader = MigrationLoader(connection, ignore_no_migrations=True) self.assertEqual(migration_loader.migrated_apps, set()) self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'}) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_loading_squashed(self): "Tests loading a squashed migration" migration_loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) # Loading with nothing applied should just give us the one node self.assertEqual( len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]), 1, ) # However, fake-apply one migration and it should now use the old two self.record_applied(recorder, 'migrations', '0001_initial') migration_loader.build_graph() self.assertEqual( len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]), 2, ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}) def test_loading_squashed_complex(self): "Tests loading a complex set of squashed migrations" loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) def num_nodes(): plan = set(loader.graph.forwards_plan(('migrations', '7_auto'))) return len(plan - loader.applied_migrations.keys()) # Empty database: use squashed migration loader.build_graph() self.assertEqual(num_nodes(), 5) # Starting at 1 or 2 should use the squashed migration too self.record_applied(recorder, 'migrations', '1_auto') loader.build_graph() self.assertEqual(num_nodes(), 4) self.record_applied(recorder, 'migrations', '2_auto') loader.build_graph() self.assertEqual(num_nodes(), 3) # However, starting at 3 to 5 cannot use the squashed migration self.record_applied(recorder, 'migrations', '3_auto') loader.build_graph() self.assertEqual(num_nodes(), 4) self.record_applied(recorder, 'migrations', '4_auto') loader.build_graph() self.assertEqual(num_nodes(), 3) # Starting at 5 to 7 we are past the squashed migrations. 
self.record_applied(recorder, 'migrations', '5_auto') loader.build_graph() self.assertEqual(num_nodes(), 2) self.record_applied(recorder, 'migrations', '6_auto') loader.build_graph() self.assertEqual(num_nodes(), 1) self.record_applied(recorder, 'migrations', '7_auto') loader.build_graph() self.assertEqual(num_nodes(), 0) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_complex_multi_apps.app1", "app2": "migrations.test_migrations_squashed_complex_multi_apps.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_complex_multi_apps.app1", "migrations.test_migrations_squashed_complex_multi_apps.app2", ]}) def test_loading_squashed_complex_multi_apps(self): loader = MigrationLoader(connection) loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) expected_plan = { ('app1', '1_auto'), ('app2', '1_squashed_2'), ('app1', '2_squashed_3'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_complex_multi_apps.app1", "app2": "migrations.test_migrations_squashed_complex_multi_apps.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_complex_multi_apps.app1", "migrations.test_migrations_squashed_complex_multi_apps.app2", ]}) def test_loading_squashed_complex_multi_apps_partially_applied(self): loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.record_applied(recorder, 'app1', '1_auto') self.record_applied(recorder, 'app1', '2_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app2', '1_squashed_2'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_erroneous"}) def test_loading_squashed_erroneous(self): "Tests loading a complex but erroneous set of squashed migrations" loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) def num_nodes(): plan = set(loader.graph.forwards_plan(('migrations', '7_auto'))) return len(plan - loader.applied_migrations.keys()) # Empty database: use squashed migration loader.build_graph() self.assertEqual(num_nodes(), 5) # Starting at 1 or 2 should use the squashed migration too self.record_applied(recorder, 'migrations', '1_auto') loader.build_graph() self.assertEqual(num_nodes(), 4) self.record_applied(recorder, 'migrations', '2_auto') loader.build_graph() self.assertEqual(num_nodes(), 3) # However, starting at 3 or 4, nonexistent migrations would be needed. msg = ("Migration migrations.6_auto depends on nonexistent node ('migrations', '5_auto'). 
" "Django tried to replace migration migrations.5_auto with any of " "[migrations.3_squashed_5] but wasn't able to because some of the replaced " "migrations are already applied.") self.record_applied(recorder, 'migrations', '3_auto') with self.assertRaisesMessage(NodeNotFoundError, msg): loader.build_graph() self.record_applied(recorder, 'migrations', '4_auto') with self.assertRaisesMessage(NodeNotFoundError, msg): loader.build_graph() # Starting at 5 to 7 we are passed the squashed migrations self.record_applied(recorder, 'migrations', '5_auto') loader.build_graph() self.assertEqual(num_nodes(), 2) self.record_applied(recorder, 'migrations', '6_auto') loader.build_graph() self.assertEqual(num_nodes(), 1) self.record_applied(recorder, 'migrations', '7_auto') loader.build_graph() self.assertEqual(num_nodes(), 0) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}, INSTALLED_APPS=['migrations'], ) def test_check_consistent_history(self): loader = MigrationLoader(connection=None) loader.check_consistent_history(connection) recorder = MigrationRecorder(connection) self.record_applied(recorder, 'migrations', '0002_second') msg = ( "Migration migrations.0002_second is applied before its dependency " "migrations.0001_initial on database 'default'." ) with self.assertRaisesMessage(InconsistentMigrationHistory, msg): loader.check_consistent_history(connection) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed_extra'}, INSTALLED_APPS=['migrations'], ) def test_check_consistent_history_squashed(self): """ MigrationLoader.check_consistent_history() should ignore unapplied squashed migrations that have all of their `replaces` applied. """ loader = MigrationLoader(connection=None) recorder = MigrationRecorder(connection) self.record_applied(recorder, 'migrations', '0001_initial') self.record_applied(recorder, 'migrations', '0002_second') loader.check_consistent_history(connection) self.record_applied(recorder, 'migrations', '0003_third') loader.check_consistent_history(connection) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_ref_squashed.app1", "app2": "migrations.test_migrations_squashed_ref_squashed.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_ref_squashed.app1", "migrations.test_migrations_squashed_ref_squashed.app2", ]}) def test_loading_squashed_ref_squashed(self): "Tests loading a squashed migration with a new migration referencing it" r""" The sample migrations are structured like this: app_1 1 --> 2 ---------------------*--> 3 *--> 4 \ / / *-------------------*----/--> 2_sq_3 --* \ / / =============== \ ============= / == / ====================== app_2 *--> 1_sq_2 --* / \ / *--> 1 --> 2 --* Where 2_sq_3 is a replacing migration for 2 and 3 in app_1, as 1_sq_2 is a replacing migration for 1 and 2 in app_2. """ loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) # Load with nothing applied: both migrations squashed. loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app1', '1_auto'), ('app2', '1_squashed_2'), ('app1', '2_squashed_3'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) # Load with nothing applied and migrate to a replaced migration. # Not possible if loader.replace_migrations is True (default). 
loader.build_graph() msg = "Node ('app1', '3_auto') not a valid node" with self.assertRaisesMessage(NodeNotFoundError, msg): loader.graph.forwards_plan(('app1', '3_auto')) # Possible if loader.replace_migrations is False. loader.replace_migrations = False loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '3_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app1', '1_auto'), ('app2', '1_auto'), ('app2', '2_auto'), ('app1', '2_auto'), ('app1', '3_auto'), } self.assertEqual(plan, expected_plan) loader.replace_migrations = True # Fake-apply a few from app1: unsquashes migration in app1. self.record_applied(recorder, 'app1', '1_auto') self.record_applied(recorder, 'app1', '2_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app2', '1_squashed_2'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) # Fake-apply one from app2: unsquashes migration in app2 too. self.record_applied(recorder, 'app2', '1_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app2', '2_auto'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_private'}) def test_ignore_files(self): """Files prefixed with underscore, tilde, or dot aren't loaded.""" loader = MigrationLoader(connection) loader.load_disk() migrations = [name for app, name in loader.disk_migrations if app == 'migrations'] self.assertEqual(migrations, ['0001_initial']) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations_namespace_package'}, ) def test_loading_namespace_package(self): """Migration directories without an __init__.py file are ignored.""" loader = MigrationLoader(connection) loader.load_disk() migrations = [name for app, name in loader.disk_migrations if app == 'migrations'] self.assertEqual(migrations, []) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_loading_package_without__file__(self): """ To support frozen environments, MigrationLoader loads migrations from regular packages with no __file__ attribute. """ test_module = import_module('migrations.test_migrations') loader = MigrationLoader(connection) # __file__ == __spec__.origin or the latter is None and former is # undefined. module_file = test_module.__file__ module_origin = test_module.__spec__.origin module_has_location = test_module.__spec__.has_location try: del test_module.__file__ test_module.__spec__.origin = None test_module.__spec__.has_location = False loader.load_disk() migrations = [ name for app, name in loader.disk_migrations if app == 'migrations' ] self.assertCountEqual(migrations, ['0001_initial', '0002_second']) finally: test_module.__file__ = module_file test_module.__spec__.origin = module_origin test_module.__spec__.has_location = module_has_location class PycLoaderTests(MigrationTestBase): def test_valid(self): """ To support frozen environments, MigrationLoader loads .pyc migrations. """ with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir: # Compile .py files to .pyc files and delete .py files. 
compileall.compile_dir(migration_dir, force=True, quiet=1, legacy=True) for name in os.listdir(migration_dir): if name.endswith('.py'): os.remove(os.path.join(migration_dir, name)) loader = MigrationLoader(connection) self.assertIn(('migrations', '0001_initial'), loader.disk_migrations) def test_invalid(self): """ MigrationLoader reraises ImportErrors caused by "bad magic number" pyc files with a more helpful message. """ with self.temporary_migration_module(module='migrations.test_migrations_bad_pyc') as migration_dir: # The -tpl suffix is to avoid the pyc exclusion in MANIFEST.in. os.rename( os.path.join(migration_dir, '0001_initial.pyc-tpl'), os.path.join(migration_dir, '0001_initial.pyc'), ) msg = ( r"Couldn't import '\w+.migrations.0001_initial' as it appears " "to be a stale .pyc file." ) with self.assertRaisesRegex(ImportError, msg): MigrationLoader(connection)
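# --- Illustrative sketch (not part of the test suite above) ---------------
# The loader tests above repeatedly compute a forwards plan and subtract the
# migrations the recorder reports as applied. As a usage sketch, the same
# public pieces (MigrationLoader, graph.forwards_plan(), applied_migrations,
# check_consistent_history()) could be combined outside a test like this. The
# helper name and the default target node are example values, not Django API.
from django.db import connection
from django.db.migrations.loader import MigrationLoader


def unapplied_plan(target=('migrations', '0002_second')):
    """Return the forwards plan to `target`, minus already-applied nodes."""
    loader = MigrationLoader(connection)
    # Raises InconsistentMigrationHistory if a migration was applied before
    # one of its dependencies, as exercised in test_check_consistent_history.
    loader.check_consistent_history(connection)
    plan = loader.graph.forwards_plan(target)
    # applied_migrations is keyed by (app_label, migration_name) tuples.
    return [node for node in plan if node not in loader.applied_migrations]
# --------------------------------------------------------------------------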
from django.core.exceptions import FieldDoesNotExist from django.db import ( IntegrityError, connection, migrations, models, transaction, ) from django.db.migrations.migration import Migration from django.db.migrations.operations.fields import FieldOperation from django.db.migrations.state import ModelState, ProjectState from django.db.models.functions import Abs from django.db.transaction import atomic from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature from django.test.utils import CaptureQueriesContext from .models import FoodManager, FoodQuerySet, UnicodeModel from .test_base import OperationTestBase class Mixin: pass class OperationTests(OperationTestBase): """ Tests running the operations and making sure they do what they say they do. Each test looks at their state changing, and then their database operation - both forwards and backwards. """ def test_create_model(self): """ Tests the CreateModel operation. Most other tests use this operation as part of setup, so check failures here first. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) self.assertEqual(operation.describe(), "Create model Pony") self.assertEqual(operation.migration_name_fragment, 'pony') # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["fields", "name"]) # And default manager not in set operation = migrations.CreateModel("Foo", fields=[], managers=[("objects", models.Manager())]) definition = operation.deconstruct() self.assertNotIn('managers', definition[2]) def test_create_model_with_duplicate_field_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value pink in CreateModel fields argument.'): migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.TextField()), ("pink", models.IntegerField(default=1)), ], ) def test_create_model_with_duplicate_base(self): message = 'Found duplicate value test_crmo.pony in CreateModel bases argument.' with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.Pony",), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.pony",), ) message = 'Found duplicate value migrations.unicodemodel in CreateModel bases argument.' 
with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, UnicodeModel,), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.unicodemodel',), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.UnicodeModel',), ) message = "Found duplicate value <class 'django.db.models.base.Model'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(models.Model, models.Model,), ) message = "Found duplicate value <class 'migrations.test_operations.Mixin'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(Mixin, Mixin,), ) def test_create_model_with_duplicate_manager_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value objects in CreateModel managers argument.'): migrations.CreateModel( "Pony", fields=[], managers=[ ("objects", models.Manager()), ("objects", models.Manager()), ], ) def test_create_model_with_unique_after(self): """ Tests the CreateModel operation directly followed by an AlterUniqueTogether (bug #22844 - sqlite remake issues) """ operation1 = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) operation2 = migrations.CreateModel( "Rider", [ ("id", models.AutoField(primary_key=True)), ("number", models.IntegerField(default=1)), ("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)), ], ) operation3 = migrations.AlterUniqueTogether( "Rider", [ ("number", "pony"), ], ) # Test the database alteration project_state = ProjectState() self.assertTableNotExists("test_crmoua_pony") self.assertTableNotExists("test_crmoua_rider") with connection.schema_editor() as editor: new_state = project_state.clone() operation1.state_forwards("test_crmoua", new_state) operation1.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation2.state_forwards("test_crmoua", new_state) operation2.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation3.state_forwards("test_crmoua", new_state) operation3.database_forwards("test_crmoua", editor, project_state, new_state) self.assertTableExists("test_crmoua_pony") self.assertTableExists("test_crmoua_rider") def test_create_model_m2m(self): """ Test the creation of a model with a ManyToMany field and the auto-created "through" model. 
""" project_state = self.set_up_test_model("test_crmomm") operation = migrations.CreateModel( "Stable", [ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("Pony", related_name="stables")) ] ) # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_crmomm", new_state) # Test the database alteration self.assertTableNotExists("test_crmomm_stable_ponies") with connection.schema_editor() as editor: operation.database_forwards("test_crmomm", editor, project_state, new_state) self.assertTableExists("test_crmomm_stable") self.assertTableExists("test_crmomm_stable_ponies") self.assertColumnNotExists("test_crmomm_stable", "ponies") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_crmomm", "Pony") Stable = new_state.apps.get_model("test_crmomm", "Stable") stable = Stable.objects.create() p1 = Pony.objects.create(pink=False, weight=4.55) p2 = Pony.objects.create(pink=True, weight=5.43) stable.ponies.add(p1, p2) self.assertEqual(stable.ponies.count(), 2) stable.ponies.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmomm", editor, new_state, project_state) self.assertTableNotExists("test_crmomm_stable") self.assertTableNotExists("test_crmomm_stable_ponies") def test_create_model_inheritance(self): """ Tests the CreateModel operation on a multi-table inheritance setup. """ project_state = self.set_up_test_model("test_crmoih") # Test the state alteration operation = migrations.CreateModel( "ShetlandPony", [ ('pony_ptr', models.OneToOneField( 'test_crmoih.Pony', models.CASCADE, auto_created=True, primary_key=True, to_field='id', serialize=False, )), ("cuteness", models.IntegerField(default=1)), ], ) new_state = project_state.clone() operation.state_forwards("test_crmoih", new_state) self.assertIn(("test_crmoih", "shetlandpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crmoih_shetlandpony") with connection.schema_editor() as editor: operation.database_forwards("test_crmoih", editor, project_state, new_state) self.assertTableExists("test_crmoih_shetlandpony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmoih", editor, new_state, project_state) self.assertTableNotExists("test_crmoih_shetlandpony") def test_create_proxy_model(self): """ CreateModel ignores proxy models. 
""" project_state = self.set_up_test_model("test_crprmo") # Test the state alteration operation = migrations.CreateModel( "ProxyPony", [], options={"proxy": True}, bases=("test_crprmo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model ProxyPony") new_state = project_state.clone() operation.state_forwards("test_crprmo", new_state) self.assertIn(("test_crprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crprmo", editor, project_state, new_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crprmo", editor, new_state, project_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["bases", "fields", "name", "options"]) def test_create_unmanaged_model(self): """ CreateModel ignores unmanaged models. """ project_state = self.set_up_test_model("test_crummo") # Test the state alteration operation = migrations.CreateModel( "UnmanagedPony", [], options={"proxy": True}, bases=("test_crummo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony") new_state = project_state.clone() operation.state_forwards("test_crummo", new_state) self.assertIn(("test_crummo", "unmanagedpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crummo", editor, project_state, new_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crummo", editor, new_state, project_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") @skipUnlessDBFeature('supports_table_check_constraints') def test_create_model_with_constraint(self): where = models.Q(pink__gt=2) check_constraint = models.CheckConstraint(check=where, name='test_constraint_pony_pink_gt_2') operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=3)), ], options={'constraints': [check_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") with connection.cursor() as cursor: with self.assertRaises(IntegrityError): cursor.execute("INSERT INTO test_crmo_pony (id, pink) VALUES (1, 1)") # Test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # Test deconstruction definition = operation.deconstruct() 
self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [check_constraint]) def test_create_model_with_partial_unique_constraint(self): partial_unique_constraint = models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ('weight', models.FloatField()), ], options={'constraints': [partial_unique_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists('test_crmo_pony') with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') # Test constraint works Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test reversal with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Test deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [partial_unique_constraint]) def test_create_model_with_deferred_unique_constraint(self): deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferrable_pink_constraint', deferrable=models.Deferrable.DEFERRED, ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ], options={'constraints': [deferred_unique_constraint]}, ) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) self.assertTableNotExists('test_crmo_pony') # Create table. with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1) obj.pink = 3 obj.save() else: Pony.objects.create(pink=1) # Reversal. with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual( definition[2]['options']['constraints'], [deferred_unique_constraint], ) @skipUnlessDBFeature('supports_covering_indexes') def test_create_model_with_covering_unique_constraint(self): covering_unique_constraint = models.UniqueConstraint( fields=['pink'], include=['weight'], name='test_constraint_pony_pink_covering_weight', ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ('weight', models.FloatField()), ], options={'constraints': [covering_unique_constraint]}, ) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) self.assertTableNotExists('test_crmo_pony') # Create table. with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1, weight=4.0) with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=7.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual( definition[2]['options']['constraints'], [covering_unique_constraint], ) def test_create_model_managers(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_cmoma") # Test the state alteration operation = migrations.CreateModel( "Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Create model Food") new_state = project_state.clone() operation.state_forwards("test_cmoma", new_state) self.assertIn(("test_cmoma", "food"), new_state.models) managers = new_state.models["test_cmoma", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) def test_delete_model(self): """ Tests the DeleteModel operation. 
""" project_state = self.set_up_test_model("test_dlmo") # Test the state alteration operation = migrations.DeleteModel("Pony") self.assertEqual(operation.describe(), "Delete model Pony") self.assertEqual(operation.migration_name_fragment, 'delete_pony') new_state = project_state.clone() operation.state_forwards("test_dlmo", new_state) self.assertNotIn(("test_dlmo", "pony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dlmo", editor, project_state, new_state) self.assertTableNotExists("test_dlmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlmo", editor, new_state, project_state) self.assertTableExists("test_dlmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "DeleteModel") self.assertEqual(definition[1], []) self.assertEqual(list(definition[2]), ["name"]) def test_delete_proxy_model(self): """ Tests the DeleteModel operation ignores proxy models. """ project_state = self.set_up_test_model("test_dlprmo", proxy_model=True) # Test the state alteration operation = migrations.DeleteModel("ProxyPony") new_state = project_state.clone() operation.state_forwards("test_dlprmo", new_state) self.assertIn(("test_dlprmo", "proxypony"), project_state.models) self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") with connection.schema_editor() as editor: operation.database_forwards("test_dlprmo", editor, project_state, new_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlprmo", editor, new_state, project_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") def test_delete_mti_model(self): project_state = self.set_up_test_model('test_dlmtimo', mti_model=True) # Test the state alteration operation = migrations.DeleteModel('ShetlandPony') new_state = project_state.clone() operation.state_forwards('test_dlmtimo', new_state) self.assertIn(('test_dlmtimo', 'shetlandpony'), project_state.models) self.assertNotIn(('test_dlmtimo', 'shetlandpony'), new_state.models) # Test the database alteration self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') with connection.schema_editor() as editor: operation.database_forwards('test_dlmtimo', editor, project_state, new_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableNotExists('test_dlmtimo_shetlandpony') # And test reversal with connection.schema_editor() as editor: operation.database_backwards('test_dlmtimo', editor, new_state, project_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') def test_rename_model(self): """ Tests the RenameModel operation. 
""" project_state = self.set_up_test_model("test_rnmo", related_model=True) # Test the state alteration operation = migrations.RenameModel("Pony", "Horse") self.assertEqual(operation.describe(), "Rename model Pony to Horse") self.assertEqual(operation.migration_name_fragment, 'rename_pony_horse') # Test initial state and database self.assertIn(("test_rnmo", "pony"), project_state.models) self.assertNotIn(("test_rnmo", "horse"), project_state.models) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate forwards new_state = project_state.clone() atomic_rename = connection.features.supports_atomic_references_rename new_state = self.apply_operations("test_rnmo", new_state, [operation], atomic=atomic_rename) # Test new state and database self.assertNotIn(("test_rnmo", "pony"), new_state.models) self.assertIn(("test_rnmo", "horse"), new_state.models) # RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( new_state.models['test_rnmo', 'rider'].fields['pony'].remote_field.model, 'test_rnmo.Horse', ) self.assertTableNotExists("test_rnmo_pony") self.assertTableExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate backwards original_state = self.unapply_operations("test_rnmo", project_state, [operation], atomic=atomic_rename) # Test original state and database self.assertIn(("test_rnmo", "pony"), original_state.models) self.assertNotIn(("test_rnmo", "horse"), original_state.models) self.assertEqual( original_state.models['test_rnmo', 'rider'].fields['pony'].remote_field.model, 'Pony', ) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'old_name': "Pony", 'new_name': "Horse"}) def test_rename_model_state_forwards(self): """ RenameModel operations shouldn't trigger the caching of rendered apps on state without prior apps. """ state = ProjectState() state.add_model(ModelState('migrations', 'Foo', [])) operation = migrations.RenameModel('Foo', 'Bar') operation.state_forwards('migrations', state) self.assertNotIn('apps', state.__dict__) self.assertNotIn(('migrations', 'foo'), state.models) self.assertIn(('migrations', 'bar'), state.models) # Now with apps cached. apps = state.apps operation = migrations.RenameModel('Bar', 'Foo') operation.state_forwards('migrations', state) self.assertIs(state.apps, apps) self.assertNotIn(('migrations', 'bar'), state.models) self.assertIn(('migrations', 'foo'), state.models) def test_rename_model_with_self_referential_fk(self): """ Tests the RenameModel operation on model with self referential FK. 
""" project_state = self.set_up_test_model("test_rmwsrf", related_model=True) # Test the state alteration operation = migrations.RenameModel("Rider", "HorseRider") self.assertEqual(operation.describe(), "Rename model Rider to HorseRider") new_state = project_state.clone() operation.state_forwards("test_rmwsrf", new_state) self.assertNotIn(("test_rmwsrf", "rider"), new_state.models) self.assertIn(("test_rmwsrf", "horserider"), new_state.models) # Remember, RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( 'self', new_state.models["test_rmwsrf", "horserider"].fields['friend'].remote_field.model ) HorseRider = new_state.apps.get_model('test_rmwsrf', 'horserider') self.assertIs(HorseRider._meta.get_field('horserider').remote_field.model, HorseRider) # Test the database alteration self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_forwards("test_rmwsrf", editor, project_state, new_state) self.assertTableNotExists("test_rmwsrf_rider") self.assertTableExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_horserider", "id")) # And test reversal with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_backwards("test_rmwsrf", editor, new_state, project_state) self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) def test_rename_model_with_superclass_fk(self): """ Tests the RenameModel operation on a model which has a superclass that has a foreign key. 
""" project_state = self.set_up_test_model("test_rmwsc", related_model=True, mti_model=True) # Test the state alteration operation = migrations.RenameModel("ShetlandPony", "LittleHorse") self.assertEqual(operation.describe(), "Rename model ShetlandPony to LittleHorse") new_state = project_state.clone() operation.state_forwards("test_rmwsc", new_state) self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models) self.assertIn(("test_rmwsc", "littlehorse"), new_state.models) # RenameModel shouldn't repoint the superclass's relations, only local ones self.assertEqual( project_state.models['test_rmwsc', 'rider'].fields['pony'].remote_field.model, new_state.models['test_rmwsc', 'rider'].fields['pony'].remote_field.model, ) # Before running the migration we have a table for Shetland Pony, not Little Horse self.assertTableExists("test_rmwsc_shetlandpony") self.assertTableNotExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # and the foreign key on rider points to pony, not shetland pony self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id")) with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: operation.database_forwards("test_rmwsc", editor, project_state, new_state) # Now we have a little horse table, not shetland pony self.assertTableNotExists("test_rmwsc_shetlandpony") self.assertTableExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # but the Foreign keys still point at pony, not little horse self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id")) def test_rename_model_with_self_referential_m2m(self): app_label = "test_rename_model_with_self_referential_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("ReflexivePony", fields=[ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("self")), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("ReflexivePony", "ReflexivePony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "ReflexivePony2") pony = Pony.objects.create() pony.ponies.add(pony) def test_rename_model_with_m2m(self): app_label = "test_rename_model_with_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Pony", "Pony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "Pony2") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) 
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_model_with_db_table_noop(self): app_label = 'test_rmwdbtn' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ], options={'db_table': 'rider'}), migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), ]) new_state = project_state.clone() operation = migrations.RenameModel('Rider', 'Runner') operation.state_forwards(app_label, new_state) with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards(app_label, editor, project_state, new_state) with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_backwards(app_label, editor, new_state, project_state) def test_rename_m2m_target_model(self): app_label = "test_rename_m2m_target_model" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Rider", "Rider2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider2") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_m2m_through_model(self): app_label = "test_rename_through" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("PonyRider", fields=[ ("id", models.AutoField(primary_key=True)), ("rider", models.ForeignKey("test_rename_through.Rider", models.CASCADE)), ("pony", models.ForeignKey("test_rename_through.Pony", models.CASCADE)), ]), migrations.AddField( "Pony", "riders", models.ManyToManyField("test_rename_through.Rider", through="test_rename_through.PonyRider"), ), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider") pony = Pony.objects.create() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("PonyRider", "PonyRider2"), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider2") pony = Pony.objects.first() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) self.assertEqual(Pony.objects.count(), 1) 
self.assertEqual(Rider.objects.count(), 2) self.assertEqual(PonyRider.objects.count(), 2) self.assertEqual(pony.riders.count(), 2) def test_rename_m2m_model_after_rename_field(self): """RenameModel renames a many-to-many column after a RenameField.""" app_label = 'test_rename_multiple' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=20)), ]), migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('test_rename_multiple.Pony', models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('riders', models.ManyToManyField('Rider')), ]), migrations.RenameField(model_name='pony', old_name='name', new_name='fancy_name'), migrations.RenameModel(old_name='Rider', new_name='Jockey'), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, 'Pony') Jockey = project_state.apps.get_model(app_label, 'Jockey') PonyRider = project_state.apps.get_model(app_label, 'PonyRider') # No "no such column" error means the column was renamed correctly. pony = Pony.objects.create(fancy_name='a good name') jockey = Jockey.objects.create(pony=pony) ponyrider = PonyRider.objects.create() ponyrider.riders.add(jockey) def test_add_field(self): """ Tests the AddField operation. """ # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=5), ) self.assertEqual(operation.describe(), "Add field height to Pony") self.assertEqual(operation.migration_name_fragment, 'pony_height') project_state, new_state = self.make_test_state("test_adfl", operation) self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4) field = new_state.models['test_adfl', 'pony'].fields['height'] self.assertEqual(field.default, 5) # Test the database alteration self.assertColumnNotExists("test_adfl_pony", "height") with connection.schema_editor() as editor: operation.database_forwards("test_adfl", editor, project_state, new_state) self.assertColumnExists("test_adfl_pony", "height") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adfl", editor, new_state, project_state) self.assertColumnNotExists("test_adfl_pony", "height") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"]) def test_add_charfield(self): """ Tests the AddField operation on TextField. """ project_state = self.set_up_test_model("test_adchfl") Pony = project_state.apps.get_model("test_adchfl", "Pony") pony = Pony.objects.create(weight=42) new_state = self.apply_operations("test_adchfl", project_state, [ migrations.AddField( "Pony", "text", models.CharField(max_length=10, default="some text"), ), migrations.AddField( "Pony", "empty", models.CharField(max_length=10, default=""), ), # If not properly quoted digits would be interpreted as an int. migrations.AddField( "Pony", "digits", models.CharField(max_length=10, default="42"), ), # Manual quoting is fragile and could trip on quotes. Refs #xyz. 
migrations.AddField( "Pony", "quotes", models.CharField(max_length=10, default='"\'"'), ), ]) Pony = new_state.apps.get_model("test_adchfl", "Pony") pony = Pony.objects.get(pk=pony.pk) self.assertEqual(pony.text, "some text") self.assertEqual(pony.empty, "") self.assertEqual(pony.digits, "42") self.assertEqual(pony.quotes, '"\'"') def test_add_textfield(self): """ Tests the AddField operation on TextField. """ project_state = self.set_up_test_model("test_adtxtfl") Pony = project_state.apps.get_model("test_adtxtfl", "Pony") pony = Pony.objects.create(weight=42) new_state = self.apply_operations("test_adtxtfl", project_state, [ migrations.AddField( "Pony", "text", models.TextField(default="some text"), ), migrations.AddField( "Pony", "empty", models.TextField(default=""), ), # If not properly quoted digits would be interpreted as an int. migrations.AddField( "Pony", "digits", models.TextField(default="42"), ), # Manual quoting is fragile and could trip on quotes. Refs #xyz. migrations.AddField( "Pony", "quotes", models.TextField(default='"\'"'), ), ]) Pony = new_state.apps.get_model("test_adtxtfl", "Pony") pony = Pony.objects.get(pk=pony.pk) self.assertEqual(pony.text, "some text") self.assertEqual(pony.empty, "") self.assertEqual(pony.digits, "42") self.assertEqual(pony.quotes, '"\'"') def test_add_binaryfield(self): """ Tests the AddField operation on TextField/BinaryField. """ project_state = self.set_up_test_model("test_adbinfl") Pony = project_state.apps.get_model("test_adbinfl", "Pony") pony = Pony.objects.create(weight=42) new_state = self.apply_operations("test_adbinfl", project_state, [ migrations.AddField( "Pony", "blob", models.BinaryField(default=b"some text"), ), migrations.AddField( "Pony", "empty", models.BinaryField(default=b""), ), # If not properly quoted digits would be interpreted as an int. migrations.AddField( "Pony", "digits", models.BinaryField(default=b"42"), ), # Manual quoting is fragile and could trip on quotes. Refs #xyz. migrations.AddField( "Pony", "quotes", models.BinaryField(default=b'"\'"'), ), ]) Pony = new_state.apps.get_model("test_adbinfl", "Pony") pony = Pony.objects.get(pk=pony.pk) # SQLite returns buffer/memoryview, cast to bytes for checking. self.assertEqual(bytes(pony.blob), b"some text") self.assertEqual(bytes(pony.empty), b"") self.assertEqual(bytes(pony.digits), b"42") self.assertEqual(bytes(pony.quotes), b'"\'"') def test_column_name_quoting(self): """ Column names that are SQL keywords shouldn't cause problems when used in migrations (#22168). """ project_state = self.set_up_test_model("test_regr22168") operation = migrations.AddField( "Pony", "order", models.IntegerField(default=0), ) new_state = project_state.clone() operation.state_forwards("test_regr22168", new_state) with connection.schema_editor() as editor: operation.database_forwards("test_regr22168", editor, project_state, new_state) self.assertColumnExists("test_regr22168_pony", "order") def test_add_field_preserve_default(self): """ Tests the AddField operation's state alteration when preserve_default = False. 
""" project_state = self.set_up_test_model("test_adflpd") # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=4), preserve_default=False, ) new_state = project_state.clone() operation.state_forwards("test_adflpd", new_state) self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4) field = new_state.models['test_adflpd', 'pony'].fields['height'] self.assertEqual(field.default, models.NOT_PROVIDED) # Test the database alteration project_state.apps.get_model("test_adflpd", "pony").objects.create( weight=4, ) self.assertColumnNotExists("test_adflpd_pony", "height") with connection.schema_editor() as editor: operation.database_forwards("test_adflpd", editor, project_state, new_state) self.assertColumnExists("test_adflpd_pony", "height") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name", "preserve_default"]) def test_add_field_m2m(self): """ Tests the AddField operation with a ManyToManyField. """ project_state = self.set_up_test_model("test_adflmm", second_model=True) # Test the state alteration operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) new_state = project_state.clone() operation.state_forwards("test_adflmm", new_state) self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4) # Test the database alteration self.assertTableNotExists("test_adflmm_pony_stables") with connection.schema_editor() as editor: operation.database_forwards("test_adflmm", editor, project_state, new_state) self.assertTableExists("test_adflmm_pony_stables") self.assertColumnNotExists("test_adflmm_pony", "stables") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_adflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.stables.create() self.assertEqual(p.stables.count(), 1) p.stables.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adflmm", editor, new_state, project_state) self.assertTableNotExists("test_adflmm_pony_stables") def test_alter_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertFalse(Pony._meta.get_field('stables').blank) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField( "Pony", "stables", models.ManyToManyField(to="Stable", related_name="ponies", blank=True) ) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertTrue(Pony._meta.get_field('stables').blank) def test_repoint_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True, third_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "places", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField("Pony", "places", models.ManyToManyField(to="Van", related_name="ponies")) ]) # Ensure the new field actually 
works Pony = project_state.apps.get_model("test_alflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.places.create() self.assertEqual(p.places.count(), 1) p.places.all().delete() def test_remove_field_m2m(self): project_state = self.set_up_test_model("test_rmflmm", second_model=True) project_state = self.apply_operations("test_rmflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) self.assertTableExists("test_rmflmm_pony_stables") with_field_state = project_state.clone() operations = [migrations.RemoveField("Pony", "stables")] project_state = self.apply_operations("test_rmflmm", project_state, operations=operations) self.assertTableNotExists("test_rmflmm_pony_stables") # And test reversal self.unapply_operations("test_rmflmm", with_field_state, operations=operations) self.assertTableExists("test_rmflmm_pony_stables") def test_remove_field_m2m_with_through(self): project_state = self.set_up_test_model("test_rmflmmwt", second_model=True) self.assertTableNotExists("test_rmflmmwt_ponystables") project_state = self.apply_operations("test_rmflmmwt", project_state, operations=[ migrations.CreateModel("PonyStables", fields=[ ("pony", models.ForeignKey('test_rmflmmwt.Pony', models.CASCADE)), ("stable", models.ForeignKey('test_rmflmmwt.Stable', models.CASCADE)), ]), migrations.AddField( "Pony", "stables", models.ManyToManyField("Stable", related_name="ponies", through='test_rmflmmwt.PonyStables') ) ]) self.assertTableExists("test_rmflmmwt_ponystables") operations = [migrations.RemoveField("Pony", "stables"), migrations.DeleteModel("PonyStables")] self.apply_operations("test_rmflmmwt", project_state, operations=operations) def test_remove_field(self): """ Tests the RemoveField operation. """ project_state = self.set_up_test_model("test_rmfl") # Test the state alteration operation = migrations.RemoveField("Pony", "pink") self.assertEqual(operation.describe(), "Remove field pink from Pony") self.assertEqual(operation.migration_name_fragment, 'remove_pony_pink') new_state = project_state.clone() operation.state_forwards("test_rmfl", new_state) self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2) # Test the database alteration self.assertColumnExists("test_rmfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_rmfl", editor, project_state, new_state) self.assertColumnNotExists("test_rmfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmfl", editor, new_state, project_state) self.assertColumnExists("test_rmfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RemoveField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': 'pink'}) def test_remove_fk(self): """ Tests the RemoveField operation on a foreign key. 
""" project_state = self.set_up_test_model("test_rfk", related_model=True) self.assertColumnExists("test_rfk_rider", "pony_id") operation = migrations.RemoveField("Rider", "pony") new_state = project_state.clone() operation.state_forwards("test_rfk", new_state) with connection.schema_editor() as editor: operation.database_forwards("test_rfk", editor, project_state, new_state) self.assertColumnNotExists("test_rfk_rider", "pony_id") with connection.schema_editor() as editor: operation.database_backwards("test_rfk", editor, new_state, project_state) self.assertColumnExists("test_rfk_rider", "pony_id") def test_alter_model_table(self): """ Tests the AlterModelTable operation. """ project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony_2") self.assertEqual(operation.describe(), "Rename table for Pony to test_almota_pony_2") self.assertEqual(operation.migration_name_fragment, 'alter_pony_table') new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2") # Test the database alteration self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableNotExists("test_almota_pony") self.assertTableExists("test_almota_pony_2") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelTable") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'table': "test_almota_pony_2"}) def test_alter_model_table_none(self): """ Tests the AlterModelTable operation if the table name is set to None. """ operation = migrations.AlterModelTable("Pony", None) self.assertEqual(operation.describe(), "Rename table for Pony to (default)") def test_alter_model_table_noop(self): """ Tests the AlterModelTable operation if the table name is not changed. """ project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony") new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony") # Test the database alteration self.assertTableExists("test_almota_pony") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableExists("test_almota_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") def test_alter_model_table_m2m(self): """ AlterModelTable should rename auto-generated M2M tables. 
""" app_label = "test_talflmltlm2m" pony_db_table = 'pony_foo' project_state = self.set_up_test_model(app_label, second_model=True, db_table=pony_db_table) # Add the M2M field first_state = project_state.clone() operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable")) operation.state_forwards(app_label, first_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, first_state) original_m2m_table = "%s_%s" % (pony_db_table, "stables") new_m2m_table = "%s_%s" % (app_label, "pony_stables") self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) # Rename the Pony db_table which should also rename the m2m table. second_state = first_state.clone() operation = migrations.AlterModelTable(name='pony', table=None) operation.state_forwards(app_label, second_state) atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_forwards(app_label, editor, first_state, second_state) self.assertTableExists(new_m2m_table) self.assertTableNotExists(original_m2m_table) # And test reversal with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_backwards(app_label, editor, second_state, first_state) self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) def test_alter_field(self): """ Tests the AlterField operation. """ project_state = self.set_up_test_model("test_alfl") # Test the state alteration operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) self.assertEqual(operation.describe(), "Alter field pink on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_pink') new_state = project_state.clone() operation.state_forwards("test_alfl", new_state) self.assertIs(project_state.models['test_alfl', 'pony'].fields['pink'].null, False) self.assertIs(new_state.models['test_alfl', 'pony'].fields['pink'].null, True) # Test the database alteration self.assertColumnNotNull("test_alfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_alfl", editor, project_state, new_state) self.assertColumnNull("test_alfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alfl", editor, new_state, project_state) self.assertColumnNotNull("test_alfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"]) def test_alter_field_add_db_column_noop(self): """ AlterField operation is a noop when adding only a db_column and the column name is not changed. 
""" app_label = 'test_afadbn' project_state = self.set_up_test_model(app_label, related_model=True) pony_table = '%s_pony' % app_label new_state = project_state.clone() operation = migrations.AlterField('Pony', 'weight', models.FloatField(db_column='weight')) operation.state_forwards(app_label, new_state) self.assertIsNone( project_state.models[app_label, 'pony'].fields['weight'].db_column, ) self.assertEqual( new_state.models[app_label, 'pony'].fields['weight'].db_column, 'weight', ) self.assertColumnExists(pony_table, 'weight') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards(app_label, editor, project_state, new_state) self.assertColumnExists(pony_table, 'weight') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_backwards(app_label, editor, new_state, project_state) self.assertColumnExists(pony_table, 'weight') rider_table = '%s_rider' % app_label new_state = project_state.clone() operation = migrations.AlterField( 'Rider', 'pony', models.ForeignKey('Pony', models.CASCADE, db_column='pony_id'), ) operation.state_forwards(app_label, new_state) self.assertIsNone( project_state.models[app_label, 'rider'].fields['pony'].db_column, ) self.assertIs( new_state.models[app_label, 'rider'].fields['pony'].db_column, 'pony_id', ) self.assertColumnExists(rider_table, 'pony_id') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards(app_label, editor, project_state, new_state) self.assertColumnExists(rider_table, 'pony_id') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards(app_label, editor, new_state, project_state) self.assertColumnExists(rider_table, 'pony_id') def test_alter_field_pk(self): """ Tests the AlterField operation on primary keys (for things like PostgreSQL's SERIAL weirdness) """ project_state = self.set_up_test_model("test_alflpk") # Test the state alteration operation = migrations.AlterField("Pony", "id", models.IntegerField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpk", new_state) self.assertIsInstance( project_state.models['test_alflpk', 'pony'].fields['id'], models.AutoField, ) self.assertIsInstance( new_state.models['test_alflpk', 'pony'].fields['id'], models.IntegerField, ) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpk", editor, project_state, new_state) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpk", editor, new_state, project_state) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_pk_fk(self): """ Tests the AlterField operation on primary keys changes any FKs pointing to it. 
""" project_state = self.set_up_test_model("test_alflpkfk", related_model=True) project_state = self.apply_operations('test_alflpkfk', project_state, [ migrations.CreateModel('Stable', fields=[ ('ponies', models.ManyToManyField('Pony')), ]), migrations.AddField( 'Pony', 'stables', models.ManyToManyField('Stable'), ), ]) # Test the state alteration operation = migrations.AlterField("Pony", "id", models.FloatField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpkfk", new_state) self.assertIsInstance( project_state.models['test_alflpkfk', 'pony'].fields['id'], models.AutoField, ) self.assertIsInstance( new_state.models['test_alflpkfk', 'pony'].fields['id'], models.FloatField, ) def assertIdTypeEqualsFkType(): with connection.cursor() as cursor: id_type, id_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_pony") if c.name == "id" ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_rider") if c.name == "pony_id" ][0] m2m_fk_type, m2m_fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description( cursor, 'test_alflpkfk_pony_stables', ) if c.name == 'pony_id' ][0] remote_m2m_fk_type, remote_m2m_fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description( cursor, 'test_alflpkfk_stable_ponies', ) if c.name == 'pony_id' ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_type, m2m_fk_type) self.assertEqual(id_type, remote_m2m_fk_type) self.assertEqual(id_null, fk_null) self.assertEqual(id_null, m2m_fk_null) self.assertEqual(id_null, remote_m2m_fk_null) assertIdTypeEqualsFkType() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpkfk", editor, project_state, new_state) assertIdTypeEqualsFkType() if connection.features.supports_foreign_keys: self.assertFKExists( 'test_alflpkfk_pony_stables', ['pony_id'], ('test_alflpkfk_pony', 'id'), ) self.assertFKExists( 'test_alflpkfk_stable_ponies', ['pony_id'], ('test_alflpkfk_pony', 'id'), ) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpkfk", editor, new_state, project_state) assertIdTypeEqualsFkType() if connection.features.supports_foreign_keys: self.assertFKExists( 'test_alflpkfk_pony_stables', ['pony_id'], ('test_alflpkfk_pony', 'id'), ) self.assertFKExists( 'test_alflpkfk_stable_ponies', ['pony_id'], ('test_alflpkfk_pony', 'id'), ) def test_alter_field_pk_mti_fk(self): app_label = 'test_alflpkmtifk' project_state = self.set_up_test_model(app_label, mti_model=True) project_state = self.apply_operations(app_label, project_state, [ migrations.CreateModel('ShetlandRider', fields=[ ( 'pony', models.ForeignKey(f'{app_label}.ShetlandPony', models.CASCADE), ), ]), ]) operation = migrations.AlterField( 'Pony', 'id', models.BigAutoField(primary_key=True), ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertIsInstance( new_state.models[app_label, 'pony'].fields['id'], models.BigAutoField, ) def _get_column_id_type(cursor, table, column): return [ c.type_code for c in connection.introspection.get_table_description( cursor, f'{app_label}_{table}', ) if c.name == column ][0] def assertIdTypeEqualsMTIFkType(): with connection.cursor() as cursor: parent_id_type = _get_column_id_type(cursor, 'pony', 'id') child_id_type = _get_column_id_type(cursor, 'shetlandpony', 
'pony_ptr_id') mti_id_type = _get_column_id_type(cursor, 'shetlandrider', 'pony_id') self.assertEqual(parent_id_type, child_id_type) self.assertEqual(parent_id_type, mti_id_type) assertIdTypeEqualsMTIFkType() # Alter primary key. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) assertIdTypeEqualsMTIFkType() if connection.features.supports_foreign_keys: self.assertFKExists( f'{app_label}_shetlandpony', ['pony_ptr_id'], (f'{app_label}_pony', 'id'), ) self.assertFKExists( f'{app_label}_shetlandrider', ['pony_id'], (f'{app_label}_shetlandpony', 'pony_ptr_id'), ) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) assertIdTypeEqualsMTIFkType() if connection.features.supports_foreign_keys: self.assertFKExists( f'{app_label}_shetlandpony', ['pony_ptr_id'], (f'{app_label}_pony', 'id'), ) self.assertFKExists( f'{app_label}_shetlandrider', ['pony_id'], (f'{app_label}_shetlandpony', 'pony_ptr_id'), ) def test_alter_field_pk_mti_and_fk_to_base(self): app_label = 'test_alflpkmtiftb' project_state = self.set_up_test_model( app_label, mti_model=True, related_model=True, ) operation = migrations.AlterField( 'Pony', 'id', models.BigAutoField(primary_key=True), ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertIsInstance( new_state.models[app_label, 'pony'].fields['id'], models.BigAutoField, ) def _get_column_id_type(cursor, table, column): return [ c.type_code for c in connection.introspection.get_table_description( cursor, f'{app_label}_{table}', ) if c.name == column ][0] def assertIdTypeEqualsMTIFkType(): with connection.cursor() as cursor: parent_id_type = _get_column_id_type(cursor, 'pony', 'id') fk_id_type = _get_column_id_type(cursor, 'rider', 'pony_id') child_id_type = _get_column_id_type(cursor, 'shetlandpony', 'pony_ptr_id') self.assertEqual(parent_id_type, child_id_type) self.assertEqual(parent_id_type, fk_id_type) assertIdTypeEqualsMTIFkType() # Alter primary key. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) assertIdTypeEqualsMTIFkType() if connection.features.supports_foreign_keys: self.assertFKExists( f'{app_label}_shetlandpony', ['pony_ptr_id'], (f'{app_label}_pony', 'id'), ) self.assertFKExists( f'{app_label}_rider', ['pony_id'], (f'{app_label}_pony', 'id'), ) # Reversal. 
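        # Unapplying the AlterField should restore the original AutoField
        # primary key and keep the MTI parent link and FK column types in sync.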
with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) assertIdTypeEqualsMTIFkType() if connection.features.supports_foreign_keys: self.assertFKExists( f'{app_label}_shetlandpony', ['pony_ptr_id'], (f'{app_label}_pony', 'id'), ) self.assertFKExists( f'{app_label}_rider', ['pony_id'], (f'{app_label}_pony', 'id'), ) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_reloads_state_on_fk_with_to_field_target_type_change(self): app_label = 'test_alflrsfkwtflttc' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('code', models.IntegerField(unique=True)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='code')), ]), ]) operation = migrations.AlterField( 'Rider', 'code', models.CharField(max_length=100, unique=True), ) self.apply_operations(app_label, project_state, operations=[operation]) id_type, id_null = [ (c.type_code, c.null_ok) for c in self.get_table_description('%s_rider' % app_label) if c.name == 'code' ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in self.get_table_description('%s_pony' % app_label) if c.name == 'rider_id' ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_null, fk_null) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_reloads_state_on_fk_with_to_field_related_name_target_type_change(self): app_label = 'test_alflrsfkwtflrnttc' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('code', models.PositiveIntegerField(unique=True)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('rider', models.ForeignKey( '%s.Rider' % app_label, models.CASCADE, to_field='code', related_name='+', )), ]), ]) operation = migrations.AlterField( 'Rider', 'code', models.CharField(max_length=100, unique=True), ) self.apply_operations(app_label, project_state, operations=[operation]) def test_alter_field_reloads_state_on_fk_target_changes(self): """ If AlterField doesn't reload state appropriately, the second AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. 
""" app_label = 'alter_alter_field_reloads_state_on_fk_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.AlterField('Rider', 'id', models.CharField(primary_key=True, max_length=99)), migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)), ]) def test_alter_field_reloads_state_on_fk_with_to_field_target_changes(self): """ If AlterField doesn't reload state appropriately, the second AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. """ app_label = 'alter_alter_field_reloads_state_on_fk_with_to_field_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('slug', models.CharField(unique=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='slug')), ('slug', models.CharField(unique=True, max_length=100)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE, to_field='slug')), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.AlterField('Rider', 'slug', models.CharField(unique=True, max_length=99)), migrations.AlterField('Pony', 'slug', models.CharField(unique=True, max_length=99)), ]) def test_rename_field_reloads_state_on_fk_target_changes(self): """ If RenameField doesn't reload state appropriately, the AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. """ app_label = 'alter_rename_field_reloads_state_on_fk_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameField('Rider', 'id', 'id2'), migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)), ], atomic=connection.features.supports_atomic_references_rename) def test_rename_field(self): """ Tests the RenameField operation. 
""" project_state = self.set_up_test_model("test_rnfl", unique_together=True, index_together=True) # Test the state alteration operation = migrations.RenameField("Pony", "pink", "blue") self.assertEqual(operation.describe(), "Rename field pink on Pony to blue") self.assertEqual(operation.migration_name_fragment, 'rename_pink_pony_blue') new_state = project_state.clone() operation.state_forwards("test_rnfl", new_state) self.assertIn("blue", new_state.models["test_rnfl", "pony"].fields) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].fields) # Make sure the unique_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) # Make sure the index_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['index_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['index_together'][0]) # Test the database alteration self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") with connection.schema_editor() as editor: operation.database_forwards("test_rnfl", editor, project_state, new_state) self.assertColumnExists("test_rnfl_pony", "blue") self.assertColumnNotExists("test_rnfl_pony", "pink") # Ensure the unique constraint has been ported over with connection.cursor() as cursor: cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_rnfl_pony") # Ensure the index constraint has been ported over self.assertIndexExists("test_rnfl_pony", ["weight", "blue"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rnfl", editor, new_state, project_state) self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") # Ensure the index constraint has been reset self.assertIndexExists("test_rnfl_pony", ["weight", "pink"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'old_name': "pink", 'new_name': "blue"}) def test_rename_field_with_db_column(self): project_state = self.apply_operations('test_rfwdbc', ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField(db_column='db_field')), ('fk_field', models.ForeignKey( 'Pony', models.CASCADE, db_column='db_fk_field', )), ]), ]) new_state = project_state.clone() operation = migrations.RenameField('Pony', 'field', 'renamed_field') operation.state_forwards('test_rfwdbc', new_state) self.assertIn('renamed_field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertNotIn('field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertColumnExists('test_rfwdbc_pony', 'db_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards('test_rfwdbc', editor, project_state, new_state) self.assertColumnExists('test_rfwdbc_pony', 'db_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_backwards('test_rfwdbc', editor, new_state, project_state) 
self.assertColumnExists('test_rfwdbc_pony', 'db_field') new_state = project_state.clone() operation = migrations.RenameField('Pony', 'fk_field', 'renamed_fk_field') operation.state_forwards('test_rfwdbc', new_state) self.assertIn('renamed_fk_field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertNotIn('fk_field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertColumnExists('test_rfwdbc_pony', 'db_fk_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards('test_rfwdbc', editor, project_state, new_state) self.assertColumnExists('test_rfwdbc_pony', 'db_fk_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_backwards('test_rfwdbc', editor, new_state, project_state) self.assertColumnExists('test_rfwdbc_pony', 'db_fk_field') def test_rename_field_case(self): project_state = self.apply_operations('test_rfmx', ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField()), ]), ]) new_state = project_state.clone() operation = migrations.RenameField('Pony', 'field', 'FiElD') operation.state_forwards('test_rfmx', new_state) self.assertIn('FiElD', new_state.models['test_rfmx', 'pony'].fields) self.assertColumnExists('test_rfmx_pony', 'field') with connection.schema_editor() as editor: operation.database_forwards('test_rfmx', editor, project_state, new_state) self.assertColumnExists( 'test_rfmx_pony', connection.introspection.identifier_converter('FiElD'), ) with connection.schema_editor() as editor: operation.database_backwards('test_rfmx', editor, new_state, project_state) self.assertColumnExists('test_rfmx_pony', 'field') def test_rename_missing_field(self): state = ProjectState() state.add_model(ModelState('app', 'model', [])) with self.assertRaisesMessage(FieldDoesNotExist, "app.model has no field named 'field'"): migrations.RenameField('model', 'field', 'new_field').state_forwards('app', state) def test_rename_referenced_field_state_forward(self): state = ProjectState() state.add_model(ModelState('app', 'Model', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField(unique=True)), ])) state.add_model(ModelState('app', 'OtherModel', [ ('id', models.AutoField(primary_key=True)), ('fk', models.ForeignKey('Model', models.CASCADE, to_field='field')), ('fo', models.ForeignObject('Model', models.CASCADE, from_fields=('fk',), to_fields=('field',))), ])) operation = migrations.RenameField('Model', 'field', 'renamed') new_state = state.clone() operation.state_forwards('app', new_state) self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].remote_field.field_name, 'renamed') self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].from_fields, ['self']) self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].to_fields, ('renamed',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].from_fields, ('fk',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].to_fields, ('renamed',)) operation = migrations.RenameField('OtherModel', 'fk', 'renamed_fk') new_state = state.clone() operation.state_forwards('app', new_state) self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].remote_field.field_name, 'renamed') self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].from_fields, ('self',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].to_fields, 
('renamed',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].from_fields, ('renamed_fk',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].to_fields, ('renamed',)) def test_alter_unique_together(self): """ Tests the AlterUniqueTogether operation. """ project_state = self.set_up_test_model("test_alunto") # Test the state alteration operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")]) self.assertEqual(operation.describe(), "Alter unique_together for Pony (1 constraint(s))") self.assertEqual( operation.migration_name_fragment, 'alter_pony_unique_together', ) new_state = project_state.clone() operation.state_forwards("test_alunto", new_state) self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # Make sure we can insert duplicate rows with connection.cursor() as cursor: cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alunto", editor, project_state, new_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alunto", editor, new_state, project_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test flat unique_together operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight")) operation.state_forwards("test_alunto", new_state) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterUniqueTogether") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'unique_together': {("pink", "weight")}}) def test_alter_unique_together_remove(self): operation = migrations.AlterUniqueTogether("Pony", None) self.assertEqual(operation.describe(), "Alter unique_together for Pony (0 constraint(s))") def test_add_index(self): """ Test the AddIndex operation. """ project_state = self.set_up_test_model("test_adin") msg = ( "Indexes passed to AddIndex operations require a name argument. " "<Index: fields=['pink']> doesn't have one." 
) with self.assertRaisesMessage(ValueError, msg): migrations.AddIndex("Pony", models.Index(fields=["pink"])) index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx") operation = migrations.AddIndex("Pony", index) self.assertEqual(operation.describe(), "Create index test_adin_pony_pink_idx on field(s) pink of model Pony") self.assertEqual( operation.migration_name_fragment, 'pony_test_adin_pony_pink_idx', ) new_state = project_state.clone() operation.state_forwards("test_adin", new_state) # Test the database alteration self.assertEqual(len(new_state.models["test_adin", "pony"].options['indexes']), 1) self.assertIndexNotExists("test_adin_pony", ["pink"]) with connection.schema_editor() as editor: operation.database_forwards("test_adin", editor, project_state, new_state) self.assertIndexExists("test_adin_pony", ["pink"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adin", editor, new_state, project_state) self.assertIndexNotExists("test_adin_pony", ["pink"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddIndex") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'index': index}) def test_remove_index(self): """ Test the RemoveIndex operation. """ project_state = self.set_up_test_model("test_rmin", multicol_index=True) self.assertTableExists("test_rmin_pony") self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) operation = migrations.RemoveIndex("Pony", "pony_test_idx") self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony") self.assertEqual( operation.migration_name_fragment, 'remove_pony_pony_test_idx', ) new_state = project_state.clone() operation.state_forwards("test_rmin", new_state) # Test the state alteration self.assertEqual(len(new_state.models["test_rmin", "pony"].options['indexes']), 0) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_rmin", editor, project_state, new_state) self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmin", editor, new_state, project_state) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RemoveIndex") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': "pony_test_idx"}) # Also test a field dropped with index - sqlite remake issue operations = [ migrations.RemoveIndex("Pony", "pony_test_idx"), migrations.RemoveField("Pony", "pink"), ] self.assertColumnExists("test_rmin_pony", "pink") self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # Test database alteration new_state = project_state.clone() self.apply_operations('test_rmin', new_state, operations=operations) self.assertColumnNotExists("test_rmin_pony", "pink") self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"]) # And test reversal self.unapply_operations("test_rmin", project_state, operations=operations) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) def test_add_index_state_forwards(self): project_state = self.set_up_test_model('test_adinsf') index = models.Index(fields=['pink'], name='test_adinsf_pony_pink_idx') old_model = project_state.apps.get_model('test_adinsf', 'Pony') new_state = project_state.clone() 
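        # AddIndex.state_forwards() must re-render the model so the cached
        # model class is not reused (checked with assertIsNot() below).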
operation = migrations.AddIndex('Pony', index) operation.state_forwards('test_adinsf', new_state) new_model = new_state.apps.get_model('test_adinsf', 'Pony') self.assertIsNot(old_model, new_model) def test_remove_index_state_forwards(self): project_state = self.set_up_test_model('test_rminsf') index = models.Index(fields=['pink'], name='test_rminsf_pony_pink_idx') migrations.AddIndex('Pony', index).state_forwards('test_rminsf', project_state) old_model = project_state.apps.get_model('test_rminsf', 'Pony') new_state = project_state.clone() operation = migrations.RemoveIndex('Pony', 'test_rminsf_pony_pink_idx') operation.state_forwards('test_rminsf', new_state) new_model = new_state.apps.get_model('test_rminsf', 'Pony') self.assertIsNot(old_model, new_model) @skipUnlessDBFeature('supports_expression_indexes') def test_add_func_index(self): app_label = 'test_addfuncin' index_name = f'{app_label}_pony_abs_idx' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label) index = models.Index(Abs('weight'), name=index_name) operation = migrations.AddIndex('Pony', index) self.assertEqual( operation.describe(), 'Create index test_addfuncin_pony_abs_idx on Abs(F(weight)) on model Pony', ) self.assertEqual( operation.migration_name_fragment, 'pony_test_addfuncin_pony_abs_idx', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['indexes']), 1) self.assertIndexNameNotExists(table_name, index_name) # Add index. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNameExists(table_name, index_name) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertIndexNameNotExists(table_name, index_name) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddIndex') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'index': index}) @skipUnlessDBFeature('supports_expression_indexes') def test_remove_func_index(self): app_label = 'test_rmfuncin' index_name = f'{app_label}_pony_abs_idx' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label, indexes=[ models.Index(Abs('weight'), name=index_name), ]) self.assertTableExists(table_name) self.assertIndexNameExists(table_name, index_name) operation = migrations.RemoveIndex('Pony', index_name) self.assertEqual( operation.describe(), 'Remove index test_rmfuncin_pony_abs_idx from Pony', ) self.assertEqual( operation.migration_name_fragment, 'remove_pony_test_rmfuncin_pony_abs_idx', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['indexes']), 0) # Remove index. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNameNotExists(table_name, index_name) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertIndexNameExists(table_name, index_name) # Deconstruction. 
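        # deconstruct() returns an (operation_name, args, kwargs) triple that
        # can recreate the operation.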
definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveIndex') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'name': index_name}) @skipUnlessDBFeature('supports_expression_indexes') def test_alter_field_with_func_index(self): app_label = 'test_alfuncin' index_name = f'{app_label}_pony_idx' table_name = f'{app_label}_pony' project_state = self.set_up_test_model( app_label, indexes=[models.Index(Abs('pink'), name=index_name)], ) operation = migrations.AlterField('Pony', 'pink', models.IntegerField(null=True)) new_state = project_state.clone() operation.state_forwards(app_label, new_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNameExists(table_name, index_name) with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertIndexNameExists(table_name, index_name) def test_alter_field_with_index(self): """ Test AlterField operation with an index to ensure indexes created via Meta.indexes don't get dropped with sqlite3 remake. """ project_state = self.set_up_test_model("test_alflin", index=True) operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) new_state = project_state.clone() operation.state_forwards("test_alflin", new_state) # Test the database alteration self.assertColumnNotNull("test_alflin_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_alflin", editor, project_state, new_state) # Index hasn't been dropped self.assertIndexExists("test_alflin_pony", ["pink"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflin", editor, new_state, project_state) # Ensure the index is still there self.assertIndexExists("test_alflin_pony", ["pink"]) def test_alter_index_together(self): """ Tests the AlterIndexTogether operation. 
""" project_state = self.set_up_test_model("test_alinto") # Test the state alteration operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")]) self.assertEqual(operation.describe(), "Alter index_together for Pony (1 constraint(s))") self.assertEqual( operation.migration_name_fragment, 'alter_pony_index_together', ) new_state = project_state.clone() operation.state_forwards("test_alinto", new_state) self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0) self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1) # Make sure there's no matching index self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alinto", editor, project_state, new_state) self.assertIndexExists("test_alinto_pony", ["pink", "weight"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alinto", editor, new_state, project_state) self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterIndexTogether") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'index_together': {("pink", "weight")}}) def test_alter_index_together_remove(self): operation = migrations.AlterIndexTogether("Pony", None) self.assertEqual(operation.describe(), "Alter index_together for Pony (0 constraint(s))") @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_alter_index_together_remove_with_unique_together(self): app_label = 'test_alintoremove_wunto' table_name = '%s_pony' % app_label project_state = self.set_up_test_model(app_label, unique_together=True) self.assertUniqueConstraintExists(table_name, ['pink', 'weight']) # Add index together. new_state = project_state.clone() operation = migrations.AlterIndexTogether('Pony', [('pink', 'weight')]) operation.state_forwards(app_label, new_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexExists(table_name, ['pink', 'weight']) # Remove index together. 
project_state = new_state new_state = project_state.clone() operation = migrations.AlterIndexTogether('Pony', set()) operation.state_forwards(app_label, new_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNotExists(table_name, ['pink', 'weight']) self.assertUniqueConstraintExists(table_name, ['pink', 'weight']) @skipUnlessDBFeature('supports_table_check_constraints') def test_add_constraint(self): project_state = self.set_up_test_model("test_addconstraint") gt_check = models.Q(pink__gt=2) gt_constraint = models.CheckConstraint(check=gt_check, name="test_add_constraint_pony_pink_gt_2") gt_operation = migrations.AddConstraint("Pony", gt_constraint) self.assertEqual( gt_operation.describe(), "Create constraint test_add_constraint_pony_pink_gt_2 on model Pony" ) self.assertEqual( gt_operation.migration_name_fragment, 'pony_test_add_constraint_pony_pink_gt_2', ) # Test the state alteration new_state = project_state.clone() gt_operation.state_forwards("test_addconstraint", new_state) self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 1) Pony = new_state.apps.get_model("test_addconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 1) # Test the database alteration with connection.schema_editor() as editor: gt_operation.database_forwards("test_addconstraint", editor, project_state, new_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=1.0) # Add another one. lt_check = models.Q(pink__lt=100) lt_constraint = models.CheckConstraint(check=lt_check, name="test_add_constraint_pony_pink_lt_100") lt_operation = migrations.AddConstraint("Pony", lt_constraint) lt_operation.state_forwards("test_addconstraint", new_state) self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 2) Pony = new_state.apps.get_model("test_addconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 2) with connection.schema_editor() as editor: lt_operation.database_forwards("test_addconstraint", editor, project_state, new_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=100, weight=1.0) # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards("test_addconstraint", editor, new_state, project_state) Pony.objects.create(pink=1, weight=1.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], "AddConstraint") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'constraint': gt_constraint}) @skipUnlessDBFeature('supports_table_check_constraints') def test_add_constraint_percent_escaping(self): app_label = 'add_constraint_string_quoting' operations = [ migrations.CreateModel( 'Author', fields=[ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=100)), ('surname', models.CharField(max_length=100, default='')), ('rebate', models.CharField(max_length=100)), ], ), ] from_state = self.apply_operations(app_label, ProjectState(), operations) # "%" generated in startswith lookup should be escaped in a way that is # considered a leading wildcard. 
        check = models.Q(name__startswith='Albert')
        constraint = models.CheckConstraint(check=check, name='name_constraint')
        operation = migrations.AddConstraint('Author', constraint)
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Author.objects.create(name='Artur')
        # Literal "%" should be escaped in a way that is not considered a
        # wildcard.
        check = models.Q(rebate__endswith='%')
        constraint = models.CheckConstraint(check=check, name='rebate_constraint')
        operation = migrations.AddConstraint('Author', constraint)
        from_state = to_state
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Author.objects.create(name='Albert', rebate='10$')
        author = Author.objects.create(name='Albert', rebate='10%')
        self.assertEqual(Author.objects.get(), author)
        # "%" literals baked into the right-hand side should not be used for
        # parameter interpolation.
        check = ~models.Q(surname__startswith=models.F('name'))
        constraint = models.CheckConstraint(check=check, name='name_constraint_rhs')
        operation = migrations.AddConstraint('Author', constraint)
        from_state = to_state
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Author = to_state.apps.get_model(app_label, 'Author')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Author.objects.create(name='Albert', surname='Alberto')

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_add_or_constraint(self):
        app_label = 'test_addorconstraint'
        constraint_name = 'add_constraint_or'
        from_state = self.set_up_test_model(app_label)
        check = models.Q(pink__gt=2, weight__gt=2) | models.Q(weight__lt=0)
        constraint = models.CheckConstraint(check=check, name=constraint_name)
        operation = migrations.AddConstraint('Pony', constraint)
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Pony = to_state.apps.get_model(app_label, 'Pony')
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=2, weight=3.0)
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=3, weight=1.0)
        Pony.objects.bulk_create([
            Pony(pink=3, weight=-1.0),
            Pony(pink=1, weight=-1.0),
            Pony(pink=3, weight=3.0),
        ])

    @skipUnlessDBFeature('supports_table_check_constraints')
    def test_add_constraint_combinable(self):
        app_label = 'test_addconstraint_combinable'
        operations = [
            migrations.CreateModel(
                'Book',
                fields=[
                    ('id', models.AutoField(primary_key=True)),
                    ('read', models.PositiveIntegerField()),
                    ('unread', models.PositiveIntegerField()),
                ],
            ),
        ]
        from_state = self.apply_operations(app_label, ProjectState(), operations)
        constraint = models.CheckConstraint(
            check=models.Q(read=(100 - models.F('unread'))),
            name='test_addconstraint_combinable_sum_100',
        )
        operation = migrations.AddConstraint('Book', constraint)
        to_state =
from_state.clone() operation.state_forwards(app_label, to_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Book = to_state.apps.get_model(app_label, 'Book') with self.assertRaises(IntegrityError), transaction.atomic(): Book.objects.create(read=70, unread=10) Book.objects.create(read=70, unread=30) @skipUnlessDBFeature('supports_table_check_constraints') def test_remove_constraint(self): project_state = self.set_up_test_model("test_removeconstraint", constraints=[ models.CheckConstraint(check=models.Q(pink__gt=2), name="test_remove_constraint_pony_pink_gt_2"), models.CheckConstraint(check=models.Q(pink__lt=100), name="test_remove_constraint_pony_pink_lt_100"), ]) gt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_gt_2") self.assertEqual( gt_operation.describe(), "Remove constraint test_remove_constraint_pony_pink_gt_2 from model Pony" ) self.assertEqual( gt_operation.migration_name_fragment, 'remove_pony_test_remove_constraint_pony_pink_gt_2', ) # Test state alteration new_state = project_state.clone() gt_operation.state_forwards("test_removeconstraint", new_state) self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 1) Pony = new_state.apps.get_model("test_removeconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 1) # Test database alteration with connection.schema_editor() as editor: gt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state) Pony.objects.create(pink=1, weight=1.0).delete() with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=100, weight=1.0) # Remove the other one. lt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_lt_100") lt_operation.state_forwards("test_removeconstraint", new_state) self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 0) Pony = new_state.apps.get_model("test_removeconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor: lt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state) Pony.objects.create(pink=100, weight=1.0).delete() # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards("test_removeconstraint", editor, new_state, project_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=1.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], "RemoveConstraint") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': "test_remove_constraint_pony_pink_gt_2"}) def test_add_partial_unique_constraint(self): project_state = self.set_up_test_model('test_addpartialuniqueconstraint') partial_unique_constraint = models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ) operation = migrations.AddConstraint('Pony', partial_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint test_constraint_pony_pink_for_weight_gt_5_uniq ' 'on model Pony' ) # Test the state alteration new_state = project_state.clone() operation.state_forwards('test_addpartialuniqueconstraint', new_state) self.assertEqual(len(new_state.models['test_addpartialuniqueconstraint', 'pony'].options['constraints']), 1) Pony = 
new_state.apps.get_model('test_addpartialuniqueconstraint', 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards('test_addpartialuniqueconstraint', editor, project_state, new_state) # Test constraint works Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test reversal with connection.schema_editor() as editor: operation.database_backwards('test_addpartialuniqueconstraint', editor, new_state, project_state) # Test constraint doesn't work Pony.objects.create(pink=1, weight=7.0) # Test deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'constraint': partial_unique_constraint}) def test_remove_partial_unique_constraint(self): project_state = self.set_up_test_model('test_removepartialuniqueconstraint', constraints=[ models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ), ]) gt_operation = migrations.RemoveConstraint('Pony', 'test_constraint_pony_pink_for_weight_gt_5_uniq') self.assertEqual( gt_operation.describe(), 'Remove constraint test_constraint_pony_pink_for_weight_gt_5_uniq from model Pony' ) # Test state alteration new_state = project_state.clone() gt_operation.state_forwards('test_removepartialuniqueconstraint', new_state) self.assertEqual(len(new_state.models['test_removepartialuniqueconstraint', 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model('test_removepartialuniqueconstraint', 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) # Test database alteration with connection.schema_editor() as editor: gt_operation.database_forwards('test_removepartialuniqueconstraint', editor, project_state, new_state) # Test constraint doesn't work Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) Pony.objects.create(pink=1, weight=7.0).delete() # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards('test_removepartialuniqueconstraint', editor, new_state, project_state) # Test constraint works if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'test_constraint_pony_pink_for_weight_gt_5_uniq', }) def test_add_deferred_unique_constraint(self): app_label = 'test_adddeferred_uc' project_state = self.set_up_test_model(app_label) deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferred_pink_constraint_add', deferrable=models.Deferrable.DEFERRED, ) operation = migrations.AddConstraint('Pony', deferred_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint deferred_pink_constraint_add on model Pony', ) # Add constraint. 
new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) with connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx: operation.database_forwards(app_label, editor, project_state, new_state) Pony.objects.create(pink=1, weight=4.0) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 3 obj.save() else: self.assertEqual(len(ctx), 0) Pony.objects.create(pink=1, weight=4.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual( definition[2], {'model_name': 'Pony', 'constraint': deferred_unique_constraint}, ) def test_remove_deferred_unique_constraint(self): app_label = 'test_removedeferred_uc' deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferred_pink_constraint_rm', deferrable=models.Deferrable.DEFERRED, ) project_state = self.set_up_test_model(app_label, constraints=[deferred_unique_constraint]) operation = migrations.RemoveConstraint('Pony', deferred_unique_constraint.name) self.assertEqual( operation.describe(), 'Remove constraint deferred_pink_constraint_rm from model Pony', ) # Remove constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx: operation.database_forwards(app_label, editor, project_state, new_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0).delete() if not connection.features.supports_deferrable_unique_constraints: self.assertEqual(len(ctx), 0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 3 obj.save() else: Pony.objects.create(pink=1, weight=4.0) # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'deferred_pink_constraint_rm', }) def test_add_covering_unique_constraint(self): app_label = 'test_addcovering_uc' project_state = self.set_up_test_model(app_label) covering_unique_constraint = models.UniqueConstraint( fields=['pink'], name='covering_pink_constraint_add', include=['weight'], ) operation = migrations.AddConstraint('Pony', covering_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint covering_pink_constraint_add on model Pony', ) # Add constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) with connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx: operation.database_forwards(app_label, editor, project_state, new_state) Pony.objects.create(pink=1, weight=4.0) if connection.features.supports_covering_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=4.0) else: self.assertEqual(len(ctx), 0) Pony.objects.create(pink=1, weight=4.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual( definition[2], {'model_name': 'Pony', 'constraint': covering_unique_constraint}, ) def test_remove_covering_unique_constraint(self): app_label = 'test_removecovering_uc' covering_unique_constraint = models.UniqueConstraint( fields=['pink'], name='covering_pink_constraint_rm', include=['weight'], ) project_state = self.set_up_test_model(app_label, constraints=[covering_unique_constraint]) operation = migrations.RemoveConstraint('Pony', covering_unique_constraint.name) self.assertEqual( operation.describe(), 'Remove constraint covering_pink_constraint_rm from model Pony', ) # Remove constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx: operation.database_forwards(app_label, editor, project_state, new_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0).delete() if not connection.features.supports_covering_indexes: self.assertEqual(len(ctx), 0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_covering_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=4.0) else: Pony.objects.create(pink=1, weight=4.0) # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'covering_pink_constraint_rm', }) def test_alter_field_with_func_unique_constraint(self): app_label = 'test_alfuncuc' constraint_name = f'{app_label}_pony_uq' table_name = f'{app_label}_pony' project_state = self.set_up_test_model( app_label, constraints=[models.UniqueConstraint('pink', 'weight', name=constraint_name)] ) operation = migrations.AlterField('Pony', 'pink', models.IntegerField(null=True)) new_state = project_state.clone() operation.state_forwards(app_label, new_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) def test_add_func_unique_constraint(self): app_label = 'test_adfuncuc' constraint_name = f'{app_label}_pony_abs_uq' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label) constraint = models.UniqueConstraint(Abs('weight'), name=constraint_name) operation = migrations.AddConstraint('Pony', constraint) self.assertEqual( operation.describe(), 'Create constraint test_adfuncuc_pony_abs_uq on model Pony', ) self.assertEqual( operation.migration_name_fragment, 'pony_test_adfuncuc_pony_abs_uq', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1) self.assertIndexNameNotExists(table_name, constraint_name) # Add constraint. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) Pony = new_state.apps.get_model(app_label, 'Pony') Pony.objects.create(weight=4.0) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) with self.assertRaises(IntegrityError): Pony.objects.create(weight=-4.0) else: self.assertIndexNameNotExists(table_name, constraint_name) Pony.objects.create(weight=-4.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertIndexNameNotExists(table_name, constraint_name) # Constraint doesn't work. Pony.objects.create(weight=-4.0) # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual( definition[2], {'model_name': 'Pony', 'constraint': constraint}, ) def test_remove_func_unique_constraint(self): app_label = 'test_rmfuncuc' constraint_name = f'{app_label}_pony_abs_uq' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label, constraints=[ models.UniqueConstraint(Abs('weight'), name=constraint_name), ]) self.assertTableExists(table_name) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) operation = migrations.RemoveConstraint('Pony', constraint_name) self.assertEqual( operation.describe(), 'Remove constraint test_rmfuncuc_pony_abs_uq from model Pony', ) self.assertEqual( operation.migration_name_fragment, 'remove_pony_test_rmfuncuc_pony_abs_uq', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) # Remove constraint. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNameNotExists(table_name, constraint_name) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=-4.0).delete() # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) with self.assertRaises(IntegrityError): Pony.objects.create(weight=-4.0) else: self.assertIndexNameNotExists(table_name, constraint_name) Pony.objects.create(weight=-4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'name': constraint_name}) def test_alter_model_options(self): """ Tests the AlterModelOptions operation. 
""" project_state = self.set_up_test_model("test_almoop") # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {"permissions": [("can_groom", "Can groom")]}) self.assertEqual(operation.describe(), "Change Meta options on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_options') new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(new_state.models["test_almoop", "pony"].options["permissions"][0][0], "can_groom") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {"permissions": [("can_groom", "Can groom")]}}) def test_alter_model_options_emptying(self): """ The AlterModelOptions operation removes keys from the dict (#23121) """ project_state = self.set_up_test_model("test_almoop", options=True) # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {}) self.assertEqual(operation.describe(), "Change Meta options on Pony") new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {}}) def test_alter_order_with_respect_to(self): """ Tests the AlterOrderWithRespectTo operation. 
""" project_state = self.set_up_test_model("test_alorwrtto", related_model=True) # Test the state alteration operation = migrations.AlterOrderWithRespectTo("Rider", "pony") self.assertEqual(operation.describe(), "Set order_with_respect_to on Rider to pony") self.assertEqual( operation.migration_name_fragment, 'alter_rider_order_with_respect_to', ) new_state = project_state.clone() operation.state_forwards("test_alorwrtto", new_state) self.assertIsNone( project_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None) ) self.assertEqual( new_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None), "pony" ) # Make sure there's no matching index self.assertColumnNotExists("test_alorwrtto_rider", "_order") # Create some rows before alteration rendered_state = project_state.apps pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(weight=50) rider1 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony) rider1.friend = rider1 rider1.save() rider2 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony) rider2.friend = rider2 rider2.save() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alorwrtto", editor, project_state, new_state) self.assertColumnExists("test_alorwrtto_rider", "_order") # Check for correct value in rows updated_riders = new_state.apps.get_model("test_alorwrtto", "Rider").objects.all() self.assertEqual(updated_riders[0]._order, 0) self.assertEqual(updated_riders[1]._order, 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alorwrtto", editor, new_state, project_state) self.assertColumnNotExists("test_alorwrtto_rider", "_order") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterOrderWithRespectTo") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Rider", 'order_with_respect_to': "pony"}) def test_alter_model_managers(self): """ The managers on a model are set. 
""" project_state = self.set_up_test_model("test_almoma") # Test the state alteration operation = migrations.AlterModelManagers( "Pony", managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Change managers on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_managers') managers = project_state.models["test_almoma", "pony"].managers self.assertEqual(managers, []) new_state = project_state.clone() operation.state_forwards("test_almoma", new_state) self.assertIn(("test_almoma", "pony"), new_state.models) managers = new_state.models["test_almoma", "pony"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) rendered_state = new_state.apps model = rendered_state.get_model('test_almoma', 'pony') self.assertIsInstance(model.food_qs, models.Manager) self.assertIsInstance(model.food_mgr, FoodManager) self.assertIsInstance(model.food_mgr_kwargs, FoodManager) def test_alter_model_managers_emptying(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_almomae", manager_model=True) # Test the state alteration operation = migrations.AlterModelManagers("Food", managers=[]) self.assertEqual(operation.describe(), "Change managers on Food") self.assertIn(("test_almomae", "food"), project_state.models) managers = project_state.models["test_almomae", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) new_state = project_state.clone() operation.state_forwards("test_almomae", new_state) managers = new_state.models["test_almomae", "food"].managers self.assertEqual(managers, []) def test_alter_fk(self): """ Creating and then altering an FK works correctly and deals with the pending SQL (#23091) """ project_state = self.set_up_test_model("test_alfk") # Test adding and then altering the FK in one go create_operation = migrations.CreateModel( name="Rider", fields=[ ("id", models.AutoField(primary_key=True)), ("pony", models.ForeignKey("Pony", models.CASCADE)), ], ) create_state = project_state.clone() create_operation.state_forwards("test_alfk", create_state) alter_operation = migrations.AlterField( model_name='Rider', name='pony', field=models.ForeignKey("Pony", models.CASCADE, editable=False), ) alter_state = create_state.clone() alter_operation.state_forwards("test_alfk", alter_state) with connection.schema_editor() as editor: create_operation.database_forwards("test_alfk", editor, project_state, create_state) alter_operation.database_forwards("test_alfk", editor, create_state, alter_state) def test_alter_fk_non_fk(self): """ Altering an FK to a non-FK works (#23244) """ # Test the state alteration operation = migrations.AlterField( model_name="Rider", name="pony", field=models.FloatField(), ) 
project_state, new_state = self.make_test_state("test_afknfk", operation, related_model=True) # Test the database alteration self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") with connection.schema_editor() as editor: operation.database_forwards("test_afknfk", editor, project_state, new_state) self.assertColumnExists("test_afknfk_rider", "pony") self.assertColumnNotExists("test_afknfk_rider", "pony_id") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_afknfk", editor, new_state, project_state) self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") def test_run_sql(self): """ Tests the RunSQL operation. """ project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( # Use a multi-line string with a comment to test splitting on SQLite and MySQL respectively "CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'i love ponies'); -- this is magic!\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (2, 'i love django');\n" "UPDATE i_love_ponies SET special_thing = 'Ponies' WHERE special_thing LIKE '%%ponies';" "UPDATE i_love_ponies SET special_thing = 'Django' WHERE special_thing LIKE '%django';", # Run delete queries to test for parameter substitution failure # reported in #23426 "DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';" "DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';" "DROP TABLE i_love_ponies", state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])], ) self.assertEqual(operation.describe(), "Raw SQL operation") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_runsql", new_state) self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test SQL collection with connection.schema_editor(collect_sql=True) as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql)) operation.database_backwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql)) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 2) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'") self.assertEqual(cursor.fetchall()[0][0], 1) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'") self.assertEqual(cursor.fetchall()[0][0], 1) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunSQL") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["reverse_sql", "sql", "state_operations"]) # And elidable reduction 
self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunSQL('SELECT 1 FROM void;', elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_sql_params(self): """ #23426 - RunSQL should accept parameters. """ project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( ["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"], ["DROP TABLE i_love_ponies"], ) param_operation = migrations.RunSQL( # forwards ( "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');", ["INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);", ['Ponies']], ("INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);", (3, 'Python',)), ), # backwards [ "DELETE FROM i_love_ponies WHERE special_thing = 'Django';", ["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None], ("DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;", [3, 'Python']), ] ) # Make sure there's no table self.assertTableNotExists("i_love_ponies") new_state = project_state.clone() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) # Test parameter passing with connection.schema_editor() as editor: param_operation.database_forwards("test_runsql", editor, project_state, new_state) # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 3) with connection.schema_editor() as editor: param_operation.database_backwards("test_runsql", editor, new_state, project_state) with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") def test_run_sql_params_invalid(self): """ #23426 - RunSQL should fail when a list of statements with an incorrect number of tuples is given. """ project_state = self.set_up_test_model("test_runsql") new_state = project_state.clone() operation = migrations.RunSQL( # forwards [ ["INSERT INTO foo (bar) VALUES ('buz');"] ], # backwards ( ("DELETE FROM foo WHERE bar = 'buz';", 'invalid', 'parameter count'), ), ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"): operation.database_forwards("test_runsql", editor, project_state, new_state) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"): operation.database_backwards("test_runsql", editor, new_state, project_state) def test_run_sql_noop(self): """ #24098 - Tests no-op RunSQL operations. 
""" operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, None, None) operation.database_backwards("test_runsql", editor, None, None) def test_run_sql_add_missing_semicolon_on_collect_sql(self): project_state = self.set_up_test_model('test_runsql') new_state = project_state.clone() tests = [ 'INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1);\n', 'INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1)\n', ] for sql in tests: with self.subTest(sql=sql): operation = migrations.RunSQL(sql, migrations.RunPython.noop) with connection.schema_editor(collect_sql=True) as editor: operation.database_forwards('test_runsql', editor, project_state, new_state) collected_sql = '\n'.join(editor.collected_sql) self.assertEqual(collected_sql.count(';'), 1) def test_run_python(self): """ Tests the RunPython operation """ project_state = self.set_up_test_model("test_runpython", mti_model=True) # Create the operation def inner_method(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.create(pink=1, weight=3.55) Pony.objects.create(weight=5) def inner_method_reverse(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.filter(pink=1, weight=3.55).delete() Pony.objects.filter(weight=5).delete() operation = migrations.RunPython(inner_method, reverse_code=inner_method_reverse) self.assertEqual(operation.describe(), "Raw Python operation") # Test the state alteration does nothing new_state = project_state.clone() operation.state_forwards("test_runpython", new_state) self.assertEqual(new_state, project_state) # Test the database alteration self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) # Now test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) # Now test we can't use a string with self.assertRaisesMessage(ValueError, 'RunPython must be supplied with a callable'): migrations.RunPython("print 'ahahaha'") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code", "reverse_code"]) # Also test reversal fails, with an operation identical to above but without reverse_code set no_reverse_operation = migrations.RunPython(inner_method) self.assertFalse(no_reverse_operation.reversible) with connection.schema_editor() as editor: no_reverse_operation.database_forwards("test_runpython", editor, project_state, new_state) with self.assertRaises(NotImplementedError): no_reverse_operation.database_backwards("test_runpython", editor, new_state, project_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) def create_ponies(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") pony1 = Pony.objects.create(pink=1, weight=3.55) self.assertIsNot(pony1.pk, None) pony2 = Pony.objects.create(weight=5) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = 
migrations.RunPython(create_ponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code"]) def create_shetlandponies(models, schema_editor): ShetlandPony = models.get_model("test_runpython", "ShetlandPony") pony1 = ShetlandPony.objects.create(weight=4.0) self.assertIsNot(pony1.pk, None) pony2 = ShetlandPony.objects.create(weight=5.0) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = migrations.RunPython(create_shetlandponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6) self.assertEqual(project_state.apps.get_model("test_runpython", "ShetlandPony").objects.count(), 2) # And elidable reduction self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunPython(inner_method, elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_python_atomic(self): """ Tests the RunPython operation correctly handles the "atomic" keyword """ project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True) def inner_method(models, schema_editor): Pony = models.get_model("test_runpythonatomic", "Pony") Pony.objects.create(pink=1, weight=3.55) raise ValueError("Adrian hates ponies.") # Verify atomicity when applying. atomic_migration = Migration("test", "test_runpythonatomic") atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method)] non_atomic_migration = Migration("test", "test_runpythonatomic") non_atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method, atomic=False)] # If we're a fully-transactional database, both versions should rollback if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation should leave a row there else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Reset object count to zero and verify atomicity when unapplying. 
project_state.apps.get_model("test_runpythonatomic", "Pony").objects.all().delete() # On a fully-transactional database, both versions rollback. if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation leaves a row there. else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Verify deconstruction. definition = non_atomic_migration.operations[0].deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["atomic", "code", "reverse_code"]) def test_run_python_related_assignment(self): """ #24282 - Model changes to a FK reverse side update the model on the FK side as well. """ def inner_method(models, schema_editor): Author = models.get_model("test_authors", "Author") Book = models.get_model("test_books", "Book") author = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author) create_author = migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey("test_authors.Author", models.CASCADE)) ], options={}, ) add_hometown = migrations.AddField( "Author", "hometown", models.CharField(max_length=100), ) create_old_man = migrations.RunPython(inner_method, inner_method) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_authors", new_state) create_author.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_books", new_state) create_book.database_forwards("test_books", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: add_hometown.state_forwards("test_authors", new_state) add_hometown.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_old_man.state_forwards("test_books", new_state) create_old_man.database_forwards("test_books", editor, project_state, new_state) def test_model_with_bigautofield(self): """ A model with BigAutoField can be 
created. """ def create_data(models, schema_editor): Author = models.get_model("test_author", "Author") Book = models.get_model("test_book", "Book") author1 = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author1) Book.objects.create(id=2 ** 33, title="A farewell to arms", author=author1) author2 = Author.objects.create(id=2 ** 33, name="Remarque") Book.objects.create(title="All quiet on the western front", author=author2) Book.objects.create(title="Arc de Triomphe", author=author2) create_author = migrations.CreateModel( "Author", [ ("id", models.BigAutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.BigAutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey(to="test_author.Author", on_delete=models.CASCADE)) ], options={}, ) fill_data = migrations.RunPython(create_data) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_author", new_state) create_author.database_forwards("test_author", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_book", new_state) create_book.database_forwards("test_book", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_data.state_forwards("fill_data", new_state) fill_data.database_forwards("fill_data", editor, project_state, new_state) def _test_autofield_foreignfield_growth(self, source_field, target_field, target_value): """ A field may be migrated in the following ways: - AutoField to BigAutoField - SmallAutoField to AutoField - SmallAutoField to BigAutoField """ def create_initial_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog = Blog.objects.create(name="web development done right") Article.objects.create(name="Frameworks", blog=blog) Article.objects.create(name="Programming Languages", blog=blog) def create_big_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog2 = Blog.objects.create(name="Frameworks", id=target_value) Article.objects.create(name="Django", blog=blog2) Article.objects.create(id=target_value, name="Django2", blog=blog2) create_blog = migrations.CreateModel( "Blog", [ ("id", source_field(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_article = migrations.CreateModel( "Article", [ ("id", source_field(primary_key=True)), ("blog", models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE)), ("name", models.CharField(max_length=100)), ("data", models.TextField(default="")), ], options={}, ) fill_initial_data = migrations.RunPython(create_initial_data, create_initial_data) fill_big_data = migrations.RunPython(create_big_data, create_big_data) grow_article_id = migrations.AlterField('Article', 'id', target_field(primary_key=True)) grow_blog_id = migrations.AlterField('Blog', 'id', target_field(primary_key=True)) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_blog.state_forwards("test_blog", new_state) create_blog.database_forwards("test_blog", editor, project_state, new_state) 
project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_article.state_forwards("test_article", new_state) create_article.database_forwards("test_article", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_initial_data.state_forwards("fill_initial_data", new_state) fill_initial_data.database_forwards("fill_initial_data", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_article_id.state_forwards("test_article", new_state) grow_article_id.database_forwards("test_article", editor, project_state, new_state) state = new_state.clone() article = state.apps.get_model("test_article.Article") self.assertIsInstance(article._meta.pk, target_field) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_blog_id.state_forwards("test_blog", new_state) grow_blog_id.database_forwards("test_blog", editor, project_state, new_state) state = new_state.clone() blog = state.apps.get_model("test_blog.Blog") self.assertIsInstance(blog._meta.pk, target_field) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_big_data.state_forwards("fill_big_data", new_state) fill_big_data.database_forwards("fill_big_data", editor, project_state, new_state) def test_autofield__bigautofield_foreignfield_growth(self): """A field may be migrated from AutoField to BigAutoField.""" self._test_autofield_foreignfield_growth( models.AutoField, models.BigAutoField, 2 ** 33, ) def test_smallfield_autofield_foreignfield_growth(self): """A field may be migrated from SmallAutoField to AutoField.""" self._test_autofield_foreignfield_growth( models.SmallAutoField, models.AutoField, 2 ** 22, ) def test_smallfield_bigautofield_foreignfield_growth(self): """A field may be migrated from SmallAutoField to BigAutoField.""" self._test_autofield_foreignfield_growth( models.SmallAutoField, models.BigAutoField, 2 ** 33, ) def test_run_python_noop(self): """ #24098 - Tests no-op RunPython operations. """ project_state = ProjectState() new_state = project_state.clone() operation = migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) operation.database_backwards("test_runpython", editor, new_state, project_state) def test_separate_database_and_state(self): """ Tests the SeparateDatabaseAndState operation. 
""" project_state = self.set_up_test_model("test_separatedatabaseandstate") # Create the operation database_operation = migrations.RunSQL( "CREATE TABLE i_love_ponies (id int, special_thing int);", "DROP TABLE i_love_ponies;" ) state_operation = migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))]) operation = migrations.SeparateDatabaseAndState( state_operations=[state_operation], database_operations=[database_operation] ) self.assertEqual(operation.describe(), "Custom state/database change combination") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_separatedatabaseandstate", new_state) self.assertEqual(len(new_state.models["test_separatedatabaseandstate", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_separatedatabaseandstate", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_separatedatabaseandstate", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "SeparateDatabaseAndState") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["database_operations", "state_operations"]) def test_separate_database_and_state2(self): """ A complex SeparateDatabaseAndState operation: Multiple operations both for state and database. Verify the state dependencies within each list and that state ops don't affect the database. """ app_label = "test_separatedatabaseandstate2" project_state = self.set_up_test_model(app_label) # Create the operation database_operations = [ migrations.CreateModel( "ILovePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveponies"}, ), migrations.CreateModel( "ILoveMorePonies", # We use IntegerField and not AutoField because # the model is going to be deleted immediately # and with an AutoField this fails on Oracle [("id", models.IntegerField(primary_key=True))], options={"db_table": "ilovemoreponies"}, ), migrations.DeleteModel("ILoveMorePonies"), migrations.CreateModel( "ILoveEvenMorePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveevenmoreponies"}, ), ] state_operations = [ migrations.CreateModel( "SomethingElse", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingelse"}, ), migrations.DeleteModel("SomethingElse"), migrations.CreateModel( "SomethingCompletelyDifferent", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingcompletelydifferent"}, ), ] operation = migrations.SeparateDatabaseAndState( state_operations=state_operations, database_operations=database_operations, ) # Test the state alteration new_state = project_state.clone() operation.state_forwards(app_label, new_state) def assertModelsAndTables(after_db): # Tables and models exist, or don't, as they should: self.assertNotIn((app_label, "somethingelse"), new_state.models) self.assertEqual(len(new_state.models[app_label, "somethingcompletelydifferent"].fields), 1) self.assertNotIn((app_label, "iloveponiesonies"), new_state.models) self.assertNotIn((app_label, "ilovemoreponies"), new_state.models) self.assertNotIn((app_label, "iloveevenmoreponies"), new_state.models) 
self.assertTableNotExists("somethingelse") self.assertTableNotExists("somethingcompletelydifferent") self.assertTableNotExists("ilovemoreponies") if after_db: self.assertTableExists("iloveponies") self.assertTableExists("iloveevenmoreponies") else: self.assertTableNotExists("iloveponies") self.assertTableNotExists("iloveevenmoreponies") assertModelsAndTables(after_db=False) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) assertModelsAndTables(after_db=True) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) assertModelsAndTables(after_db=False) class SwappableOperationTests(OperationTestBase): """ Key operations ignore swappable models (we don't want to replicate all of them here, as the functionality is in a common base class anyway) """ available_apps = ['migrations'] @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_create_ignore_swapped(self): """ The CreateTable operation ignores swapped models. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], options={ "swappable": "TEST_SWAP_MODEL", }, ) # Test the state alteration (it should still be there!) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crigsw", new_state) self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crigsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crigsw", editor, project_state, new_state) self.assertTableNotExists("test_crigsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crigsw", editor, new_state, project_state) self.assertTableNotExists("test_crigsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_delete_ignore_swapped(self): """ Tests the DeleteModel operation ignores swapped models. """ operation = migrations.DeleteModel("Pony") project_state, new_state = self.make_test_state("test_dligsw", operation) # Test the database alteration self.assertTableNotExists("test_dligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dligsw", editor, project_state, new_state) self.assertTableNotExists("test_dligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dligsw", editor, new_state, project_state) self.assertTableNotExists("test_dligsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_add_field_ignore_swapped(self): """ Tests the AddField operation. 
""" # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=5), ) project_state, new_state = self.make_test_state("test_adfligsw", operation) # Test the database alteration self.assertTableNotExists("test_adfligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_adfligsw", editor, project_state, new_state) self.assertTableNotExists("test_adfligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adfligsw", editor, new_state, project_state) self.assertTableNotExists("test_adfligsw_pony") @override_settings(TEST_SWAP_MODEL='migrations.SomeFakeModel') def test_indexes_ignore_swapped(self): """ Add/RemoveIndex operations ignore swapped models. """ operation = migrations.AddIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state('test_adinigsw', operation) with connection.schema_editor() as editor: # No database queries should be run for swapped models operation.database_forwards('test_adinigsw', editor, project_state, new_state) operation.database_backwards('test_adinigsw', editor, new_state, project_state) operation = migrations.RemoveIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state("test_rminigsw", operation) with connection.schema_editor() as editor: operation.database_forwards('test_rminigsw', editor, project_state, new_state) operation.database_backwards('test_rminigsw', editor, new_state, project_state) class TestCreateModel(SimpleTestCase): def test_references_model_mixin(self): migrations.CreateModel( 'name', fields=[], bases=(Mixin, models.Model), ).references_model('other_model', 'migrations') class FieldOperationTests(SimpleTestCase): def test_references_model(self): operation = FieldOperation('MoDel', 'field', models.ForeignKey('Other', models.CASCADE)) # Model name match. self.assertIs(operation.references_model('mOdEl', 'migrations'), True) # Referenced field. self.assertIs(operation.references_model('oTher', 'migrations'), True) # Doesn't reference. 
self.assertIs(operation.references_model('Whatever', 'migrations'), False) def test_references_field_by_name(self): operation = FieldOperation('MoDel', 'field', models.BooleanField(default=False)) self.assertIs(operation.references_field('model', 'field', 'migrations'), True) def test_references_field_by_remote_field_model(self): operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE)) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_references_field_by_from_fields(self): operation = FieldOperation( 'Model', 'field', models.fields.related.ForeignObject('Other', models.CASCADE, ['from'], ['to']) ) self.assertIs(operation.references_field('Model', 'from', 'migrations'), True) self.assertIs(operation.references_field('Model', 'to', 'migrations'), False) self.assertIs(operation.references_field('Other', 'from', 'migrations'), False) self.assertIs(operation.references_field('Model', 'to', 'migrations'), False) def test_references_field_by_to_fields(self): operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE, to_field='field')) self.assertIs(operation.references_field('Other', 'field', 'migrations'), True) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), False) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_references_field_by_through(self): operation = FieldOperation('Model', 'field', models.ManyToManyField('Other', through='Through')) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_reference_field_by_through_fields(self): operation = FieldOperation( 'Model', 'field', models.ManyToManyField('Other', through='Through', through_fields=('first', 'second')) ) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), False) self.assertIs(operation.references_field('Through', 'first', 'migrations'), True) self.assertIs(operation.references_field('Through', 'second', 'migrations'), True)
832d196892fe10aaafd4c224b73d7e7b3d184590ca2eebc3608ec090d1de1a4b
import functools import re from unittest import mock from django.apps import apps from django.conf import settings from django.contrib.auth.models import AbstractBaseUser from django.core.validators import RegexValidator, validate_slug from django.db import connection, migrations, models from django.db.migrations.autodetector import MigrationAutodetector from django.db.migrations.graph import MigrationGraph from django.db.migrations.loader import MigrationLoader from django.db.migrations.questioner import MigrationQuestioner from django.db.migrations.state import ModelState, ProjectState from django.test import SimpleTestCase, TestCase, override_settings from django.test.utils import isolate_lru_cache from .models import FoodManager, FoodQuerySet class DeconstructibleObject: """ A custom deconstructible object. """ def __init__(self, *args, **kwargs): self.args = args self.kwargs = kwargs def deconstruct(self): return ( self.__module__ + '.' + self.__class__.__name__, self.args, self.kwargs ) class AutodetectorTests(TestCase): """ Tests the migration autodetector. """ author_empty = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))]) author_name = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ]) author_name_null = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, null=True)), ]) author_name_longer = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=400)), ]) author_name_renamed = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("names", models.CharField(max_length=200)), ]) author_name_default = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default='Ada Lovelace')), ]) author_name_check_constraint = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ], {'constraints': [models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')]}, ) author_dates_of_birth_auto_now = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("date_of_birth", models.DateField(auto_now=True)), ("date_time_of_birth", models.DateTimeField(auto_now=True)), ("time_of_birth", models.TimeField(auto_now=True)), ]) author_dates_of_birth_auto_now_add = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("date_of_birth", models.DateField(auto_now_add=True)), ("date_time_of_birth", models.DateTimeField(auto_now_add=True)), ("time_of_birth", models.TimeField(auto_now_add=True)), ]) author_name_deconstructible_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject())), ]) author_name_deconstructible_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject())), ]) author_name_deconstructible_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=models.IntegerField())), ]) author_name_deconstructible_4 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=models.IntegerField())), ]) 
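    # The *_deconstructible_* states below come in near-identical pairs/triples:
    # the autodetector compares field defaults by their deconstruct() output, so
    # states whose defaults deconstruct identically must not trigger AlterField,
    # while a changed arg/kwarg anywhere in the nested structure must.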
author_name_deconstructible_list_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])), ]) author_name_deconstructible_list_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])), ]) author_name_deconstructible_list_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 999])), ]) author_name_deconstructible_tuple_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))), ]) author_name_deconstructible_tuple_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))), ]) author_name_deconstructible_tuple_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 999))), ]) author_name_deconstructible_dict_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 123 })), ]) author_name_deconstructible_dict_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 123 })), ]) author_name_deconstructible_dict_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 999 })), ]) author_name_nested_deconstructible_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_changed_arg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2-changed'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_extra_arg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), None, a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_changed_kwarg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", 
models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c-changed')), ))), ]) author_name_nested_deconstructible_extra_kwarg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), c=None, ))), ]) author_custom_pk = ModelState("testapp", "Author", [("pk_field", models.IntegerField(primary_key=True))]) author_with_biography_non_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField()), ("biography", models.TextField()), ]) author_with_biography_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(blank=True)), ("biography", models.TextField(blank=True)), ]) author_with_book = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) author_with_book_order_wrt = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ], options={"order_with_respect_to": "book"}) author_renamed_with_book = ModelState("testapp", "Writer", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) author_with_publisher_string = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("publisher_name", models.CharField(max_length=200)), ]) author_with_publisher = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)), ]) author_with_user = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("user", models.ForeignKey("auth.User", models.CASCADE)), ]) author_with_custom_user = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("user", models.ForeignKey("thirdapp.CustomUser", models.CASCADE)), ]) author_proxy = ModelState("testapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)) author_proxy_options = ModelState("testapp", "AuthorProxy", [], { "proxy": True, "verbose_name": "Super Author", }, ("testapp.author",)) author_proxy_notproxy = ModelState("testapp", "AuthorProxy", [], {}, ("testapp.author",)) author_proxy_third = ModelState("thirdapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)) author_proxy_third_notproxy = ModelState("thirdapp", "AuthorProxy", [], {}, ("testapp.author",)) author_proxy_proxy = ModelState("testapp", "AAuthorProxyProxy", [], {"proxy": True}, ("testapp.authorproxy",)) author_unmanaged = ModelState("testapp", "AuthorUnmanaged", [], {"managed": False}, ("testapp.author",)) author_unmanaged_managed = ModelState("testapp", "AuthorUnmanaged", [], {}, ("testapp.author",)) author_unmanaged_default_pk = ModelState("testapp", "Author", [("id", 
models.AutoField(primary_key=True))]) author_unmanaged_custom_pk = ModelState("testapp", "Author", [ ("pk_field", models.IntegerField(primary_key=True)), ]) author_with_m2m = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher")), ]) author_with_m2m_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher", blank=True)), ]) author_with_m2m_through = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher", through="testapp.Contract")), ]) author_with_renamed_m2m_through = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher", through="testapp.Deal")), ]) author_with_former_m2m = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.CharField(max_length=100)), ]) author_with_options = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ], { "permissions": [('can_hire', 'Can hire')], "verbose_name": "Authi", }) author_with_db_table_options = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_one"}) author_with_new_db_table_options = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_two"}) author_renamed_with_db_table_options = ModelState("testapp", "NewAuthor", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_one"}) author_renamed_with_new_db_table_options = ModelState("testapp", "NewAuthor", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_three"}) contract = ModelState("testapp", "Contract", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)), ]) contract_renamed = ModelState("testapp", "Deal", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)), ]) publisher = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ]) publisher_with_author = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("name", models.CharField(max_length=100)), ]) publisher_with_aardvark_author = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Aardvark", models.CASCADE)), ("name", models.CharField(max_length=100)), ]) publisher_with_book = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("otherapp.Book", models.CASCADE)), ("name", models.CharField(max_length=100)), ]) other_pony = ModelState("otherapp", "Pony", [ ("id", models.AutoField(primary_key=True)), ]) other_pony_food = ModelState("otherapp", "Pony", [ ("id", models.AutoField(primary_key=True)), ], managers=[ ('food_qs', FoodQuerySet.as_manager()), ('food_mgr', FoodManager('a', 'b')), ('food_mgr_kwargs', FoodManager('x', 'y', 3, 4)), ]) other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))]) third_thing = ModelState("thirdapp", "Thing", 
[("id", models.AutoField(primary_key=True))]) book = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_proxy_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("thirdapp.AuthorProxy", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_proxy_proxy_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.AAuthorProxyProxy", models.CASCADE)), ]) book_migrations_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("migrations.UnmigratedModel", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_with_no_author_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.IntegerField()), ("title", models.CharField(max_length=200)), ]) book_with_no_author = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("title", models.CharField(max_length=200)), ]) book_with_author_renamed = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Writer", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_with_field_and_author_renamed = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("writer", models.ForeignKey("testapp.Writer", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_with_multiple_authors = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("authors", models.ManyToManyField("testapp.Author")), ("title", models.CharField(max_length=200)), ]) book_with_multiple_authors_through_attribution = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("authors", models.ManyToManyField("testapp.Author", through="otherapp.Attribution")), ("title", models.CharField(max_length=200)), ]) book_indexes = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "indexes": [models.Index(fields=["author", "title"], name="book_title_author_idx")], }) book_unordered_indexes = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "indexes": [models.Index(fields=["title", "author"], name="book_author_title_idx")], }) book_foo_together = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("author", "title")}, "unique_together": {("author", "title")}, }) book_foo_together_2 = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "author")}, "unique_together": {("title", "author")}, }) book_foo_together_3 = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("newfield", models.IntegerField()), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { 
"index_together": {("title", "newfield")}, "unique_together": {("title", "newfield")}, }) book_foo_together_4 = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("newfield2", models.IntegerField()), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "newfield2")}, "unique_together": {("title", "newfield2")}, }) attribution = ModelState("otherapp", "Attribution", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) edition = ModelState("thirdapp", "Edition", [ ("id", models.AutoField(primary_key=True)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) custom_user = ModelState("thirdapp", "CustomUser", [ ("id", models.AutoField(primary_key=True)), ("username", models.CharField(max_length=255)), ], bases=(AbstractBaseUser,)) custom_user_no_inherit = ModelState("thirdapp", "CustomUser", [ ("id", models.AutoField(primary_key=True)), ("username", models.CharField(max_length=255)), ]) aardvark = ModelState("thirdapp", "Aardvark", [("id", models.AutoField(primary_key=True))]) aardvark_testapp = ModelState("testapp", "Aardvark", [("id", models.AutoField(primary_key=True))]) aardvark_based_on_author = ModelState("testapp", "Aardvark", [], bases=("testapp.Author",)) aardvark_pk_fk_author = ModelState("testapp", "Aardvark", [ ("id", models.OneToOneField("testapp.Author", models.CASCADE, primary_key=True)), ]) knight = ModelState("eggs", "Knight", [("id", models.AutoField(primary_key=True))]) rabbit = ModelState("eggs", "Rabbit", [ ("id", models.AutoField(primary_key=True)), ("knight", models.ForeignKey("eggs.Knight", models.CASCADE)), ("parent", models.ForeignKey("eggs.Rabbit", models.CASCADE)), ], { "unique_together": {("parent", "knight")}, "indexes": [models.Index(fields=["parent", "knight"], name='rabbit_circular_fk_index')], }) def repr_changes(self, changes, include_dependencies=False): output = "" for app_label, migrations_ in sorted(changes.items()): output += " %s:\n" % app_label for migration in migrations_: output += " %s\n" % migration.name for operation in migration.operations: output += " %s\n" % operation if include_dependencies: output += " Dependencies:\n" if migration.dependencies: for dep in migration.dependencies: output += " %s\n" % (dep,) else: output += " None\n" return output def assertNumberMigrations(self, changes, app_label, number): if len(changes.get(app_label, [])) != number: self.fail("Incorrect number of migrations (%s) for %s (expected %s)\n%s" % ( len(changes.get(app_label, [])), app_label, number, self.repr_changes(changes), )) def assertMigrationDependencies(self, changes, app_label, position, dependencies): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) if len(changes[app_label]) < position + 1: self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes))) migration = changes[app_label][position] if set(migration.dependencies) != set(dependencies): self.fail("Migration dependencies mismatch for %s.%s (expected %s):\n%s" % ( app_label, migration.name, dependencies, self.repr_changes(changes, include_dependencies=True), )) def assertOperationTypes(self, changes, app_label, position, types): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) 
        if len(changes[app_label]) < position + 1:
            self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes)))
        migration = changes[app_label][position]
        real_types = [operation.__class__.__name__ for operation in migration.operations]
        if types != real_types:
            self.fail("Operation type mismatch for %s.%s (expected %s):\n%s" % (
                app_label, migration.name, types, self.repr_changes(changes),
            ))

    def assertOperationAttributes(self, changes, app_label, position, operation_position, **attrs):
        if not changes.get(app_label):
            self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes)))
        if len(changes[app_label]) < position + 1:
            self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes)))
        migration = changes[app_label][position]
        # Bounds-check the operations list before indexing it.
        if len(migration.operations) < operation_position + 1:
            self.fail("No operation at index %s for %s.%s\n%s" % (
                operation_position, app_label, migration.name, self.repr_changes(changes),
            ))
        operation = migration.operations[operation_position]
        for attr, value in attrs.items():
            if getattr(operation, attr, None) != value:
                self.fail("Attribute mismatch for %s.%s op #%s, %s (expected %r, got %r):\n%s" % (
                    app_label, migration.name, operation_position, attr, value,
                    getattr(operation, attr, None), self.repr_changes(changes),
                ))

    def assertOperationFieldAttributes(self, changes, app_label, position, operation_position, **attrs):
        if not changes.get(app_label):
            self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes)))
        if len(changes[app_label]) < position + 1:
            self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes)))
        migration = changes[app_label][position]
        # Bounds-check the operations list before indexing it.
        if len(migration.operations) < operation_position + 1:
            self.fail("No operation at index %s for %s.%s\n%s" % (
                operation_position, app_label, migration.name, self.repr_changes(changes),
            ))
        operation = migration.operations[operation_position]
        if not hasattr(operation, 'field'):
            self.fail("No field attribute for %s.%s op #%s."
% ( app_label, migration.name, operation_position, )) field = operation.field for attr, value in attrs.items(): if getattr(field, attr, None) != value: self.fail("Field attribute mismatch for %s.%s op #%s, field.%s (expected %r, got %r):\n%s" % ( app_label, migration.name, operation_position, attr, value, getattr(field, attr, None), self.repr_changes(changes), )) def make_project_state(self, model_states): "Shortcut to make ProjectStates from lists of predefined models" project_state = ProjectState() for model_state in model_states: project_state.add_model(model_state.clone()) return project_state def get_changes(self, before_states, after_states, questioner=None): if not isinstance(before_states, ProjectState): before_states = self.make_project_state(before_states) if not isinstance(after_states, ProjectState): after_states = self.make_project_state(after_states) return MigrationAutodetector( before_states, after_states, questioner, )._detect_changes() def test_arrange_for_graph(self): """Tests auto-naming of migrations for graph matching.""" # Make a fake graph graph = MigrationGraph() graph.add_node(("testapp", "0001_initial"), None) graph.add_node(("testapp", "0002_foobar"), None) graph.add_node(("otherapp", "0001_initial"), None) graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("testapp", "0001_initial")) graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("otherapp", "0001_initial")) # Use project state to make a new migration change set before = self.make_project_state([self.publisher, self.other_pony]) after = self.make_project_state([ self.author_empty, self.publisher, self.other_pony, self.other_stable, ]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Run through arrange_for_graph changes = autodetector.arrange_for_graph(changes, graph) # Make sure there's a new name, deps match, etc. self.assertEqual(changes["testapp"][0].name, "0003_author") self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]) self.assertEqual(changes["otherapp"][0].name, '0002_stable') self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]) def test_arrange_for_graph_with_multiple_initial(self): # Make a fake graph. graph = MigrationGraph() # Use project state to make a new migration change set. before = self.make_project_state([]) after = self.make_project_state([self.author_with_book, self.book, self.attribution]) autodetector = MigrationAutodetector(before, after, MigrationQuestioner({'ask_initial': True})) changes = autodetector._detect_changes() changes = autodetector.arrange_for_graph(changes, graph) self.assertEqual(changes['otherapp'][0].name, '0001_initial') self.assertEqual(changes['otherapp'][0].dependencies, []) self.assertEqual(changes['otherapp'][1].name, '0002_initial') self.assertCountEqual( changes['otherapp'][1].dependencies, [('testapp', '0001_initial'), ('otherapp', '0001_initial')], ) self.assertEqual(changes['testapp'][0].name, '0001_initial') self.assertEqual(changes['testapp'][0].dependencies, [('otherapp', '0001_initial')]) def test_trim_apps(self): """ Trim does not remove dependencies but does remove unwanted apps. 
""" # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable, self.third_thing]) autodetector = MigrationAutodetector(before, after, MigrationQuestioner({"ask_initial": True})) changes = autodetector._detect_changes() # Run through arrange_for_graph graph = MigrationGraph() changes = autodetector.arrange_for_graph(changes, graph) changes["testapp"][0].dependencies.append(("otherapp", "0001_initial")) changes = autodetector._trim_to_apps(changes, {"testapp"}) # Make sure there's the right set of migrations self.assertEqual(changes["testapp"][0].name, "0001_initial") self.assertEqual(changes["otherapp"][0].name, "0001_initial") self.assertNotIn("thirdapp", changes) def test_custom_migration_name(self): """Tests custom naming of migrations for graph matching.""" # Make a fake graph graph = MigrationGraph() graph.add_node(("testapp", "0001_initial"), None) graph.add_node(("testapp", "0002_foobar"), None) graph.add_node(("otherapp", "0001_initial"), None) graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("testapp", "0001_initial")) # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Run through arrange_for_graph migration_name = 'custom_name' changes = autodetector.arrange_for_graph(changes, graph, migration_name) # Make sure there's a new name, deps match, etc. self.assertEqual(changes["testapp"][0].name, "0003_%s" % migration_name) self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]) self.assertEqual(changes["otherapp"][0].name, "0002_%s" % migration_name) self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]) def test_new_model(self): """Tests autodetection of new models.""" changes = self.get_changes([], [self.other_pony_food]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Pony") self.assertEqual([name for name, mgr in changes['otherapp'][0].operations[0].managers], ['food_qs', 'food_mgr', 'food_mgr_kwargs']) def test_old_model(self): """Tests deletion of old models.""" changes = self.get_changes([self.author_empty], []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["DeleteModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author") def test_add_field(self): """Tests autodetection of new fields.""" changes = self.get_changes([self.author_empty], [self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name") @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_date_fields_with_auto_now_not_asking_for_default(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"]) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now=True) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_date_fields_with_auto_now_add_not_asking_for_null_addition(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now_add]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"]) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_auto_now_add_addition') def test_add_date_fields_with_auto_now_add_asking_for_default(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now_add]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"]) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True) self.assertEqual(mocked_ask_method.call_count, 3) def test_remove_field(self): """Tests autodetection of removed fields.""" changes = self.get_changes([self.author_name], [self.author_empty]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RemoveField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name") def test_alter_field(self): """Tests autodetection of new fields.""" changes = self.get_changes([self.author_name], [self.author_name_longer]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) def test_supports_functools_partial(self): def _content_file_name(instance, filename, key, **kwargs): return '{}/{}'.format(instance, filename) def content_file_name(key, **kwargs): return functools.partial(_content_file_name, key, **kwargs) # An unchanged partial reference. before = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('file'))), ])] after = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('file'))), ])] changes = self.get_changes(before, after) self.assertNumberMigrations(changes, 'testapp', 0) # A changed partial reference. 
args_changed = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('other-file'))), ])] changes = self.get_changes(before, args_changed) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['AlterField']) # Can't use assertOperationFieldAttributes because we need the # deconstructed version, i.e., the exploded func/args/keywords rather # than the partial: we don't care if it's not the same instance of the # partial, only if it's the same source function, args, and keywords. value = changes['testapp'][0].operations[0].field.upload_to self.assertEqual( (_content_file_name, ('other-file',), {}), (value.func, value.args, value.keywords) ) kwargs_changed = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('file', spam='eggs'))), ])] changes = self.get_changes(before, kwargs_changed) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['AlterField']) value = changes['testapp'][0].operations[0].field.upload_to self.assertEqual( (_content_file_name, ('file',), {'spam': 'eggs'}), (value.func, value.args, value.keywords) ) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', side_effect=AssertionError("Should not have prompted for not null addition")) def test_alter_field_to_not_null_with_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name_default]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default='Ada Lovelace') @mock.patch( 'django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', return_value=models.NOT_PROVIDED, ) def test_alter_field_to_not_null_without_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name]) self.assertEqual(mocked_ask_method.call_count, 1) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default=models.NOT_PROVIDED) @mock.patch( 'django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', return_value='Some Name', ) def test_alter_field_to_not_null_oneoff_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name]) self.assertEqual(mocked_ask_method.call_count, 1) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=False) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default="Some Name") def test_rename_field(self): """Tests autodetection of renamed fields.""" changes = self.get_changes( [self.author_name], [self.author_name_renamed], MigrationQuestioner({"ask_rename": True}) ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="name", new_name="names") def test_rename_field_foreign_key_to_field(self): before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField(unique=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE, to_field='field')), ]), ] after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('renamed_field', models.IntegerField(unique=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE, to_field='renamed_field')), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) # Right number/type of migrations? self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField']) self.assertOperationAttributes(changes, 'app', 0, 0, old_name='field', new_name='renamed_field') def test_rename_foreign_object_fields(self): fields = ('first', 'second') renamed_fields = ('first_renamed', 'second_renamed') before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('first', models.IntegerField()), ('second', models.IntegerField()), ], options={'unique_together': {fields}}), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('first', models.IntegerField()), ('second', models.IntegerField()), ('foo', models.ForeignObject( 'app.Foo', models.CASCADE, from_fields=fields, to_fields=fields, )), ]), ] # Case 1: to_fields renames. after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('first_renamed', models.IntegerField()), ('second_renamed', models.IntegerField()), ], options={'unique_together': {renamed_fields}}), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('first', models.IntegerField()), ('second', models.IntegerField()), ('foo', models.ForeignObject( 'app.Foo', models.CASCADE, from_fields=fields, to_fields=renamed_fields, )), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'RenameField', 'AlterUniqueTogether']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='foo', old_name='first', new_name='first_renamed', ) self.assertOperationAttributes( changes, 'app', 0, 1, model_name='foo', old_name='second', new_name='second_renamed', ) # Case 2: from_fields renames. 
after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('first', models.IntegerField()), ('second', models.IntegerField()), ], options={'unique_together': {fields}}), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('first_renamed', models.IntegerField()), ('second_renamed', models.IntegerField()), ('foo', models.ForeignObject( 'app.Foo', models.CASCADE, from_fields=renamed_fields, to_fields=fields, )), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'RenameField']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='bar', old_name='first', new_name='first_renamed', ) self.assertOperationAttributes( changes, 'app', 0, 1, model_name='bar', old_name='second', new_name='second_renamed', ) def test_rename_referenced_primary_key(self): before = [ ModelState('app', 'Foo', [ ('id', models.CharField(primary_key=True, serialize=False)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE)), ]), ] after = [ ModelState('app', 'Foo', [ ('renamed_id', models.CharField(primary_key=True, serialize=False)) ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE)), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField']) self.assertOperationAttributes(changes, 'app', 0, 0, old_name='id', new_name='renamed_id') def test_rename_field_preserved_db_column(self): """ RenameField is used if a field is renamed and db_column equal to the old field's column is added. 
""" before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField()), ]), ] after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('renamed_field', models.IntegerField(db_column='field')), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['AlterField', 'RenameField']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='foo', name='field', ) self.assertEqual(changes['app'][0].operations[0].field.deconstruct(), ( 'field', 'django.db.models.IntegerField', [], {'db_column': 'field'}, )) self.assertOperationAttributes( changes, 'app', 0, 1, model_name='foo', old_name='field', new_name='renamed_field', ) def test_rename_related_field_preserved_db_column(self): before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE)), ]), ] after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('renamed_foo', models.ForeignKey('app.Foo', models.CASCADE, db_column='foo_id')), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['AlterField', 'RenameField']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='bar', name='foo', ) self.assertEqual(changes['app'][0].operations[0].field.deconstruct(), ( 'foo', 'django.db.models.ForeignKey', [], {'to': 'app.foo', 'on_delete': models.CASCADE, 'db_column': 'foo_id'}, )) self.assertOperationAttributes( changes, 'app', 0, 1, model_name='bar', old_name='foo', new_name='renamed_foo', ) def test_rename_model(self): """Tests autodetection of renamed models.""" changes = self.get_changes( [self.author_with_book, self.book], [self.author_renamed_with_book, self.book_with_author_renamed], MigrationQuestioner({"ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="Author", new_name="Writer") # Now that RenameModel handles related fields too, there should be # no AlterField for the related field. self.assertNumberMigrations(changes, 'otherapp', 0) def test_rename_model_case(self): """ Model name is case-insensitive. Changing case doesn't lead to any autodetected operations. 
""" author_renamed = ModelState('testapp', 'author', [ ('id', models.AutoField(primary_key=True)), ]) changes = self.get_changes( [self.author_empty, self.book], [author_renamed, self.book], questioner=MigrationQuestioner({'ask_rename_model': True}), ) self.assertNumberMigrations(changes, 'testapp', 0) self.assertNumberMigrations(changes, 'otherapp', 0) def test_renamed_referenced_m2m_model_case(self): publisher_renamed = ModelState('testapp', 'publisher', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=100)), ]) changes = self.get_changes( [self.publisher, self.author_with_m2m], [publisher_renamed, self.author_with_m2m], questioner=MigrationQuestioner({'ask_rename_model': True}), ) self.assertNumberMigrations(changes, 'testapp', 0) self.assertNumberMigrations(changes, 'otherapp', 0) def test_rename_m2m_through_model(self): """ Tests autodetection of renamed models that are used in M2M relations as through models. """ changes = self.get_changes( [self.author_with_m2m_through, self.publisher, self.contract], [self.author_with_renamed_m2m_through, self.publisher, self.contract_renamed], MigrationQuestioner({'ask_rename_model': True}) ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['RenameModel']) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name='Contract', new_name='Deal') def test_rename_model_with_renamed_rel_field(self): """ Tests autodetection of renamed models while simultaneously renaming one of the fields that relate to the renamed model. """ changes = self.get_changes( [self.author_with_book, self.book], [self.author_renamed_with_book, self.book_with_field_and_author_renamed], MigrationQuestioner({"ask_rename": True, "ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="Author", new_name="Writer") # Right number/type of migrations for related field rename? # Alter is already taken care of. self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["RenameField"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, old_name="author", new_name="writer") def test_rename_model_with_fks_in_different_position(self): """ #24537 - The order of fields in a model does not influence the RenameModel detection. 
""" before = [ ModelState("testapp", "EntityA", [ ("id", models.AutoField(primary_key=True)), ]), ModelState("testapp", "EntityB", [ ("id", models.AutoField(primary_key=True)), ("some_label", models.CharField(max_length=255)), ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)), ]), ] after = [ ModelState("testapp", "EntityA", [ ("id", models.AutoField(primary_key=True)), ]), ModelState("testapp", "RenamedEntityB", [ ("id", models.AutoField(primary_key=True)), ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)), ("some_label", models.CharField(max_length=255)), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({"ask_rename_model": True})) self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="EntityB", new_name="RenamedEntityB") def test_rename_model_reverse_relation_dependencies(self): """ The migration to rename a model pointed to by a foreign key in another app must run after the other app's migration that adds the foreign key with model's original name. Therefore, the renaming migration has a dependency on that other migration. """ before = [ ModelState('testapp', 'EntityA', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('otherapp', 'EntityB', [ ('id', models.AutoField(primary_key=True)), ('entity_a', models.ForeignKey('testapp.EntityA', models.CASCADE)), ]), ] after = [ ModelState('testapp', 'RenamedEntityA', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('otherapp', 'EntityB', [ ('id', models.AutoField(primary_key=True)), ('entity_a', models.ForeignKey('testapp.RenamedEntityA', models.CASCADE)), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename_model': True})) self.assertNumberMigrations(changes, 'testapp', 1) self.assertMigrationDependencies(changes, 'testapp', 0, [('otherapp', '__first__')]) self.assertOperationTypes(changes, 'testapp', 0, ['RenameModel']) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name='EntityA', new_name='RenamedEntityA') def test_fk_dependency(self): """Having a ForeignKey automatically adds a dependency.""" # Note that testapp (author) has no dependencies, # otherapp (book) depends on testapp (author), # thirdapp (edition) depends on otherapp (book) changes = self.get_changes([], [self.author_name, self.book, self.edition]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("testapp", "auto_1")]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="Edition") self.assertMigrationDependencies(changes, 'thirdapp', 0, [("otherapp", "auto_1")]) def test_proxy_fk_dependency(self): """FK dependencies still work on proxy models.""" # Note that testapp (author) has no dependencies, # otherapp (book) depends on testapp (authorproxy) changes = self.get_changes([], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("thirdapp", "auto_1")]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="AuthorProxy") self.assertMigrationDependencies(changes, 'thirdapp', 0, [("testapp", "auto_1")]) def test_same_app_no_fk_dependency(self): """ A migration with a FK between two models of the same app does not have a dependency to itself. """ changes = self.get_changes([], [self.author_with_publisher, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher") self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, []) def test_circular_fk_dependency(self): """ Having a circular ForeignKey dependency automatically resolves the situation into 2 migrations on one side and 1 on the other. """ changes = self.get_changes([], [self.author_with_book, self.book, self.publisher_with_book]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher") self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, [("otherapp", "auto_1")]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 2) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'otherapp', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'otherapp', 0, []) self.assertMigrationDependencies(changes, 'otherapp', 1, [("otherapp", "auto_1"), ("testapp", "auto_1")]) # both split migrations should be `initial` self.assertTrue(changes['otherapp'][0].initial) self.assertTrue(changes['otherapp'][1].initial) def test_same_app_circular_fk_dependency(self): """ A migration with a FK between two models of the same app does not have a dependency to itself. """ changes = self.get_changes([], [self.author_with_publisher, self.publisher_with_author]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel", "AddField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher") self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher") self.assertMigrationDependencies(changes, 'testapp', 0, []) def test_same_app_circular_fk_dependency_with_unique_together_and_indexes(self): """ #22275 - A migration with circular FK dependency does not try to create unique together constraint and indexes before creating all required fields first. """ changes = self.get_changes([], [self.knight, self.rabbit]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'eggs', 1) self.assertOperationTypes( changes, 'eggs', 0, ["CreateModel", "CreateModel", "AddIndex", "AlterUniqueTogether"] ) self.assertNotIn("unique_together", changes['eggs'][0].operations[0].options) self.assertNotIn("unique_together", changes['eggs'][0].operations[1].options) self.assertMigrationDependencies(changes, 'eggs', 0, []) def test_alter_db_table_add(self): """Tests detection for adding db_table in model's options.""" changes = self.get_changes([self.author_empty], [self.author_with_db_table_options]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table="author_one") def test_alter_db_table_change(self): """Tests detection for changing db_table in model's options'.""" changes = self.get_changes([self.author_with_db_table_options], [self.author_with_new_db_table_options]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table="author_two") def test_alter_db_table_remove(self): """Tests detection for removing db_table in model's options.""" changes = self.get_changes([self.author_with_db_table_options], [self.author_empty]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table=None) def test_alter_db_table_no_changes(self): """ Alter_db_table doesn't generate a migration if no changes have been made. """ changes = self.get_changes([self.author_with_db_table_options], [self.author_with_db_table_options]) # Right number of migrations? self.assertEqual(len(changes), 0) def test_keep_db_table_with_model_change(self): """ Tests when model changes but db_table stays as-is, autodetector must not create more than one operation. """ changes = self.get_changes( [self.author_with_db_table_options], [self.author_renamed_with_db_table_options], MigrationQuestioner({"ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor") def test_alter_db_table_with_model_change(self): """ Tests when model and db_table changes, autodetector must create two operations. 
""" changes = self.get_changes( [self.author_with_db_table_options], [self.author_renamed_with_new_db_table_options], MigrationQuestioner({"ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel", "AlterModelTable"]) self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor") self.assertOperationAttributes(changes, "testapp", 0, 1, name="newauthor", table="author_three") def test_identical_regex_doesnt_alter(self): from_state = ModelState( "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[ RegexValidator( re.compile('^[-a-zA-Z0-9_]+\\Z'), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid' ) ]))] ) to_state = ModelState( "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[validate_slug]))] ) changes = self.get_changes([from_state], [to_state]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 0) def test_different_regex_does_alter(self): from_state = ModelState( "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[ RegexValidator( re.compile('^[a-z]+\\Z', 32), 'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.', 'invalid' ) ]))] ) to_state = ModelState( "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[validate_slug]))] ) changes = self.get_changes([from_state], [to_state]) self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterField"]) def test_empty_foo_together(self): """ #23452 - Empty unique/index_together shouldn't generate a migration. """ # Explicitly testing for not specified, since this is the case after # a CreateModel operation w/o any definition on the original model model_state_not_specified = ModelState("a", "model", [("id", models.AutoField(primary_key=True))]) # Explicitly testing for None, since this was the issue in #23452 after # an AlterFooTogether operation with e.g. () as value model_state_none = ModelState("a", "model", [ ("id", models.AutoField(primary_key=True)) ], { "index_together": None, "unique_together": None, }) # Explicitly testing for the empty set, since we now always have sets. 
# During removal (('col1', 'col2'),) --> () this becomes set([]) model_state_empty = ModelState("a", "model", [ ("id", models.AutoField(primary_key=True)) ], { "index_together": set(), "unique_together": set(), }) def test(from_state, to_state, msg): changes = self.get_changes([from_state], [to_state]) if changes: ops = ', '.join(o.__class__.__name__ for o in changes['a'][0].operations) self.fail('Created operation(s) %s from %s' % (ops, msg)) tests = ( (model_state_not_specified, model_state_not_specified, '"not specified" to "not specified"'), (model_state_not_specified, model_state_none, '"not specified" to "None"'), (model_state_not_specified, model_state_empty, '"not specified" to "empty"'), (model_state_none, model_state_not_specified, '"None" to "not specified"'), (model_state_none, model_state_none, '"None" to "None"'), (model_state_none, model_state_empty, '"None" to "empty"'), (model_state_empty, model_state_not_specified, '"empty" to "not specified"'), (model_state_empty, model_state_none, '"empty" to "None"'), (model_state_empty, model_state_empty, '"empty" to "empty"'), ) for t in tests: test(*t) def test_create_model_with_indexes(self): """Test creation of new model with indexes already defined.""" author = ModelState('otherapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ], {'indexes': [models.Index(fields=['name'], name='create_model_with_indexes_idx')]}) changes = self.get_changes([], [author]) added_index = models.Index(fields=['name'], name='create_model_with_indexes_idx') # Right number of migrations? self.assertEqual(len(changes['otherapp']), 1) # Right number of actions? migration = changes['otherapp'][0] self.assertEqual(len(migration.operations), 2) # Right actions order? self.assertOperationTypes(changes, 'otherapp', 0, ['CreateModel', 'AddIndex']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='Author') self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='author', index=added_index) def test_add_indexes(self): """Test change detection of new indexes.""" changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_indexes]) self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AddIndex']) added_index = models.Index(fields=['author', 'title'], name='book_title_author_idx') self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', index=added_index) def test_remove_indexes(self): """Test change detection of removed indexes.""" changes = self.get_changes([self.author_empty, self.book_indexes], [self.author_empty, self.book]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveIndex']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', name='book_title_author_idx') def test_order_fields_indexes(self): """Test change detection of reordering of fields in indexes.""" changes = self.get_changes( [self.author_empty, self.book_indexes], [self.author_empty, self.book_unordered_indexes] ) self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveIndex', 'AddIndex']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', name='book_title_author_idx') added_index = models.Index(fields=['title', 'author'], name='book_author_title_idx') self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='book', index=added_index) def test_create_model_with_check_constraint(self): """Test creation of new model with constraints already defined.""" author = ModelState('otherapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ], {'constraints': [models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')]}) changes = self.get_changes([], [author]) added_constraint = models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob') # Right number of migrations? self.assertEqual(len(changes['otherapp']), 1) # Right number of actions? migration = changes['otherapp'][0] self.assertEqual(len(migration.operations), 2) # Right actions order? self.assertOperationTypes(changes, 'otherapp', 0, ['CreateModel', 'AddConstraint']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='Author') self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='author', constraint=added_constraint) def test_add_constraints(self): """Test change detection of new constraints.""" changes = self.get_changes([self.author_name], [self.author_name_check_constraint]) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['AddConstraint']) added_constraint = models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob') self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name='author', constraint=added_constraint) def test_remove_constraints(self): """Test change detection of removed constraints.""" changes = self.get_changes([self.author_name_check_constraint], [self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['RemoveConstraint']) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name='author', name='name_contains_bob') def test_add_foo_together(self): """Tests index/unique_together detection.""" changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_foo_together]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("author", "title")}) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("author", "title")}) def test_remove_foo_together(self): """Tests index/unique_together detection.""" changes = self.get_changes([self.author_empty, self.book_foo_together], [self.author_empty, self.book]) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together=set()) def test_foo_together_remove_fk(self): """Tests unique_together and field removal detection & ordering""" changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_with_no_author] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, [ "AlterUniqueTogether", "AlterIndexTogether", "RemoveField" ]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 2, model_name="book", name="author") def test_foo_together_no_changes(self): """ index/unique_together doesn't generate a migration if no changes have been made. """ changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_foo_together] ) # Right number of migrations? self.assertEqual(len(changes), 0) def test_foo_together_ordering(self): """ index/unique_together also triggers on ordering changes. """ changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_foo_together_2] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, 'otherapp', 0, [ 'AlterUniqueTogether', 'AlterIndexTogether', ]) self.assertOperationAttributes( changes, 'otherapp', 0, 0, name='book', unique_together={('title', 'author')}, ) self.assertOperationAttributes( changes, 'otherapp', 0, 1, name='book', index_together={('title', 'author')}, ) def test_add_field_and_foo_together(self): """ Added fields will be created before using them in index/unique_together. """ changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_foo_together_3]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AddField", "AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", unique_together={("title", "newfield")}) self.assertOperationAttributes(changes, "otherapp", 0, 2, name="book", index_together={("title", "newfield")}) def test_create_model_and_unique_together(self): author = ModelState("otherapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ]) book_with_author = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("otherapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "author")}, "unique_together": {("title", "author")}, }) changes = self.get_changes([self.book_with_no_author], [author, book_with_author]) # Right number of migrations? self.assertEqual(len(changes['otherapp']), 1) # Right number of actions? migration = changes['otherapp'][0] self.assertEqual(len(migration.operations), 4) # Right actions order? self.assertOperationTypes( changes, 'otherapp', 0, ['CreateModel', 'AddField', 'AlterUniqueTogether', 'AlterIndexTogether'] ) def test_remove_field_and_foo_together(self): """ Removed fields will be removed after updating index/unique_together. """ changes = self.get_changes( [self.author_empty, self.book_foo_together_3], [self.author_empty, self.book_foo_together] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, 'otherapp', 0, [ 'AlterUniqueTogether', 'AlterIndexTogether', 'RemoveField', ]) self.assertOperationAttributes( changes, 'otherapp', 0, 0, name='book', unique_together={('author', 'title')}, ) self.assertOperationAttributes( changes, 'otherapp', 0, 1, name='book', index_together={('author', 'title')}, ) self.assertOperationAttributes( changes, 'otherapp', 0, 2, model_name='book', name='newfield', ) def test_alter_field_and_foo_together(self): """Fields are altered after deleting some index/unique_together.""" initial_author = ModelState('testapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ('age', models.IntegerField(db_index=True)), ], { 'unique_together': {('name',)}, }) author_reversed_constraints = ModelState('testapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200, unique=True)), ('age', models.IntegerField()), ], { 'index_together': {('age',)}, }) changes = self.get_changes([initial_author], [author_reversed_constraints]) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, [ 'AlterUniqueTogether', 'AlterField', 'AlterField', 'AlterIndexTogether', ]) self.assertOperationAttributes( changes, 'testapp', 0, 0, name='author', unique_together=set(), ) self.assertOperationAttributes( changes, 'testapp', 0, 1, model_name='author', name='age', ) self.assertOperationAttributes( changes, 'testapp', 0, 2, model_name='author', name='name', ) self.assertOperationAttributes( changes, 'testapp', 0, 3, name='author', index_together={('age',)}, ) def test_partly_alter_foo_together(self): initial_author = ModelState('testapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ('age', models.IntegerField()), ], { 'unique_together': {('name',), ('age',)}, 'index_together': {('name',)}, 
}) author_reversed_constraints = ModelState('testapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ('age', models.IntegerField()), ], { 'unique_together': {('age',)}, 'index_together': {('name',), ('age',)}, }) changes = self.get_changes([initial_author], [author_reversed_constraints]) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, [ 'AlterUniqueTogether', 'AlterIndexTogether', ]) self.assertOperationAttributes( changes, 'testapp', 0, 0, name='author', unique_together={('age',)}, ) self.assertOperationAttributes( changes, 'testapp', 0, 1, name='author', index_together={('name',), ('age',)}, ) def test_rename_field_and_foo_together(self): """Fields are renamed before updating index/unique_together.""" changes = self.get_changes( [self.author_empty, self.book_foo_together_3], [self.author_empty, self.book_foo_together_4], MigrationQuestioner({"ask_rename": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, 'otherapp', 0, [ 'RenameField', 'AlterUniqueTogether', 'AlterIndexTogether', ]) self.assertOperationAttributes( changes, 'otherapp', 0, 1, name='book', unique_together={('title', 'newfield2')}, ) self.assertOperationAttributes( changes, 'otherapp', 0, 2, name='book', index_together={('title', 'newfield2')}, ) def test_proxy(self): """The autodetector correctly deals with proxy models.""" # First, we test adding a proxy model changes = self.get_changes([self.author_empty], [self.author_empty, self.author_proxy]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"]) self.assertOperationAttributes( changes, "testapp", 0, 0, name="AuthorProxy", options={"proxy": True, "indexes": [], "constraints": []} ) # Now, we test turning a proxy model into a non-proxy model # It should delete the proxy then make the real one changes = self.get_changes( [self.author_empty, self.author_proxy], [self.author_empty, self.author_proxy_notproxy] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="AuthorProxy") self.assertOperationAttributes(changes, "testapp", 0, 1, name="AuthorProxy", options={}) def test_proxy_non_model_parent(self): class Mixin: pass author_proxy_non_model_parent = ModelState( 'testapp', 'AuthorProxy', [], {'proxy': True}, (Mixin, 'testapp.author'), ) changes = self.get_changes( [self.author_empty], [self.author_empty, author_proxy_non_model_parent], ) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['CreateModel']) self.assertOperationAttributes( changes, 'testapp', 0, 0, name='AuthorProxy', options={'proxy': True, 'indexes': [], 'constraints': []}, bases=(Mixin, 'testapp.author'), ) def test_proxy_custom_pk(self): """ #23415 - The autodetector must correctly deal with custom FK on proxy models. """ # First, we test the default pk field name changes = self.get_changes([], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]) # The model the FK is pointing from and to. 
self.assertEqual( changes['otherapp'][0].operations[0].fields[2][1].remote_field.model, 'thirdapp.AuthorProxy', ) # Now, we test the custom pk field name changes = self.get_changes([], [self.author_custom_pk, self.author_proxy_third, self.book_proxy_fk]) # The model the FK is pointing from and to. self.assertEqual( changes['otherapp'][0].operations[0].fields[2][1].remote_field.model, 'thirdapp.AuthorProxy', ) def test_proxy_to_mti_with_fk_to_proxy(self): # First, test the pk table and field name. to_state = self.make_project_state( [self.author_empty, self.author_proxy_third, self.book_proxy_fk], ) changes = self.get_changes([], to_state) fk_field = changes['otherapp'][0].operations[0].fields[2][1] self.assertEqual( to_state.get_concrete_model_key(fk_field.remote_field.model), ('testapp', 'author'), ) self.assertEqual(fk_field.remote_field.model, 'thirdapp.AuthorProxy') # Change AuthorProxy to use MTI. from_state = to_state.clone() to_state = self.make_project_state( [self.author_empty, self.author_proxy_third_notproxy, self.book_proxy_fk], ) changes = self.get_changes(from_state, to_state) # Right number/type of migrations for the AuthorProxy model? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ['DeleteModel', 'CreateModel']) # Right number/type of migrations for the Book model with a FK to # AuthorProxy? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField']) # otherapp should depend on thirdapp. self.assertMigrationDependencies(changes, 'otherapp', 0, [('thirdapp', 'auto_1')]) # Now, test the pk table and field name. fk_field = changes['otherapp'][0].operations[0].field self.assertEqual( to_state.get_concrete_model_key(fk_field.remote_field.model), ('thirdapp', 'authorproxy'), ) self.assertEqual(fk_field.remote_field.model, 'thirdapp.AuthorProxy') def test_proxy_to_mti_with_fk_to_proxy_proxy(self): # First, test the pk table and field name. to_state = self.make_project_state([ self.author_empty, self.author_proxy, self.author_proxy_proxy, self.book_proxy_proxy_fk, ]) changes = self.get_changes([], to_state) fk_field = changes['otherapp'][0].operations[0].fields[1][1] self.assertEqual( to_state.get_concrete_model_key(fk_field.remote_field.model), ('testapp', 'author'), ) self.assertEqual(fk_field.remote_field.model, 'testapp.AAuthorProxyProxy') # Change AuthorProxy to use MTI. FK still points to AAuthorProxyProxy, # a proxy of AuthorProxy. from_state = to_state.clone() to_state = self.make_project_state([ self.author_empty, self.author_proxy_notproxy, self.author_proxy_proxy, self.book_proxy_proxy_fk, ]) changes = self.get_changes(from_state, to_state) # Right number/type of migrations for the AuthorProxy model? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['DeleteModel', 'CreateModel']) # Right number/type of migrations for the Book model with a FK to # AAuthorProxyProxy? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField']) # otherapp should depend on testapp. self.assertMigrationDependencies(changes, 'otherapp', 0, [('testapp', 'auto_1')]) # Now, test the pk table and field name. 
fk_field = changes['otherapp'][0].operations[0].field self.assertEqual( to_state.get_concrete_model_key(fk_field.remote_field.model), ('testapp', 'authorproxy'), ) self.assertEqual(fk_field.remote_field.model, 'testapp.AAuthorProxyProxy') def test_unmanaged_create(self): """The autodetector correctly deals with managed models.""" # First, we test adding an unmanaged model changes = self.get_changes([self.author_empty], [self.author_empty, self.author_unmanaged]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="AuthorUnmanaged", options={"managed": False}) def test_unmanaged_delete(self): changes = self.get_changes([self.author_empty, self.author_unmanaged], [self.author_empty]) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['DeleteModel']) def test_unmanaged_to_managed(self): # Now, we test turning an unmanaged model into a managed model changes = self.get_changes( [self.author_empty, self.author_unmanaged], [self.author_empty, self.author_unmanaged_managed] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="authorunmanaged", options={}) def test_managed_to_unmanaged(self): # Now, we turn managed to unmanaged. changes = self.get_changes( [self.author_empty, self.author_unmanaged_managed], [self.author_empty, self.author_unmanaged] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="authorunmanaged", options={"managed": False}) def test_unmanaged_custom_pk(self): """ #23415 - The autodetector must correctly deal with custom FK on unmanaged models. """ # First, we test the default pk field name changes = self.get_changes([], [self.author_unmanaged_default_pk, self.book]) # The model the FK on the book model points to. fk_field = changes['otherapp'][0].operations[0].fields[2][1] self.assertEqual(fk_field.remote_field.model, 'testapp.Author') # Now, we test the custom pk field name changes = self.get_changes([], [self.author_unmanaged_custom_pk, self.book]) # The model the FK on the book model points to. fk_field = changes['otherapp'][0].operations[0].fields[2][1] self.assertEqual(fk_field.remote_field.model, 'testapp.Author') @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser") def test_swappable(self): with isolate_lru_cache(apps.get_swappable_settings_name): changes = self.get_changes([self.custom_user], [self.custom_user, self.author_with_custom_user]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, [("__setting__", "AUTH_USER_MODEL")]) def test_swappable_lowercase(self): model_state = ModelState('testapp', 'Document', [ ('id', models.AutoField(primary_key=True)), ('owner', models.ForeignKey( settings.AUTH_USER_MODEL.lower(), models.CASCADE, )), ]) with isolate_lru_cache(apps.get_swappable_settings_name): changes = self.get_changes([], [model_state]) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['CreateModel']) self.assertOperationAttributes(changes, 'testapp', 0, 0, name='Document') self.assertMigrationDependencies( changes, 'testapp', 0, [('__setting__', 'AUTH_USER_MODEL')], ) def test_swappable_changed(self): with isolate_lru_cache(apps.get_swappable_settings_name): before = self.make_project_state([self.custom_user, self.author_with_user]) with override_settings(AUTH_USER_MODEL="thirdapp.CustomUser"): after = self.make_project_state([self.custom_user, self.author_with_custom_user]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name="author", name='user') fk_field = changes['testapp'][0].operations[0].field self.assertEqual(fk_field.remote_field.model, 'thirdapp.CustomUser') def test_add_field_with_default(self): """#22030 - Adding a field with a default should work.""" changes = self.get_changes([self.author_empty], [self.author_name_default]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="name") def test_custom_deconstructible(self): """ Two instances which deconstruct to the same value aren't considered a change. """ changes = self.get_changes([self.author_name_deconstructible_1], [self.author_name_deconstructible_2]) # Right number of migrations? self.assertEqual(len(changes), 0) def test_deconstruct_field_kwarg(self): """Field instances are handled correctly by nested deconstruction.""" changes = self.get_changes([self.author_name_deconstructible_3], [self.author_name_deconstructible_4]) self.assertEqual(changes, {}) def test_deconstructible_list(self): """Nested deconstruction descends into lists.""" # When lists contain items that deconstruct to identical values, those lists # should be considered equal for the purpose of detecting state changes # (even if the original items are unequal). changes = self.get_changes( [self.author_name_deconstructible_list_1], [self.author_name_deconstructible_list_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed lists should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_list_1], [self.author_name_deconstructible_list_3] ) self.assertEqual(len(changes), 1) def test_deconstructible_tuple(self): """Nested deconstruction descends into tuples.""" # When tuples contain items that deconstruct to identical values, those tuples # should be considered equal for the purpose of detecting state changes # (even if the original items are unequal). 
changes = self.get_changes( [self.author_name_deconstructible_tuple_1], [self.author_name_deconstructible_tuple_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed tuples should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_tuple_1], [self.author_name_deconstructible_tuple_3] ) self.assertEqual(len(changes), 1) def test_deconstructible_dict(self): """Nested deconstruction descends into dict values.""" # When dicts contain items whose values deconstruct to identical values, # those dicts should be considered equal for the purpose of detecting # state changes (even if the original values are unequal). changes = self.get_changes( [self.author_name_deconstructible_dict_1], [self.author_name_deconstructible_dict_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed dicts should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_dict_1], [self.author_name_deconstructible_dict_3] ) self.assertEqual(len(changes), 1) def test_nested_deconstructible_objects(self): """ Nested deconstruction is applied recursively to the args/kwargs of deconstructed objects. """ # If the items within a deconstructed object's args/kwargs have the same # deconstructed values - whether or not the items themselves are different # instances - then the object as a whole is regarded as unchanged. changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_2] ) self.assertEqual(changes, {}) # Differences that exist solely within the args list of a deconstructed object # should be reported as changes changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_changed_arg] ) self.assertEqual(len(changes), 1) # Additional args should also be reported as a change changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_extra_arg] ) self.assertEqual(len(changes), 1) # Differences that exist solely within the kwargs dict of a deconstructed object # should be reported as changes changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_changed_kwarg] ) self.assertEqual(len(changes), 1) # Additional kwargs should also be reported as a change changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_extra_kwarg] ) self.assertEqual(len(changes), 1) def test_deconstruct_type(self): """ #22951 -- Uninstantiated classes with deconstruct are correctly returned by deep_deconstruct during serialization. """ author = ModelState( "testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField( max_length=200, # IntegerField intentionally not instantiated. default=models.IntegerField, )) ], ) changes = self.get_changes([], [author]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) def test_replace_string_with_foreignkey(self): """ #22300 - Adding an FK in the same "spot" as a deleted CharField should work. """ changes = self.get_changes([self.author_with_publisher_string], [self.author_with_publisher, self.publisher]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "RemoveField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Publisher") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publisher_name") self.assertOperationAttributes(changes, 'testapp', 0, 2, name="publisher") def test_foreign_key_removed_before_target_model(self): """ Removing an FK and the model it targets in the same change must remove the FK field before the model to maintain consistency. """ changes = self.get_changes( [self.author_with_publisher, self.publisher], [self.author_name] ) # removes both the model and FK # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RemoveField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publisher") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Publisher") @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_many_to_many(self, mocked_ask_method): """#22435 - Adding a ManyToManyField should not prompt for a default.""" changes = self.get_changes([self.author_empty, self.publisher], [self.author_with_m2m, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers") def test_alter_many_to_many(self): changes = self.get_changes( [self.author_with_m2m, self.publisher], [self.author_with_m2m_blank, self.publisher] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers") def test_create_with_through_model(self): """ Adding a m2m with a through model and the models that use it should be ordered correctly. """ changes = self.get_changes([], [self.author_with_m2m_through, self.publisher, self.contract]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, [ 'CreateModel', 'CreateModel', 'CreateModel', 'AddField', ]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name='Author') self.assertOperationAttributes(changes, 'testapp', 0, 1, name='Publisher') self.assertOperationAttributes(changes, 'testapp', 0, 2, name='Contract') self.assertOperationAttributes(changes, 'testapp', 0, 3, model_name='author', name='publishers') def test_many_to_many_removed_before_through_model(self): """ Removing a ManyToManyField and the "through" model in the same change must remove the field before the model to maintain consistency. """ changes = self.get_changes( [self.book_with_multiple_authors_through_attribution, self.author_name, self.attribution], [self.book_with_no_author, self.author_name], ) # Remove both the through model and ManyToMany # Right number/type of migrations? 
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveField', 'DeleteModel']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='authors', model_name='book') self.assertOperationAttributes(changes, 'otherapp', 0, 1, name='Attribution') def test_many_to_many_removed_before_through_model_2(self): """ Removing a model that contains a ManyToManyField and the "through" model in the same change must remove the field before the model to maintain consistency. """ changes = self.get_changes( [self.book_with_multiple_authors_through_attribution, self.author_name, self.attribution], [self.author_name], ) # Remove both the through model and ManyToMany # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveField', 'DeleteModel', 'DeleteModel']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='authors', model_name='book') self.assertOperationAttributes(changes, 'otherapp', 0, 1, name='Attribution') self.assertOperationAttributes(changes, 'otherapp', 0, 2, name='Book') def test_m2m_w_through_multistep_remove(self): """ A model with a m2m field that specifies a "through" model cannot be removed in the same migration as that through model as the schema will pass through an inconsistent state. The autodetector should produce two migrations to avoid this issue. """ changes = self.get_changes([self.author_with_m2m_through, self.publisher, self.contract], [self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, [ "RemoveField", "RemoveField", "DeleteModel", "DeleteModel" ]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", model_name='contract') self.assertOperationAttributes(changes, "testapp", 0, 1, name="publisher", model_name='contract') self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 3, name="Contract") def test_concrete_field_changed_to_many_to_many(self): """ #23938 - Changing a concrete field into a ManyToManyField first removes the concrete field and then adds the m2m field. """ changes = self.get_changes([self.author_with_former_m2m], [self.author_with_m2m, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name='Publisher') self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 2, name="publishers", model_name='author') def test_many_to_many_changed_to_concrete_field(self): """ #23938 - Changing a ManyToManyField into a concrete field first removes the m2m field and then adds the concrete field. """ changes = self.get_changes([self.author_with_m2m, self.publisher], [self.author_with_former_m2m]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "AddField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 2, name='Publisher') self.assertOperationFieldAttributes(changes, 'testapp', 0, 1, max_length=100) def test_non_circular_foreignkey_dependency_removal(self): """ If two models with a ForeignKey from one to the other are removed at the same time, the autodetector should remove them in the correct order. """ changes = self.get_changes([self.author_with_publisher, self.publisher_with_author], []) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "DeleteModel", "DeleteModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", model_name='publisher') self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 2, name="Publisher") def test_alter_model_options(self): """Changing a model's options should make a change.""" changes = self.get_changes([self.author_empty], [self.author_with_options]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, options={ "permissions": [('can_hire', 'Can hire')], "verbose_name": "Authi", }) # Changing them back to empty should also make a change changes = self.get_changes([self.author_with_options], [self.author_empty]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", options={}) def test_alter_model_options_proxy(self): """Changing a proxy model's options should also make a change.""" changes = self.get_changes( [self.author_proxy, self.author_empty], [self.author_proxy_options, self.author_empty] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="authorproxy", options={ "verbose_name": "Super Author" }) def test_set_alter_order_with_respect_to(self): """Setting order_with_respect_to adds a field.""" changes = self.get_changes([self.book, self.author_with_book], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterOrderWithRespectTo"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="author", order_with_respect_to="book") def test_add_alter_order_with_respect_to(self): """ Setting order_with_respect_to when adding the FK too does things in the right order. """ changes = self.get_changes([self.author_name], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AlterOrderWithRespectTo"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name="author", name="book") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="author", order_with_respect_to="book") def test_remove_alter_order_with_respect_to(self): """ Removing order_with_respect_to when removing the FK too does things in the right order. """ changes = self.get_changes([self.book, self.author_with_book_order_wrt], [self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterOrderWithRespectTo", "RemoveField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="author", order_with_respect_to=None) self.assertOperationAttributes(changes, 'testapp', 0, 1, model_name="author", name="book") def test_add_model_order_with_respect_to(self): """ Setting order_with_respect_to when adding the whole model does things in the right order. """ changes = self.get_changes([], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes( changes, 'testapp', 0, 0, name="Author", options={'order_with_respect_to': 'book'} ) self.assertNotIn("_order", [name for name, field in changes['testapp'][0].operations[0].fields]) def test_add_model_order_with_respect_to_index_foo_together(self): changes = self.get_changes([], [ self.book, ModelState('testapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ('book', models.ForeignKey('otherapp.Book', models.CASCADE)), ], options={ 'order_with_respect_to': 'book', 'index_together': {('name', '_order')}, 'unique_together': {('id', '_order')}, }), ]) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['CreateModel']) self.assertOperationAttributes( changes, 'testapp', 0, 0, name='Author', options={ 'order_with_respect_to': 'book', 'index_together': {('name', '_order')}, 'unique_together': {('id', '_order')}, }, ) def test_add_model_order_with_respect_to_index_constraint(self): tests = [ ( 'AddIndex', {'indexes': [ models.Index(fields=['_order'], name='book_order_idx'), ]}, ), ( 'AddConstraint', {'constraints': [ models.CheckConstraint( check=models.Q(_order__gt=1), name='book_order_gt_1', ), ]}, ), ] for operation, extra_option in tests: with self.subTest(operation=operation): after = ModelState('testapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ('book', models.ForeignKey('otherapp.Book', models.CASCADE)), ], options={ 'order_with_respect_to': 'book', **extra_option, }) changes = self.get_changes([], [self.book, after]) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, [ 'CreateModel', operation, ]) self.assertOperationAttributes( changes, 'testapp', 0, 0, name='Author', options={'order_with_respect_to': 'book'}, ) def test_set_alter_order_with_respect_to_index_constraint_foo_together(self): tests = [ ( 'AddIndex', {'indexes': [ models.Index(fields=['_order'], name='book_order_idx'), ]}, ), ( 'AddConstraint', {'constraints': [ models.CheckConstraint( check=models.Q(_order__gt=1), name='book_order_gt_1', ), ]}, ), ('AlterIndexTogether', {'index_together': 
{('name', '_order')}}), ('AlterUniqueTogether', {'unique_together': {('id', '_order')}}), ] for operation, extra_option in tests: with self.subTest(operation=operation): after = ModelState('testapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ('book', models.ForeignKey('otherapp.Book', models.CASCADE)), ], options={ 'order_with_respect_to': 'book', **extra_option, }) changes = self.get_changes( [self.book, self.author_with_book], [self.book, after], ) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, [ 'AlterOrderWithRespectTo', operation, ]) def test_alter_model_managers(self): """ Changing the model managers adds a new operation. """ changes = self.get_changes([self.other_pony], [self.other_pony_food]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["AlterModelManagers"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="pony") self.assertEqual([name for name, mgr in changes['otherapp'][0].operations[0].managers], ['food_qs', 'food_mgr', 'food_mgr_kwargs']) self.assertEqual(changes['otherapp'][0].operations[0].managers[1][1].args, ('a', 'b', 1, 2)) self.assertEqual(changes['otherapp'][0].operations[0].managers[2][1].args, ('x', 'y', 3, 4)) def test_swappable_first_inheritance(self): """Swappable models get their CreateModel first.""" changes = self.get_changes([], [self.custom_user, self.aardvark]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="CustomUser") self.assertOperationAttributes(changes, 'thirdapp', 0, 1, name="Aardvark") def test_default_related_name_option(self): model_state = ModelState('app', 'model', [ ('id', models.AutoField(primary_key=True)), ], options={'default_related_name': 'related_name'}) changes = self.get_changes([], [model_state]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['CreateModel']) self.assertOperationAttributes( changes, 'app', 0, 0, name='model', options={'default_related_name': 'related_name'}, ) altered_model_state = ModelState('app', 'Model', [ ('id', models.AutoField(primary_key=True)), ]) changes = self.get_changes([model_state], [altered_model_state]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['AlterModelOptions']) self.assertOperationAttributes(changes, 'app', 0, 0, name='model', options={}) @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser") def test_swappable_first_setting(self): """Swappable models get their CreateModel first.""" with isolate_lru_cache(apps.get_swappable_settings_name): changes = self.get_changes([], [self.custom_user_no_inherit, self.aardvark]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="CustomUser") self.assertOperationAttributes(changes, 'thirdapp', 0, 1, name="Aardvark") def test_bases_first(self): """Bases of other models come first.""" changes = self.get_changes([], [self.aardvark_based_on_author, self.author_name]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Aardvark") def test_bases_first_mixed_case_app_label(self): app_label = 'MiXedCaseApp' changes = self.get_changes([], [ ModelState(app_label, 'owner', [ ('id', models.AutoField(primary_key=True)), ]), ModelState(app_label, 'place', [ ('id', models.AutoField(primary_key=True)), ('owner', models.ForeignKey('MiXedCaseApp.owner', models.CASCADE)), ]), ModelState(app_label, 'restaurant', [], bases=('MiXedCaseApp.place',)), ]) self.assertNumberMigrations(changes, app_label, 1) self.assertOperationTypes(changes, app_label, 0, [ 'CreateModel', 'CreateModel', 'CreateModel', ]) self.assertOperationAttributes(changes, app_label, 0, 0, name='owner') self.assertOperationAttributes(changes, app_label, 0, 1, name='place') self.assertOperationAttributes(changes, app_label, 0, 2, name='restaurant') def test_multiple_bases(self): """#23956 - Inheriting models doesn't move *_ptr fields into AddField operations.""" A = ModelState("app", "A", [("a_id", models.AutoField(primary_key=True))]) B = ModelState("app", "B", [("b_id", models.AutoField(primary_key=True))]) C = ModelState("app", "C", [], bases=("app.A", "app.B")) D = ModelState("app", "D", [], bases=("app.A", "app.B")) E = ModelState("app", "E", [], bases=("app.A", "app.B")) changes = self.get_changes([], [A, B, C, D, E]) # Right number/type of migrations? self.assertNumberMigrations(changes, "app", 1) self.assertOperationTypes(changes, "app", 0, [ "CreateModel", "CreateModel", "CreateModel", "CreateModel", "CreateModel" ]) self.assertOperationAttributes(changes, "app", 0, 0, name="A") self.assertOperationAttributes(changes, "app", 0, 1, name="B") self.assertOperationAttributes(changes, "app", 0, 2, name="C") self.assertOperationAttributes(changes, "app", 0, 3, name="D") self.assertOperationAttributes(changes, "app", 0, 4, name="E") def test_proxy_bases_first(self): """Bases of proxies come first.""" changes = self.get_changes([], [self.author_empty, self.author_proxy, self.author_proxy_proxy]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="AuthorProxy") self.assertOperationAttributes(changes, 'testapp', 0, 2, name="AAuthorProxyProxy") def test_pk_fk_included(self): """ A relation used as the primary key is kept as part of CreateModel. """ changes = self.get_changes([], [self.aardvark_pk_fk_author, self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Aardvark") def test_first_dependency(self): """ A dependency to an app with no migrations uses __first__. 
""" # Load graph loader = MigrationLoader(connection) before = self.make_project_state([]) after = self.make_project_state([self.book_migrations_fk]) after.real_apps = {'migrations'} autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes(graph=loader.graph) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("migrations", "__first__")]) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_last_dependency(self): """ A dependency to an app with existing migrations uses the last migration of that app. """ # Load graph loader = MigrationLoader(connection) before = self.make_project_state([]) after = self.make_project_state([self.book_migrations_fk]) after.real_apps = {'migrations'} autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes(graph=loader.graph) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("migrations", "0002_second")]) def test_alter_fk_before_model_deletion(self): """ ForeignKeys are altered _before_ the model they used to refer to are deleted. """ changes = self.get_changes( [self.author_name, self.publisher_with_author], [self.aardvark_testapp, self.publisher_with_aardvark_author] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "AlterField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Aardvark") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="author") self.assertOperationAttributes(changes, 'testapp', 0, 2, name="Author") def test_fk_dependency_other_app(self): """ #23100 - ForeignKeys correctly depend on other apps' models. """ changes = self.get_changes([self.author_name, self.book], [self.author_with_book, self.book]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="book") self.assertMigrationDependencies(changes, 'testapp', 0, [("otherapp", "__first__")]) def test_alter_field_to_fk_dependency_other_app(self): changes = self.get_changes( [self.author_empty, self.book_with_no_author_fk], [self.author_empty, self.book], ) self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField']) self.assertMigrationDependencies(changes, 'otherapp', 0, [('testapp', '__first__')]) def test_circular_dependency_mixed_addcreate(self): """ #23315 - The dependency resolver knows to put all CreateModel before AddField and not become unsolvable. 
""" address = ModelState("a", "Address", [ ("id", models.AutoField(primary_key=True)), ("country", models.ForeignKey("b.DeliveryCountry", models.CASCADE)), ]) person = ModelState("a", "Person", [ ("id", models.AutoField(primary_key=True)), ]) apackage = ModelState("b", "APackage", [ ("id", models.AutoField(primary_key=True)), ("person", models.ForeignKey("a.Person", models.CASCADE)), ]) country = ModelState("b", "DeliveryCountry", [ ("id", models.AutoField(primary_key=True)), ]) changes = self.get_changes([], [address, person, apackage, country]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'a', 0, ["CreateModel", "CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertOperationTypes(changes, 'b', 0, ["CreateModel", "CreateModel"]) @override_settings(AUTH_USER_MODEL="a.Tenant") def test_circular_dependency_swappable(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models. """ with isolate_lru_cache(apps.get_swappable_settings_name): tenant = ModelState("a", "Tenant", [ ("id", models.AutoField(primary_key=True)), ("primary_address", models.ForeignKey("b.Address", models.CASCADE))], bases=(AbstractBaseUser,) ) address = ModelState("b", "Address", [ ("id", models.AutoField(primary_key=True)), ("tenant", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)), ]) changes = self.get_changes([], [address, tenant]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'a', 0, []) self.assertMigrationDependencies(changes, 'a', 1, [('a', 'auto_1'), ('b', 'auto_1')]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'b', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'b', 0, [('__setting__', 'AUTH_USER_MODEL')]) @override_settings(AUTH_USER_MODEL="b.Tenant") def test_circular_dependency_swappable2(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models but with the swappable not being the first migrated model. """ with isolate_lru_cache(apps.get_swappable_settings_name): address = ModelState("a", "Address", [ ("id", models.AutoField(primary_key=True)), ("tenant", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)), ]) tenant = ModelState("b", "Tenant", [ ("id", models.AutoField(primary_key=True)), ("primary_address", models.ForeignKey("a.Address", models.CASCADE))], bases=(AbstractBaseUser,) ) changes = self.get_changes([], [address, tenant]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'a', 0, []) self.assertMigrationDependencies(changes, 'a', 1, [('__setting__', 'AUTH_USER_MODEL'), ('a', 'auto_1')]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'b', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'b', 0, [('a', 'auto_1')]) @override_settings(AUTH_USER_MODEL="a.Person") def test_circular_dependency_swappable_self(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models. 
""" with isolate_lru_cache(apps.get_swappable_settings_name): person = ModelState("a", "Person", [ ("id", models.AutoField(primary_key=True)), ("parent1", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE, related_name='children')) ]) changes = self.get_changes([], [person]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 1) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'a', 0, []) @override_settings(AUTH_USER_MODEL='a.User') def test_swappable_circular_multi_mti(self): with isolate_lru_cache(apps.get_swappable_settings_name): parent = ModelState('a', 'Parent', [ ('user', models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)) ]) child = ModelState('a', 'Child', [], bases=('a.Parent',)) user = ModelState('a', 'User', [], bases=(AbstractBaseUser, 'a.Child')) changes = self.get_changes([], [parent, child, user]) self.assertNumberMigrations(changes, 'a', 1) self.assertOperationTypes(changes, 'a', 0, ['CreateModel', 'CreateModel', 'CreateModel', 'AddField']) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_blank_textfield_and_charfield(self, mocked_ask_method): """ #23405 - Adding a NOT NULL and blank `CharField` or `TextField` without default should not prompt for a default. """ changes = self.get_changes([self.author_empty], [self.author_with_biography_blank]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition') def test_add_non_blank_textfield_and_charfield(self, mocked_ask_method): """ #23405 - Adding a NOT NULL and non-blank `CharField` or `TextField` without default should prompt for a default. """ changes = self.get_changes([self.author_empty], [self.author_with_biography_non_blank]) self.assertEqual(mocked_ask_method.call_count, 2) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0) def test_mti_inheritance_model_removal(self): Animal = ModelState('app', 'Animal', [ ("id", models.AutoField(primary_key=True)), ]) Dog = ModelState('app', 'Dog', [], bases=('app.Animal',)) changes = self.get_changes([Animal, Dog], [Animal]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['DeleteModel']) self.assertOperationAttributes(changes, 'app', 0, 0, name='Dog') def test_add_model_with_field_removed_from_base_model(self): """ Removing a base field takes place before adding a new inherited model that has a field with the same name. 
""" before = [ ModelState('app', 'readable', [ ('id', models.AutoField(primary_key=True)), ('title', models.CharField(max_length=200)), ]), ] after = [ ModelState('app', 'readable', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('app', 'book', [ ('title', models.CharField(max_length=200)), ], bases=('app.readable',)), ] changes = self.get_changes(before, after) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RemoveField', 'CreateModel']) self.assertOperationAttributes(changes, 'app', 0, 0, name='title', model_name='readable') self.assertOperationAttributes(changes, 'app', 0, 1, name='book') def test_parse_number(self): tests = [ ('no_number', None), ('0001_initial', 1), ('0002_model3', 2), ('0002_auto_20380101_1112', 2), ('0002_squashed_0003', 3), ('0002_model2_squashed_0003_other4', 3), ('0002_squashed_0003_squashed_0004', 4), ('0002_model2_squashed_0003_other4_squashed_0005_other6', 5), ('0002_custom_name_20380101_1112_squashed_0003_model', 3), ('2_squashed_4', 4), ] for migration_name, expected_number in tests: with self.subTest(migration_name=migration_name): self.assertEqual( MigrationAutodetector.parse_number(migration_name), expected_number, ) def test_add_custom_fk_with_hardcoded_to(self): class HardcodedForeignKey(models.ForeignKey): def __init__(self, *args, **kwargs): kwargs['to'] = 'testapp.Author' super().__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['to'] return name, path, args, kwargs book_hardcoded_fk_to = ModelState('testapp', 'Book', [ ('author', HardcodedForeignKey(on_delete=models.CASCADE)), ]) changes = self.get_changes( [self.author_empty], [self.author_empty, book_hardcoded_fk_to], ) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['CreateModel']) self.assertOperationAttributes(changes, 'testapp', 0, 0, name='Book') class MigrationSuggestNameTests(SimpleTestCase): def test_no_operations(self): class Migration(migrations.Migration): operations = [] migration = Migration('some_migration', 'test_app') self.assertIs(migration.suggest_name().startswith('auto_'), True) def test_no_operations_initial(self): class Migration(migrations.Migration): initial = True operations = [] migration = Migration('some_migration', 'test_app') self.assertEqual(migration.suggest_name(), 'initial') def test_single_operation(self): class Migration(migrations.Migration): operations = [migrations.CreateModel('Person', fields=[])] migration = Migration('0001_initial', 'test_app') self.assertEqual(migration.suggest_name(), 'person') class Migration(migrations.Migration): operations = [migrations.DeleteModel('Person')] migration = Migration('0002_initial', 'test_app') self.assertEqual(migration.suggest_name(), 'delete_person') def test_single_operation_long_name(self): class Migration(migrations.Migration): operations = [migrations.CreateModel('A' * 53, fields=[])] migration = Migration('some_migration', 'test_app') self.assertEqual(migration.suggest_name(), 'a' * 53) def test_two_operations(self): class Migration(migrations.Migration): operations = [ migrations.CreateModel('Person', fields=[]), migrations.DeleteModel('Animal'), ] migration = Migration('some_migration', 'test_app') self.assertEqual(migration.suggest_name(), 'person_delete_animal') def test_two_create_models(self): class Migration(migrations.Migration): operations = [ migrations.CreateModel('Person', fields=[]), migrations.CreateModel('Animal', fields=[]), ] 
migration = Migration('0001_initial', 'test_app') self.assertEqual(migration.suggest_name(), 'person_animal') def test_two_create_models_with_initial_true(self): class Migration(migrations.Migration): initial = True operations = [ migrations.CreateModel('Person', fields=[]), migrations.CreateModel('Animal', fields=[]), ] migration = Migration('0001_initial', 'test_app') self.assertEqual(migration.suggest_name(), 'initial') def test_many_operations_suffix(self): class Migration(migrations.Migration): operations = [ migrations.CreateModel('Person1', fields=[]), migrations.CreateModel('Person2', fields=[]), migrations.CreateModel('Person3', fields=[]), migrations.DeleteModel('Person4'), migrations.DeleteModel('Person5'), ] migration = Migration('some_migration', 'test_app') self.assertEqual( migration.suggest_name(), 'person1_person2_person3_delete_person4_and_more', ) def test_operation_with_no_suggested_name(self): class Migration(migrations.Migration): operations = [ migrations.CreateModel('Person', fields=[]), migrations.RunSQL('SELECT 1 FROM person;'), ] migration = Migration('some_migration', 'test_app') self.assertIs(migration.suggest_name().startswith('auto_'), True) def test_none_name(self): class Migration(migrations.Migration): operations = [migrations.RunSQL('SELECT 1 FROM person;')] migration = Migration('0001_initial', 'test_app') suggest_name = migration.suggest_name() self.assertIs(suggest_name.startswith('auto_'), True) def test_none_name_with_initial_true(self): class Migration(migrations.Migration): initial = True operations = [migrations.RunSQL('SELECT 1 FROM person;')] migration = Migration('0001_initial', 'test_app') self.assertEqual(migration.suggest_name(), 'initial') def test_auto(self): migration = migrations.Migration('0001_initial', 'test_app') suggest_name = migration.suggest_name() self.assertIs(suggest_name.startswith('auto_'), True)
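# Summary of the suggest_name() behaviour exercised above (a reading of the
# assertions, not extra test coverage): a migration flagged initial=True is
# simply named 'initial'; a single CreateModel suggests the lowercased model
# name and a DeleteModel prefixes it with 'delete_'; several operations are
# joined with '_' and collapsed with an '_and_more' suffix when there are
# many; any operation without a name suggestion (such as RunSQL) pushes the
# whole migration back to an 'auto_<timestamp>' name unless it is initial.
# A minimal illustrative sketch (not executed here):
#
#     migration = migrations.Migration('some_migration', 'test_app')
#     migration.operations = [migrations.CreateModel('Person', fields=[])]
#     migration.suggest_name()  # expected: 'person'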
from django.apps.registry import Apps from django.contrib.contenttypes.fields import GenericForeignKey from django.db import models from django.db.migrations.exceptions import InvalidBasesError from django.db.migrations.operations import ( AddField, AlterField, DeleteModel, RemoveField, ) from django.db.migrations.state import ( ModelState, ProjectState, get_related_models_recursive, ) from django.test import SimpleTestCase, override_settings from django.test.utils import isolate_apps from .models import ( FoodManager, FoodQuerySet, ModelWithCustomBase, NoMigrationFoodManager, UnicodeModel, ) class StateTests(SimpleTestCase): """ Tests state construction, rendering and modification by operations. """ def test_create(self): """ Tests making a ProjectState from an Apps """ new_apps = Apps(["migrations"]) class Author(models.Model): name = models.CharField(max_length=255) bio = models.TextField() age = models.IntegerField(blank=True, null=True) class Meta: app_label = "migrations" apps = new_apps unique_together = ["name", "bio"] index_together = ["bio", "age"] class AuthorProxy(Author): class Meta: app_label = "migrations" apps = new_apps proxy = True ordering = ["name"] class SubAuthor(Author): width = models.FloatField(null=True) class Meta: app_label = "migrations" apps = new_apps class Book(models.Model): title = models.CharField(max_length=1000) author = models.ForeignKey(Author, models.CASCADE) contributors = models.ManyToManyField(Author) class Meta: app_label = "migrations" apps = new_apps verbose_name = "tome" db_table = "test_tome" indexes = [models.Index(fields=['title'])] class Food(models.Model): food_mgr = FoodManager('a', 'b') food_qs = FoodQuerySet.as_manager() food_no_mgr = NoMigrationFoodManager('x', 'y') class Meta: app_label = "migrations" apps = new_apps class FoodNoManagers(models.Model): class Meta: app_label = "migrations" apps = new_apps class FoodNoDefaultManager(models.Model): food_no_mgr = NoMigrationFoodManager('x', 'y') food_mgr = FoodManager('a', 'b') food_qs = FoodQuerySet.as_manager() class Meta: app_label = "migrations" apps = new_apps mgr1 = FoodManager('a', 'b') mgr2 = FoodManager('x', 'y', c=3, d=4) class FoodOrderedManagers(models.Model): # The managers on this model should be ordered by their creation # counter and not by the order in model body food_no_mgr = NoMigrationFoodManager('x', 'y') food_mgr2 = mgr2 food_mgr1 = mgr1 class Meta: app_label = "migrations" apps = new_apps project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] author_proxy_state = project_state.models['migrations', 'authorproxy'] sub_author_state = project_state.models['migrations', 'subauthor'] book_state = project_state.models['migrations', 'book'] food_state = project_state.models['migrations', 'food'] food_no_managers_state = project_state.models['migrations', 'foodnomanagers'] food_no_default_manager_state = project_state.models['migrations', 'foodnodefaultmanager'] food_order_manager_state = project_state.models['migrations', 'foodorderedmanagers'] book_index = models.Index(fields=['title']) book_index.set_name_with_model(Book) self.assertEqual(author_state.app_label, "migrations") self.assertEqual(author_state.name, "Author") self.assertEqual(list(author_state.fields), ["id", "name", "bio", "age"]) self.assertEqual(author_state.fields['name'].max_length, 255) self.assertIs(author_state.fields['bio'].null, False) self.assertIs(author_state.fields['age'].null, True) self.assertEqual( author_state.options, { 
"unique_together": {("name", "bio")}, "index_together": {("bio", "age")}, "indexes": [], "constraints": [], } ) self.assertEqual(author_state.bases, (models.Model,)) self.assertEqual(book_state.app_label, "migrations") self.assertEqual(book_state.name, "Book") self.assertEqual(list(book_state.fields), ["id", "title", "author", "contributors"]) self.assertEqual(book_state.fields['title'].max_length, 1000) self.assertIs(book_state.fields['author'].null, False) self.assertEqual(book_state.fields['contributors'].__class__.__name__, 'ManyToManyField') self.assertEqual( book_state.options, {"verbose_name": "tome", "db_table": "test_tome", "indexes": [book_index], "constraints": []}, ) self.assertEqual(book_state.bases, (models.Model,)) self.assertEqual(author_proxy_state.app_label, "migrations") self.assertEqual(author_proxy_state.name, "AuthorProxy") self.assertEqual(author_proxy_state.fields, {}) self.assertEqual( author_proxy_state.options, {"proxy": True, "ordering": ["name"], "indexes": [], "constraints": []}, ) self.assertEqual(author_proxy_state.bases, ("migrations.author",)) self.assertEqual(sub_author_state.app_label, "migrations") self.assertEqual(sub_author_state.name, "SubAuthor") self.assertEqual(len(sub_author_state.fields), 2) self.assertEqual(sub_author_state.bases, ("migrations.author",)) # The default manager is used in migrations self.assertEqual([name for name, mgr in food_state.managers], ['food_mgr']) self.assertTrue(all(isinstance(name, str) for name, mgr in food_state.managers)) self.assertEqual(food_state.managers[0][1].args, ('a', 'b', 1, 2)) # No explicit managers defined. Migrations will fall back to the default self.assertEqual(food_no_managers_state.managers, []) # food_mgr is used in migration but isn't the default mgr, hence add the # default self.assertEqual([name for name, mgr in food_no_default_manager_state.managers], ['food_no_mgr', 'food_mgr']) self.assertTrue(all(isinstance(name, str) for name, mgr in food_no_default_manager_state.managers)) self.assertEqual(food_no_default_manager_state.managers[0][1].__class__, models.Manager) self.assertIsInstance(food_no_default_manager_state.managers[1][1], FoodManager) self.assertEqual([name for name, mgr in food_order_manager_state.managers], ['food_mgr1', 'food_mgr2']) self.assertTrue(all(isinstance(name, str) for name, mgr in food_order_manager_state.managers)) self.assertEqual([mgr.args for name, mgr in food_order_manager_state.managers], [('a', 'b', 1, 2), ('x', 'y', 3, 4)]) def test_custom_default_manager_added_to_the_model_state(self): """ When the default manager of the model is a custom manager, it needs to be added to the model state. """ new_apps = Apps(['migrations']) custom_manager = models.Manager() class Author(models.Model): objects = models.TextField() authors = custom_manager class Meta: app_label = 'migrations' apps = new_apps project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.managers, [('authors', custom_manager)]) def test_custom_default_manager_named_objects_with_false_migration_flag(self): """ When a manager is added with a name of 'objects' but it does not have `use_in_migrations = True`, no migration should be added to the model state (#26643). 
""" new_apps = Apps(['migrations']) class Author(models.Model): objects = models.Manager() class Meta: app_label = 'migrations' apps = new_apps project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.managers, []) def test_no_duplicate_managers(self): """ When a manager is added with `use_in_migrations = True` and a parent model had a manager with the same name and `use_in_migrations = True`, the parent's manager shouldn't appear in the model state (#26881). """ new_apps = Apps(['migrations']) class PersonManager(models.Manager): use_in_migrations = True class Person(models.Model): objects = PersonManager() class Meta: abstract = True class BossManager(PersonManager): use_in_migrations = True class Boss(Person): objects = BossManager() class Meta: app_label = 'migrations' apps = new_apps project_state = ProjectState.from_apps(new_apps) boss_state = project_state.models['migrations', 'boss'] self.assertEqual(boss_state.managers, [('objects', Boss.objects)]) def test_custom_default_manager(self): new_apps = Apps(['migrations']) class Author(models.Model): manager1 = models.Manager() manager2 = models.Manager() class Meta: app_label = 'migrations' apps = new_apps default_manager_name = 'manager2' project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.options['default_manager_name'], 'manager2') self.assertEqual(author_state.managers, [('manager2', Author.manager1)]) def test_custom_base_manager(self): new_apps = Apps(['migrations']) class Author(models.Model): manager1 = models.Manager() manager2 = models.Manager() class Meta: app_label = 'migrations' apps = new_apps base_manager_name = 'manager2' class Author2(models.Model): manager1 = models.Manager() manager2 = models.Manager() class Meta: app_label = 'migrations' apps = new_apps base_manager_name = 'manager1' project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.options['base_manager_name'], 'manager2') self.assertEqual(author_state.managers, [ ('manager1', Author.manager1), ('manager2', Author.manager2), ]) author2_state = project_state.models['migrations', 'author2'] self.assertEqual(author2_state.options['base_manager_name'], 'manager1') self.assertEqual(author2_state.managers, [ ('manager1', Author2.manager1), ]) def test_apps_bulk_update(self): """ StateApps.bulk_update() should update apps.ready to False and reset the value afterward. """ project_state = ProjectState() apps = project_state.apps with apps.bulk_update(): self.assertFalse(apps.ready) self.assertTrue(apps.ready) with self.assertRaises(ValueError): with apps.bulk_update(): self.assertFalse(apps.ready) raise ValueError() self.assertTrue(apps.ready) def test_render(self): """ Tests rendering a ProjectState into an Apps. 
""" project_state = ProjectState() project_state.add_model(ModelState( app_label="migrations", name="Tag", fields=[ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ("hidden", models.BooleanField()), ], )) project_state.add_model(ModelState( app_label="migrations", name="SubTag", fields=[ ('tag_ptr', models.OneToOneField( 'migrations.Tag', models.CASCADE, auto_created=True, parent_link=True, primary_key=True, to_field='id', serialize=False, )), ("awesome", models.BooleanField()), ], bases=("migrations.Tag",), )) base_mgr = models.Manager() mgr1 = FoodManager('a', 'b') mgr2 = FoodManager('x', 'y', c=3, d=4) project_state.add_model(ModelState( app_label="migrations", name="Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ # The ordering we really want is objects, mgr1, mgr2 ('default', base_mgr), ('food_mgr2', mgr2), ('food_mgr1', mgr1), ] )) new_apps = project_state.apps self.assertEqual(new_apps.get_model("migrations", "Tag")._meta.get_field("name").max_length, 100) self.assertIs(new_apps.get_model("migrations", "Tag")._meta.get_field("hidden").null, False) self.assertEqual(len(new_apps.get_model("migrations", "SubTag")._meta.local_fields), 2) Food = new_apps.get_model("migrations", "Food") self.assertEqual([mgr.name for mgr in Food._meta.managers], ['default', 'food_mgr1', 'food_mgr2']) self.assertTrue(all(isinstance(mgr.name, str) for mgr in Food._meta.managers)) self.assertEqual([mgr.__class__ for mgr in Food._meta.managers], [models.Manager, FoodManager, FoodManager]) def test_render_model_inheritance(self): class Book(models.Model): title = models.CharField(max_length=1000) class Meta: app_label = "migrations" apps = Apps() class Novel(Book): class Meta: app_label = "migrations" apps = Apps() # First, test rendering individually apps = Apps(["migrations"]) # We shouldn't be able to render yet ms = ModelState.from_model(Novel) with self.assertRaises(InvalidBasesError): ms.render(apps) # Once the parent model is in the app registry, it should be fine ModelState.from_model(Book).render(apps) ModelState.from_model(Novel).render(apps) def test_render_model_with_multiple_inheritance(self): class Foo(models.Model): class Meta: app_label = "migrations" apps = Apps() class Bar(models.Model): class Meta: app_label = "migrations" apps = Apps() class FooBar(Foo, Bar): class Meta: app_label = "migrations" apps = Apps() class AbstractSubFooBar(FooBar): class Meta: abstract = True apps = Apps() class SubFooBar(AbstractSubFooBar): class Meta: app_label = "migrations" apps = Apps() apps = Apps(["migrations"]) # We shouldn't be able to render yet ms = ModelState.from_model(FooBar) with self.assertRaises(InvalidBasesError): ms.render(apps) # Once the parent models are in the app registry, it should be fine ModelState.from_model(Foo).render(apps) self.assertSequenceEqual(ModelState.from_model(Foo).bases, [models.Model]) ModelState.from_model(Bar).render(apps) self.assertSequenceEqual(ModelState.from_model(Bar).bases, [models.Model]) ModelState.from_model(FooBar).render(apps) self.assertSequenceEqual(ModelState.from_model(FooBar).bases, ['migrations.foo', 'migrations.bar']) ModelState.from_model(SubFooBar).render(apps) self.assertSequenceEqual(ModelState.from_model(SubFooBar).bases, ['migrations.foobar']) def test_render_project_dependencies(self): """ The ProjectState render method correctly renders models to account for inter-model base dependencies. 
""" new_apps = Apps() class A(models.Model): class Meta: app_label = "migrations" apps = new_apps class B(A): class Meta: app_label = "migrations" apps = new_apps class C(B): class Meta: app_label = "migrations" apps = new_apps class D(A): class Meta: app_label = "migrations" apps = new_apps class E(B): class Meta: app_label = "migrations" apps = new_apps proxy = True class F(D): class Meta: app_label = "migrations" apps = new_apps proxy = True # Make a ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) project_state.add_model(ModelState.from_model(C)) project_state.add_model(ModelState.from_model(D)) project_state.add_model(ModelState.from_model(E)) project_state.add_model(ModelState.from_model(F)) final_apps = project_state.apps self.assertEqual(len(final_apps.get_models()), 6) # Now make an invalid ProjectState and make sure it fails project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) project_state.add_model(ModelState.from_model(C)) project_state.add_model(ModelState.from_model(F)) with self.assertRaises(InvalidBasesError): project_state.apps def test_render_unique_app_labels(self): """ The ProjectState render method doesn't raise an ImproperlyConfigured exception about unique labels if two dotted app names have the same last part. """ class A(models.Model): class Meta: app_label = "django.contrib.auth" class B(models.Model): class Meta: app_label = "vendor.auth" # Make a ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) self.assertEqual(len(project_state.apps.get_models()), 2) def test_reload_related_model_on_non_relational_fields(self): """ The model is reloaded even on changes that are not involved in relations. Other models pointing to or from it are also reloaded. """ project_state = ProjectState() project_state.apps # Render project state. 
project_state.add_model(ModelState('migrations', 'A', [])) project_state.add_model(ModelState('migrations', 'B', [ ('a', models.ForeignKey('A', models.CASCADE)), ])) project_state.add_model(ModelState('migrations', 'C', [ ('b', models.ForeignKey('B', models.CASCADE)), ('name', models.TextField()), ])) project_state.add_model(ModelState('migrations', 'D', [ ('a', models.ForeignKey('A', models.CASCADE)), ])) operation = AlterField( model_name='C', name='name', field=models.TextField(blank=True), ) operation.state_forwards('migrations', project_state) project_state.reload_model('migrations', 'a', delay=True) A = project_state.apps.get_model('migrations.A') B = project_state.apps.get_model('migrations.B') D = project_state.apps.get_model('migrations.D') self.assertIs(B._meta.get_field('a').related_model, A) self.assertIs(D._meta.get_field('a').related_model, A) def test_reload_model_relationship_consistency(self): project_state = ProjectState() project_state.add_model(ModelState('migrations', 'A', [])) project_state.add_model(ModelState('migrations', 'B', [ ('a', models.ForeignKey('A', models.CASCADE)), ])) project_state.add_model(ModelState('migrations', 'C', [ ('b', models.ForeignKey('B', models.CASCADE)), ])) A = project_state.apps.get_model('migrations.A') B = project_state.apps.get_model('migrations.B') C = project_state.apps.get_model('migrations.C') self.assertEqual([r.related_model for r in A._meta.related_objects], [B]) self.assertEqual([r.related_model for r in B._meta.related_objects], [C]) self.assertEqual([r.related_model for r in C._meta.related_objects], []) project_state.reload_model('migrations', 'a', delay=True) A = project_state.apps.get_model('migrations.A') B = project_state.apps.get_model('migrations.B') C = project_state.apps.get_model('migrations.C') self.assertEqual([r.related_model for r in A._meta.related_objects], [B]) self.assertEqual([r.related_model for r in B._meta.related_objects], [C]) self.assertEqual([r.related_model for r in C._meta.related_objects], []) def test_add_relations(self): """ #24573 - Adding relations to existing models should reload the referenced models too. 
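As the assertions below spell out: once AddField installs the OneToOneField C.to_a, the rendered classes for A, B and C are all replaced; the old classes keep their original relations to each other, while the new A gains the reverse 'from_c' accessor pointing at the new C.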
""" new_apps = Apps() class A(models.Model): class Meta: app_label = 'something' apps = new_apps class B(A): class Meta: app_label = 'something' apps = new_apps class C(models.Model): class Meta: app_label = 'something' apps = new_apps project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) project_state.add_model(ModelState.from_model(C)) project_state.apps # We need to work with rendered models old_state = project_state.clone() model_a_old = old_state.apps.get_model('something', 'A') model_b_old = old_state.apps.get_model('something', 'B') model_c_old = old_state.apps.get_model('something', 'C') # The relations between the old models are correct self.assertIs(model_a_old._meta.get_field('b').related_model, model_b_old) self.assertIs(model_b_old._meta.get_field('a_ptr').related_model, model_a_old) operation = AddField('c', 'to_a', models.OneToOneField( 'something.A', models.CASCADE, related_name='from_c', )) operation.state_forwards('something', project_state) model_a_new = project_state.apps.get_model('something', 'A') model_b_new = project_state.apps.get_model('something', 'B') model_c_new = project_state.apps.get_model('something', 'C') # All models have changed self.assertIsNot(model_a_old, model_a_new) self.assertIsNot(model_b_old, model_b_new) self.assertIsNot(model_c_old, model_c_new) # The relations between the old models still hold self.assertIs(model_a_old._meta.get_field('b').related_model, model_b_old) self.assertIs(model_b_old._meta.get_field('a_ptr').related_model, model_a_old) # The relations between the new models correct self.assertIs(model_a_new._meta.get_field('b').related_model, model_b_new) self.assertIs(model_b_new._meta.get_field('a_ptr').related_model, model_a_new) self.assertIs(model_a_new._meta.get_field('from_c').related_model, model_c_new) self.assertIs(model_c_new._meta.get_field('to_a').related_model, model_a_new) def test_remove_relations(self): """ #24225 - Relations between models are updated while remaining the relations and references for models of an old state. 
""" new_apps = Apps() class A(models.Model): class Meta: app_label = "something" apps = new_apps class B(models.Model): to_a = models.ForeignKey(A, models.CASCADE) class Meta: app_label = "something" apps = new_apps def get_model_a(state): return [mod for mod in state.apps.get_models() if mod._meta.model_name == 'a'][0] project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1) old_state = project_state.clone() operation = RemoveField("b", "to_a") operation.state_forwards("something", project_state) # Model from old_state still has the relation model_a_old = get_model_a(old_state) model_a_new = get_model_a(project_state) self.assertIsNot(model_a_old, model_a_new) self.assertEqual(len(model_a_old._meta.related_objects), 1) self.assertEqual(len(model_a_new._meta.related_objects), 0) # Same test for deleted model project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) old_state = project_state.clone() operation = DeleteModel("b") operation.state_forwards("something", project_state) model_a_old = get_model_a(old_state) model_a_new = get_model_a(project_state) self.assertIsNot(model_a_old, model_a_new) self.assertEqual(len(model_a_old._meta.related_objects), 1) self.assertEqual(len(model_a_new._meta.related_objects), 0) def test_self_relation(self): """ #24513 - Modifying an object pointing to itself would cause it to be rendered twice and thus breaking its related M2M through objects. """ class A(models.Model): to_a = models.ManyToManyField('something.A', symmetrical=False) class Meta: app_label = "something" def get_model_a(state): return [mod for mod in state.apps.get_models() if mod._meta.model_name == 'a'][0] project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1) old_state = project_state.clone() operation = AlterField( model_name="a", name="to_a", field=models.ManyToManyField("something.A", symmetrical=False, blank=True) ) # At this point the model would be rendered twice causing its related # M2M through objects to point to an old copy and thus breaking their # attribute lookup. operation.state_forwards("something", project_state) model_a_old = get_model_a(old_state) model_a_new = get_model_a(project_state) self.assertIsNot(model_a_old, model_a_new) # The old model's _meta is still consistent field_to_a_old = model_a_old._meta.get_field("to_a") self.assertEqual(field_to_a_old.m2m_field_name(), "from_a") self.assertEqual(field_to_a_old.m2m_reverse_field_name(), "to_a") self.assertIs(field_to_a_old.related_model, model_a_old) self.assertIs(field_to_a_old.remote_field.through._meta.get_field('to_a').related_model, model_a_old) self.assertIs(field_to_a_old.remote_field.through._meta.get_field('from_a').related_model, model_a_old) # The new model's _meta is still consistent field_to_a_new = model_a_new._meta.get_field("to_a") self.assertEqual(field_to_a_new.m2m_field_name(), "from_a") self.assertEqual(field_to_a_new.m2m_reverse_field_name(), "to_a") self.assertIs(field_to_a_new.related_model, model_a_new) self.assertIs(field_to_a_new.remote_field.through._meta.get_field('to_a').related_model, model_a_new) self.assertIs(field_to_a_new.remote_field.through._meta.get_field('from_a').related_model, model_a_new) def test_equality(self): """ == and != are implemented correctly. 
""" # Test two things that should be equal project_state = ProjectState() project_state.add_model(ModelState( "migrations", "Tag", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ("hidden", models.BooleanField()), ], {}, None, )) project_state.apps # Fill the apps cached property other_state = project_state.clone() self.assertEqual(project_state, project_state) self.assertEqual(project_state, other_state) self.assertIs(project_state != project_state, False) self.assertIs(project_state != other_state, False) self.assertNotEqual(project_state.apps, other_state.apps) # Make a very small change (max_len 99) and see if that affects it project_state = ProjectState() project_state.add_model(ModelState( "migrations", "Tag", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=99)), ("hidden", models.BooleanField()), ], {}, None, )) self.assertNotEqual(project_state, other_state) self.assertIs(project_state == other_state, False) def test_dangling_references_throw_error(self): new_apps = Apps() class Author(models.Model): name = models.TextField() class Meta: app_label = "migrations" apps = new_apps class Publisher(models.Model): name = models.TextField() class Meta: app_label = "migrations" apps = new_apps class Book(models.Model): author = models.ForeignKey(Author, models.CASCADE) publisher = models.ForeignKey(Publisher, models.CASCADE) class Meta: app_label = "migrations" apps = new_apps class Magazine(models.Model): authors = models.ManyToManyField(Author) class Meta: app_label = "migrations" apps = new_apps # Make a valid ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(Author)) project_state.add_model(ModelState.from_model(Publisher)) project_state.add_model(ModelState.from_model(Book)) project_state.add_model(ModelState.from_model(Magazine)) self.assertEqual(len(project_state.apps.get_models()), 4) # now make an invalid one with a ForeignKey project_state = ProjectState() project_state.add_model(ModelState.from_model(Book)) msg = ( "The field migrations.Book.author was declared with a lazy reference " "to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n" "The field migrations.Book.publisher was declared with a lazy reference " "to 'migrations.publisher', but app 'migrations' doesn't provide model 'publisher'." ) with self.assertRaisesMessage(ValueError, msg): project_state.apps # And another with ManyToManyField. project_state = ProjectState() project_state.add_model(ModelState.from_model(Magazine)) msg = ( "The field migrations.Magazine.authors was declared with a lazy reference " "to 'migrations.author\', but app 'migrations' doesn't provide model 'author'.\n" "The field migrations.Magazine_authors.author was declared with a lazy reference " "to \'migrations.author\', but app 'migrations' doesn't provide model 'author'." ) with self.assertRaisesMessage(ValueError, msg): project_state.apps # And now with multiple models and multiple fields. 
project_state.add_model(ModelState.from_model(Book)) msg = ( "The field migrations.Book.author was declared with a lazy reference " "to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n" "The field migrations.Book.publisher was declared with a lazy reference " "to 'migrations.publisher', but app 'migrations' doesn't provide model 'publisher'.\n" "The field migrations.Magazine.authors was declared with a lazy reference " "to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n" "The field migrations.Magazine_authors.author was declared with a lazy reference " "to 'migrations.author', but app 'migrations' doesn't provide model 'author'." ) with self.assertRaisesMessage(ValueError, msg): project_state.apps def test_reference_mixed_case_app_label(self): new_apps = Apps() class Author(models.Model): class Meta: app_label = 'MiXedCase_migrations' apps = new_apps class Book(models.Model): author = models.ForeignKey(Author, models.CASCADE) class Meta: app_label = 'MiXedCase_migrations' apps = new_apps class Magazine(models.Model): authors = models.ManyToManyField(Author) class Meta: app_label = 'MiXedCase_migrations' apps = new_apps project_state = ProjectState() project_state.add_model(ModelState.from_model(Author)) project_state.add_model(ModelState.from_model(Book)) project_state.add_model(ModelState.from_model(Magazine)) self.assertEqual(len(project_state.apps.get_models()), 3) def test_real_apps(self): """ Including real apps can resolve dangling FK errors. This test relies on the fact that contenttypes is always loaded. """ new_apps = Apps() class TestModel(models.Model): ct = models.ForeignKey("contenttypes.ContentType", models.CASCADE) class Meta: app_label = "migrations" apps = new_apps # If we just stick it into an empty state it should fail project_state = ProjectState() project_state.add_model(ModelState.from_model(TestModel)) with self.assertRaises(ValueError): project_state.apps # If we include the real app it should succeed project_state = ProjectState(real_apps={'contenttypes'}) project_state.add_model(ModelState.from_model(TestModel)) rendered_state = project_state.apps self.assertEqual( len([x for x in rendered_state.get_models() if x._meta.app_label == "migrations"]), 1, ) def test_real_apps_non_set(self): with self.assertRaises(AssertionError): ProjectState(real_apps=['contenttypes']) def test_ignore_order_wrt(self): """ Makes sure ProjectState doesn't include OrderWrt fields when making from existing models. 
""" new_apps = Apps() class Author(models.Model): name = models.TextField() class Meta: app_label = "migrations" apps = new_apps class Book(models.Model): author = models.ForeignKey(Author, models.CASCADE) class Meta: app_label = "migrations" apps = new_apps order_with_respect_to = "author" # Make a valid ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(Author)) project_state.add_model(ModelState.from_model(Book)) self.assertEqual( list(project_state.models['migrations', 'book'].fields), ["id", "author"], ) def test_modelstate_get_field_order_wrt(self): new_apps = Apps() class Author(models.Model): name = models.TextField() class Meta: app_label = 'migrations' apps = new_apps class Book(models.Model): author = models.ForeignKey(Author, models.CASCADE) class Meta: app_label = 'migrations' apps = new_apps order_with_respect_to = 'author' model_state = ModelState.from_model(Book) order_wrt_field = model_state.get_field('_order') self.assertIsInstance(order_wrt_field, models.ForeignKey) self.assertEqual(order_wrt_field.related_model, 'migrations.author') def test_modelstate_get_field_no_order_wrt_order_field(self): new_apps = Apps() class HistoricalRecord(models.Model): _order = models.PositiveSmallIntegerField() class Meta: app_label = 'migrations' apps = new_apps model_state = ModelState.from_model(HistoricalRecord) order_field = model_state.get_field('_order') self.assertIsNone(order_field.related_model) self.assertIsInstance(order_field, models.PositiveSmallIntegerField) def test_manager_refer_correct_model_version(self): """ #24147 - Managers refer to the correct version of a historical model """ project_state = ProjectState() project_state.add_model(ModelState( app_label="migrations", name="Tag", fields=[ ("id", models.AutoField(primary_key=True)), ("hidden", models.BooleanField()), ], managers=[ ('food_mgr', FoodManager('a', 'b')), ('food_qs', FoodQuerySet.as_manager()), ] )) old_model = project_state.apps.get_model('migrations', 'tag') new_state = project_state.clone() operation = RemoveField("tag", "hidden") operation.state_forwards("migrations", new_state) new_model = new_state.apps.get_model('migrations', 'tag') self.assertIsNot(old_model, new_model) self.assertIs(old_model, old_model.food_mgr.model) self.assertIs(old_model, old_model.food_qs.model) self.assertIs(new_model, new_model.food_mgr.model) self.assertIs(new_model, new_model.food_qs.model) self.assertIsNot(old_model.food_mgr, new_model.food_mgr) self.assertIsNot(old_model.food_qs, new_model.food_qs) self.assertIsNot(old_model.food_mgr.model, new_model.food_mgr.model) self.assertIsNot(old_model.food_qs.model, new_model.food_qs.model) def test_choices_iterator(self): """ #24483 - ProjectState.from_apps should not destructively consume Field.choices iterators. 
""" new_apps = Apps(["migrations"]) choices = [('a', 'A'), ('b', 'B')] class Author(models.Model): name = models.CharField(max_length=255) choice = models.CharField(max_length=255, choices=iter(choices)) class Meta: app_label = "migrations" apps = new_apps ProjectState.from_apps(new_apps) choices_field = Author._meta.get_field('choice') self.assertEqual(list(choices_field.choices), choices) class StateRelationsTests(SimpleTestCase): def get_base_project_state(self): new_apps = Apps() class User(models.Model): class Meta: app_label = 'tests' apps = new_apps class Comment(models.Model): text = models.TextField() user = models.ForeignKey(User, models.CASCADE) comments = models.ManyToManyField('self') class Meta: app_label = 'tests' apps = new_apps class Post(models.Model): text = models.TextField() authors = models.ManyToManyField(User) class Meta: app_label = 'tests' apps = new_apps project_state = ProjectState() project_state.add_model(ModelState.from_model(User)) project_state.add_model(ModelState.from_model(Comment)) project_state.add_model(ModelState.from_model(Post)) return project_state def test_relations_population(self): tests = [ ('add_model', [ ModelState( app_label='migrations', name='Tag', fields=[('id', models.AutoField(primary_key=True))], ), ]), ('remove_model', ['tests', 'comment']), ('rename_model', ['tests', 'comment', 'opinion']), ('add_field', [ 'tests', 'post', 'next_post', models.ForeignKey('self', models.CASCADE), True, ]), ('remove_field', ['tests', 'post', 'text']), ('rename_field', ['tests', 'comment', 'user', 'author']), ('alter_field', [ 'tests', 'comment', 'user', models.IntegerField(), True, ]), ] for method, args in tests: with self.subTest(method=method): project_state = self.get_base_project_state() getattr(project_state, method)(*args) # ProjectState's `_relations` are populated on `relations` access. 
self.assertIsNone(project_state._relations) self.assertEqual(project_state.relations, project_state._relations) self.assertIsNotNone(project_state._relations) def test_add_model(self): project_state = self.get_base_project_state() self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post')], ) self.assertEqual( list(project_state.relations['tests', 'comment']), [('tests', 'comment')], ) self.assertNotIn(('tests', 'post'), project_state.relations) def test_add_model_no_relations(self): project_state = ProjectState() project_state.add_model(ModelState( app_label='migrations', name='Tag', fields=[('id', models.AutoField(primary_key=True))], )) self.assertEqual(project_state.relations, {}) def test_add_model_other_app(self): project_state = self.get_base_project_state() self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post')], ) project_state.add_model(ModelState( app_label='tests_other', name='comment', fields=[ ('id', models.AutoField(primary_key=True)), ('user', models.ForeignKey('tests.user', models.CASCADE)), ], )) self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post'), ('tests_other', 'comment')], ) def test_remove_model(self): project_state = self.get_base_project_state() self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post')], ) self.assertEqual( list(project_state.relations['tests', 'comment']), [('tests', 'comment')], ) project_state.remove_model('tests', 'comment') self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'post')], ) self.assertNotIn(('tests', 'comment'), project_state.relations) project_state.remove_model('tests', 'post') self.assertEqual(project_state.relations, {}) project_state.remove_model('tests', 'user') self.assertEqual(project_state.relations, {}) def test_rename_model(self): project_state = self.get_base_project_state() self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post')], ) self.assertEqual( list(project_state.relations['tests', 'comment']), [('tests', 'comment')], ) related_field = project_state.relations['tests', 'user']['tests', 'comment'] project_state.rename_model('tests', 'comment', 'opinion') self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'post'), ('tests', 'opinion')], ) self.assertEqual( list(project_state.relations['tests', 'opinion']), [('tests', 'opinion')], ) self.assertNotIn(('tests', 'comment'), project_state.relations) self.assertEqual( project_state.relations['tests', 'user']['tests', 'opinion'], related_field, ) project_state.rename_model('tests', 'user', 'author') self.assertEqual( list(project_state.relations['tests', 'author']), [('tests', 'post'), ('tests', 'opinion')], ) self.assertNotIn(('tests', 'user'), project_state.relations) def test_rename_model_no_relations(self): project_state = self.get_base_project_state() self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post')], ) related_field = project_state.relations['tests', 'user']['tests', 'post'] self.assertNotIn(('tests', 'post'), project_state.relations) # Rename a model without relations. 
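# 'post' has no incoming relations of its own, so the rename should only
# move the entry under ('tests', 'user') from ('tests', 'post') to
# ('tests', 'blog'); the stored field should still compare equal to the one
# captured before the rename.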
project_state.rename_model('tests', 'post', 'blog') self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'blog')], ) self.assertNotIn(('tests', 'blog'), project_state.relations) self.assertEqual( related_field, project_state.relations['tests', 'user']['tests', 'blog'], ) def test_add_field(self): project_state = self.get_base_project_state() self.assertNotIn(('tests', 'post'), project_state.relations) # Add a self-referential foreign key. new_field = models.ForeignKey('self', models.CASCADE) project_state.add_field( 'tests', 'post', 'next_post', new_field, preserve_default=True, ) self.assertEqual( list(project_state.relations['tests', 'post']), [('tests', 'post')], ) self.assertEqual( project_state.relations['tests', 'post']['tests', 'post'], {'next_post': new_field}, ) # Add a foreign key. new_field = models.ForeignKey('tests.post', models.CASCADE) project_state.add_field( 'tests', 'comment', 'post', new_field, preserve_default=True, ) self.assertEqual( list(project_state.relations['tests', 'post']), [('tests', 'post'), ('tests', 'comment')], ) self.assertEqual( project_state.relations['tests', 'post']['tests', 'comment'], {'post': new_field}, ) def test_add_field_m2m_with_through(self): project_state = self.get_base_project_state() project_state.add_model(ModelState( app_label='tests', name='Tag', fields=[('id', models.AutoField(primary_key=True))], )) project_state.add_model(ModelState( app_label='tests', name='PostTag', fields=[ ('id', models.AutoField(primary_key=True)), ('post', models.ForeignKey('tests.post', models.CASCADE)), ('tag', models.ForeignKey('tests.tag', models.CASCADE)), ], )) self.assertEqual( list(project_state.relations['tests', 'post']), [('tests', 'posttag')], ) self.assertEqual( list(project_state.relations['tests', 'tag']), [('tests', 'posttag')], ) # Add a many-to-many field with the through model. new_field = models.ManyToManyField('tests.tag', through='tests.posttag') project_state.add_field( 'tests', 'post', 'tags', new_field, preserve_default=True, ) self.assertEqual( list(project_state.relations['tests', 'post']), [('tests', 'posttag')], ) self.assertEqual( list(project_state.relations['tests', 'tag']), [('tests', 'posttag'), ('tests', 'post')], ) self.assertEqual( project_state.relations['tests', 'tag']['tests', 'post'], {'tags': new_field}, ) def test_remove_field(self): project_state = self.get_base_project_state() self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post')], ) # Remove a many-to-many field. project_state.remove_field('tests', 'post', 'authors') self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment')], ) # Remove a foreign key. project_state.remove_field('tests', 'comment', 'user') self.assertEqual(project_state.relations['tests', 'user'], {}) def test_remove_field_no_relations(self): project_state = self.get_base_project_state() self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post')], ) # Remove a non-relation field. 
project_state.remove_field('tests', 'post', 'text') self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post')], ) def test_rename_field(self): project_state = self.get_base_project_state() field = project_state.models['tests', 'comment'].fields['user'] self.assertEqual( project_state.relations['tests', 'user']['tests', 'comment'], {'user': field}, ) project_state.rename_field('tests', 'comment', 'user', 'author') renamed_field = project_state.models['tests', 'comment'].fields['author'] self.assertEqual( project_state.relations['tests', 'user']['tests', 'comment'], {'author': renamed_field}, ) self.assertEqual(field, renamed_field) def test_rename_field_no_relations(self): project_state = self.get_base_project_state() self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post')], ) # Rename a non-relation field. project_state.rename_field('tests', 'post', 'text', 'description') self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post')], ) def test_alter_field(self): project_state = self.get_base_project_state() self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post')], ) # Alter a foreign key to a non-relation field. project_state.alter_field( 'tests', 'comment', 'user', models.IntegerField(), preserve_default=True, ) self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'post')], ) # Alter a non-relation field to a many-to-many field. m2m_field = models.ManyToManyField('tests.user') project_state.alter_field( 'tests', 'comment', 'user', m2m_field, preserve_default=True, ) self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'post'), ('tests', 'comment')], ) self.assertEqual( project_state.relations['tests', 'user']['tests', 'comment'], {'user': m2m_field}, ) def test_alter_field_m2m_to_fk(self): project_state = self.get_base_project_state() project_state.add_model(ModelState( app_label='tests_other', name='user_other', fields=[('id', models.AutoField(primary_key=True))], )) self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post')], ) self.assertNotIn(('tests_other', 'user_other'), project_state.relations) # Alter a many-to-many field to a foreign key. foreign_key = models.ForeignKey('tests_other.user_other', models.CASCADE) project_state.alter_field( 'tests', 'post', 'authors', foreign_key, preserve_default=True, ) self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment')], ) self.assertEqual( list(project_state.relations['tests_other', 'user_other']), [('tests', 'post')], ) self.assertEqual( project_state.relations['tests_other', 'user_other']['tests', 'post'], {'authors': foreign_key}, ) def test_many_relations_to_same_model(self): project_state = self.get_base_project_state() new_field = models.ForeignKey('tests.user', models.CASCADE) project_state.add_field( 'tests', 'comment', 'reviewer', new_field, preserve_default=True, ) self.assertEqual( list(project_state.relations['tests', 'user']), [('tests', 'comment'), ('tests', 'post')], ) comment_rels = project_state.relations['tests', 'user']['tests', 'comment'] # Two foreign keys to the same model. self.assertEqual(len(comment_rels), 2) self.assertEqual(comment_rels['reviewer'], new_field) # Rename the second foreign key. 
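# comment_rels refers to the mapping held in project_state.relations, so the
# rename here and the removal below are expected to be visible through it
# without fetching it again.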
project_state.rename_field('tests', 'comment', 'reviewer', 'supervisor') self.assertEqual(len(comment_rels), 2) self.assertEqual(comment_rels['supervisor'], new_field) # Remove the first foreign key. project_state.remove_field('tests', 'comment', 'user') self.assertEqual(comment_rels, {'supervisor': new_field}) class ModelStateTests(SimpleTestCase): def test_custom_model_base(self): state = ModelState.from_model(ModelWithCustomBase) self.assertEqual(state.bases, (models.Model,)) def test_bound_field_sanity_check(self): field = models.CharField(max_length=1) field.model = models.Model with self.assertRaisesMessage(ValueError, 'ModelState.fields cannot be bound to a model - "field" is.'): ModelState('app', 'Model', [('field', field)]) def test_sanity_check_to(self): field = models.ForeignKey(UnicodeModel, models.CASCADE) with self.assertRaisesMessage( ValueError, 'ModelState.fields cannot refer to a model class - "field.to" does. ' 'Use a string reference instead.' ): ModelState('app', 'Model', [('field', field)]) def test_sanity_check_through(self): field = models.ManyToManyField('UnicodeModel') field.remote_field.through = UnicodeModel with self.assertRaisesMessage( ValueError, 'ModelState.fields cannot refer to a model class - "field.through" does. ' 'Use a string reference instead.' ): ModelState('app', 'Model', [('field', field)]) def test_sanity_index_name(self): field = models.IntegerField() options = {'indexes': [models.Index(fields=['field'])]} msg = ( "Indexes passed to ModelState require a name attribute. <Index: " "fields=['field']> doesn't have one." ) with self.assertRaisesMessage(ValueError, msg): ModelState('app', 'Model', [('field', field)], options=options) def test_fields_immutability(self): """ Rendering a model state doesn't alter its internal fields. """ apps = Apps() field = models.CharField(max_length=1) state = ModelState('app', 'Model', [('name', field)]) Model = state.render(apps) self.assertNotEqual(Model._meta.get_field('name'), field) def test_repr(self): field = models.CharField(max_length=1) state = ModelState('app', 'Model', [('name', field)], bases=['app.A', 'app.B', 'app.C']) self.assertEqual(repr(state), "<ModelState: 'app.Model'>") project_state = ProjectState() project_state.add_model(state) with self.assertRaisesMessage(InvalidBasesError, "Cannot resolve bases for [<ModelState: 'app.Model'>]"): project_state.apps def test_fields_ordering_equality(self): state = ModelState( 'migrations', 'Tag', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=100)), ('hidden', models.BooleanField()), ], ) reordered_state = ModelState( 'migrations', 'Tag', [ ('id', models.AutoField(primary_key=True)), # Purposely re-ordered. 
('hidden', models.BooleanField()), ('name', models.CharField(max_length=100)), ], ) self.assertEqual(state, reordered_state) @override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel') def test_create_swappable(self): """ Tests making a ProjectState from an Apps with a swappable model """ new_apps = Apps(['migrations']) class Author(models.Model): name = models.CharField(max_length=255) bio = models.TextField() age = models.IntegerField(blank=True, null=True) class Meta: app_label = 'migrations' apps = new_apps swappable = 'TEST_SWAPPABLE_MODEL' author_state = ModelState.from_model(Author) self.assertEqual(author_state.app_label, 'migrations') self.assertEqual(author_state.name, 'Author') self.assertEqual(list(author_state.fields), ['id', 'name', 'bio', 'age']) self.assertEqual(author_state.fields['name'].max_length, 255) self.assertIs(author_state.fields['bio'].null, False) self.assertIs(author_state.fields['age'].null, True) self.assertEqual(author_state.options, {'swappable': 'TEST_SWAPPABLE_MODEL', 'indexes': [], "constraints": []}) self.assertEqual(author_state.bases, (models.Model,)) self.assertEqual(author_state.managers, []) @override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel') def test_create_swappable_from_abstract(self): """ A swappable model inheriting from a hierarchy: concrete -> abstract -> concrete. """ new_apps = Apps(['migrations']) class SearchableLocation(models.Model): keywords = models.CharField(max_length=256) class Meta: app_label = 'migrations' apps = new_apps class Station(SearchableLocation): name = models.CharField(max_length=128) class Meta: abstract = True class BusStation(Station): bus_routes = models.CharField(max_length=128) inbound = models.BooleanField(default=False) class Meta(Station.Meta): app_label = 'migrations' apps = new_apps swappable = 'TEST_SWAPPABLE_MODEL' station_state = ModelState.from_model(BusStation) self.assertEqual(station_state.app_label, 'migrations') self.assertEqual(station_state.name, 'BusStation') self.assertEqual( list(station_state.fields), ['searchablelocation_ptr', 'name', 'bus_routes', 'inbound'] ) self.assertEqual(station_state.fields['name'].max_length, 128) self.assertIs(station_state.fields['bus_routes'].null, False) self.assertEqual( station_state.options, {'abstract': False, 'swappable': 'TEST_SWAPPABLE_MODEL', 'indexes': [], 'constraints': []} ) self.assertEqual(station_state.bases, ('migrations.searchablelocation',)) self.assertEqual(station_state.managers, []) @override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel') def test_custom_manager_swappable(self): """ Tests making a ProjectState from unused models with custom managers """ new_apps = Apps(['migrations']) class Food(models.Model): food_mgr = FoodManager('a', 'b') food_qs = FoodQuerySet.as_manager() food_no_mgr = NoMigrationFoodManager('x', 'y') class Meta: app_label = "migrations" apps = new_apps swappable = 'TEST_SWAPPABLE_MODEL' food_state = ModelState.from_model(Food) # The default manager is used in migrations self.assertEqual([name for name, mgr in food_state.managers], ['food_mgr']) self.assertEqual(food_state.managers[0][1].args, ('a', 'b', 1, 2)) @isolate_apps('migrations', 'django.contrib.contenttypes') def test_order_with_respect_to_private_field(self): class PrivateFieldModel(models.Model): content_type = models.ForeignKey('contenttypes.ContentType', models.CASCADE) object_id = models.PositiveIntegerField() private = GenericForeignKey() class Meta: order_with_respect_to = 'private' state = 
ModelState.from_model(PrivateFieldModel) self.assertNotIn('order_with_respect_to', state.options) @isolate_apps('migrations') def test_abstract_model_children_inherit_indexes(self): class Abstract(models.Model): name = models.CharField(max_length=50) class Meta: app_label = 'migrations' abstract = True indexes = [models.Index(fields=['name'])] class Child1(Abstract): pass class Child2(Abstract): pass child1_state = ModelState.from_model(Child1) child2_state = ModelState.from_model(Child2) index_names = [index.name for index in child1_state.options['indexes']] self.assertEqual(index_names, ['migrations__name_b0afd7_idx']) index_names = [index.name for index in child2_state.options['indexes']] self.assertEqual(index_names, ['migrations__name_016466_idx']) # Modifying the state doesn't modify the index on the model. child1_state.options['indexes'][0].name = 'bar' self.assertEqual(Child1._meta.indexes[0].name, 'migrations__name_b0afd7_idx') @isolate_apps('migrations') def test_explicit_index_name(self): class TestModel(models.Model): name = models.CharField(max_length=50) class Meta: app_label = 'migrations' indexes = [models.Index(fields=['name'], name='foo_idx')] model_state = ModelState.from_model(TestModel) index_names = [index.name for index in model_state.options['indexes']] self.assertEqual(index_names, ['foo_idx']) @isolate_apps('migrations') def test_from_model_constraints(self): class ModelWithConstraints(models.Model): size = models.IntegerField() class Meta: constraints = [models.CheckConstraint(check=models.Q(size__gt=1), name='size_gt_1')] state = ModelState.from_model(ModelWithConstraints) model_constraints = ModelWithConstraints._meta.constraints state_constraints = state.options['constraints'] self.assertEqual(model_constraints, state_constraints) self.assertIsNot(model_constraints, state_constraints) self.assertIsNot(model_constraints[0], state_constraints[0]) class RelatedModelsTests(SimpleTestCase): def setUp(self): self.apps = Apps(['migrations.related_models_app']) def create_model(self, name, foreign_keys=[], bases=(), abstract=False, proxy=False): test_name = 'related_models_app' assert not (abstract and proxy) meta_contents = { 'abstract': abstract, 'app_label': test_name, 'apps': self.apps, 'proxy': proxy, } meta = type("Meta", (), meta_contents) if not bases: bases = (models.Model,) body = { 'Meta': meta, '__module__': "__fake__", } fname_base = fname = '%s_%%d' % name.lower() for i, fk in enumerate(foreign_keys, 1): fname = fname_base % i body[fname] = fk return type(name, bases, body) def assertRelated(self, model, needle): self.assertEqual( get_related_models_recursive(model), {(n._meta.app_label, n._meta.model_name) for n in needle}, ) def test_unrelated(self): A = self.create_model("A") B = self.create_model("B") self.assertRelated(A, []) self.assertRelated(B, []) def test_direct_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)]) B = self.create_model("B") self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_direct_hidden_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE, related_name='+')]) B = self.create_model("B") self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_fk_through_proxy(self): A = self.create_model("A") B = self.create_model("B", bases=(A,), proxy=True) C = self.create_model("C", bases=(B,), proxy=True) D = self.create_model("D", foreign_keys=[models.ForeignKey('C', models.CASCADE)]) self.assertRelated(A, [B, C, D]) self.assertRelated(B, [A, C, 
D]) self.assertRelated(C, [A, B, D]) self.assertRelated(D, [A, B, C]) def test_nested_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)]) B = self.create_model("B", foreign_keys=[models.ForeignKey('C', models.CASCADE)]) C = self.create_model("C") self.assertRelated(A, [B, C]) self.assertRelated(B, [A, C]) self.assertRelated(C, [A, B]) def test_two_sided(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)]) B = self.create_model("B", foreign_keys=[models.ForeignKey('A', models.CASCADE)]) self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_circle(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)]) B = self.create_model("B", foreign_keys=[models.ForeignKey('C', models.CASCADE)]) C = self.create_model("C", foreign_keys=[models.ForeignKey('A', models.CASCADE)]) self.assertRelated(A, [B, C]) self.assertRelated(B, [A, C]) self.assertRelated(C, [A, B]) def test_base(self): A = self.create_model("A") B = self.create_model("B", bases=(A,)) self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_nested_base(self): A = self.create_model("A") B = self.create_model("B", bases=(A,)) C = self.create_model("C", bases=(B,)) self.assertRelated(A, [B, C]) self.assertRelated(B, [A, C]) self.assertRelated(C, [A, B]) def test_multiple_bases(self): A = self.create_model("A") B = self.create_model("B") C = self.create_model("C", bases=(A, B,)) self.assertRelated(A, [B, C]) self.assertRelated(B, [A, C]) self.assertRelated(C, [A, B]) def test_multiple_nested_bases(self): A = self.create_model("A") B = self.create_model("B") C = self.create_model("C", bases=(A, B,)) D = self.create_model("D") E = self.create_model("E", bases=(D,)) F = self.create_model("F", bases=(C, E,)) Y = self.create_model("Y") Z = self.create_model("Z", bases=(Y,)) self.assertRelated(A, [B, C, D, E, F]) self.assertRelated(B, [A, C, D, E, F]) self.assertRelated(C, [A, B, D, E, F]) self.assertRelated(D, [A, B, C, E, F]) self.assertRelated(E, [A, B, C, D, F]) self.assertRelated(F, [A, B, C, D, E]) self.assertRelated(Y, [Z]) self.assertRelated(Z, [Y]) def test_base_to_base_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('Y', models.CASCADE)]) B = self.create_model("B", bases=(A,)) Y = self.create_model("Y") Z = self.create_model("Z", bases=(Y,)) self.assertRelated(A, [B, Y, Z]) self.assertRelated(B, [A, Y, Z]) self.assertRelated(Y, [A, B, Z]) self.assertRelated(Z, [A, B, Y]) def test_base_to_subclass_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('Z', models.CASCADE)]) B = self.create_model("B", bases=(A,)) Y = self.create_model("Y") Z = self.create_model("Z", bases=(Y,)) self.assertRelated(A, [B, Y, Z]) self.assertRelated(B, [A, Y, Z]) self.assertRelated(Y, [A, B, Z]) self.assertRelated(Z, [A, B, Y]) def test_direct_m2m(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('B')]) B = self.create_model("B") self.assertRelated(A, [A.a_1.rel.through, B]) self.assertRelated(B, [A, A.a_1.rel.through]) def test_direct_m2m_self(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('A')]) self.assertRelated(A, [A.a_1.rel.through]) def test_intermediate_m2m_self(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('A', through='T')]) T = self.create_model("T", foreign_keys=[ models.ForeignKey('A', models.CASCADE), models.ForeignKey('A', models.CASCADE), ]) self.assertRelated(A, [T]) self.assertRelated(T, [A]) def 
test_intermediate_m2m(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')]) B = self.create_model("B") T = self.create_model("T", foreign_keys=[ models.ForeignKey('A', models.CASCADE), models.ForeignKey('B', models.CASCADE), ]) self.assertRelated(A, [B, T]) self.assertRelated(B, [A, T]) self.assertRelated(T, [A, B]) def test_intermediate_m2m_extern_fk(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')]) B = self.create_model("B") Z = self.create_model("Z") T = self.create_model("T", foreign_keys=[ models.ForeignKey('A', models.CASCADE), models.ForeignKey('B', models.CASCADE), models.ForeignKey('Z', models.CASCADE), ]) self.assertRelated(A, [B, T, Z]) self.assertRelated(B, [A, T, Z]) self.assertRelated(T, [A, B, Z]) self.assertRelated(Z, [A, B, T]) def test_intermediate_m2m_base(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')]) B = self.create_model("B") S = self.create_model("S") T = self.create_model("T", foreign_keys=[ models.ForeignKey('A', models.CASCADE), models.ForeignKey('B', models.CASCADE), ], bases=(S,)) self.assertRelated(A, [B, S, T]) self.assertRelated(B, [A, S, T]) self.assertRelated(S, [A, B, T]) self.assertRelated(T, [A, B, S]) def test_generic_fk(self): A = self.create_model("A", foreign_keys=[ models.ForeignKey('B', models.CASCADE), GenericForeignKey(), ]) B = self.create_model("B", foreign_keys=[ models.ForeignKey('C', models.CASCADE), ]) self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_abstract_base(self): A = self.create_model("A", abstract=True) B = self.create_model("B", bases=(A,)) self.assertRelated(A, [B]) self.assertRelated(B, []) def test_nested_abstract_base(self): A = self.create_model("A", abstract=True) B = self.create_model("B", bases=(A,), abstract=True) C = self.create_model("C", bases=(B,)) self.assertRelated(A, [B, C]) self.assertRelated(B, [C]) self.assertRelated(C, []) def test_proxy_base(self): A = self.create_model("A") B = self.create_model("B", bases=(A,), proxy=True) self.assertRelated(A, [B]) self.assertRelated(B, []) def test_nested_proxy_base(self): A = self.create_model("A") B = self.create_model("B", bases=(A,), proxy=True) C = self.create_model("C", bases=(B,), proxy=True) self.assertRelated(A, [B, C]) self.assertRelated(B, [C]) self.assertRelated(C, []) def test_multiple_mixed_bases(self): A = self.create_model("A", abstract=True) M = self.create_model("M") P = self.create_model("P") Q = self.create_model("Q", bases=(P,), proxy=True) Z = self.create_model("Z", bases=(A, M, Q)) # M has a pointer O2O field p_ptr to P self.assertRelated(A, [M, P, Q, Z]) self.assertRelated(M, [P, Q, Z]) self.assertRelated(P, [M, Q, Z]) self.assertRelated(Q, [M, P, Z]) self.assertRelated(Z, [M, P, Q])
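# Illustrative sketch (not part of the original test module): the ModelState /
# ProjectState API exercised by the tests above, boiled down to a standalone
# snippet. Assumes a configured Django environment (settings configured and
# django.setup() run); 'example_app' and 'Tag' are made-up names.
from django.apps.registry import Apps
from django.db import models
from django.db.migrations.state import ModelState, ProjectState

tag_state = ModelState(
    'example_app', 'Tag',
    fields=[
        ('id', models.AutoField(primary_key=True)),
        ('name', models.CharField(max_length=100)),
    ],
)
project_state = ProjectState()
project_state.add_model(tag_state)

# Rendering a ModelState produces a real (historical) model class on an
# isolated app registry, leaving the models of installed apps untouched.
registry = Apps()
Tag = tag_state.render(registry)
assert Tag._meta.get_field('name').max_length == 100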
import re from django.forms import CharField, Form, Media from django.http import HttpRequest, HttpResponse from django.middleware.csrf import ( CSRF_TOKEN_LENGTH, CsrfViewMiddleware, _unmask_cipher_token, get_token, ) from django.template import TemplateDoesNotExist, TemplateSyntaxError from django.template.backends.dummy import TemplateStrings from django.test import SimpleTestCase class TemplateStringsTests(SimpleTestCase): engine_class = TemplateStrings backend_name = 'dummy' options = {} @classmethod def setUpClass(cls): super().setUpClass() params = { 'DIRS': [], 'APP_DIRS': True, 'NAME': cls.backend_name, 'OPTIONS': cls.options, } cls.engine = cls.engine_class(params) def test_from_string(self): template = self.engine.from_string("Hello!\n") content = template.render() self.assertEqual(content, "Hello!\n") def test_get_template(self): template = self.engine.get_template('template_backends/hello.html') content = template.render({'name': 'world'}) self.assertEqual(content, "Hello world!\n") def test_get_template_nonexistent(self): with self.assertRaises(TemplateDoesNotExist) as e: self.engine.get_template('template_backends/nonexistent.html') self.assertEqual(e.exception.backend, self.engine) def test_get_template_syntax_error(self): # There's no way to trigger a syntax error with the dummy backend. # The test still lives here to factor it between other backends. if self.backend_name == 'dummy': self.skipTest("test doesn't apply to dummy backend") with self.assertRaises(TemplateSyntaxError): self.engine.get_template('template_backends/syntax_error.html') def test_html_escaping(self): template = self.engine.get_template('template_backends/hello.html') context = {'name': '<script>alert("XSS!");</script>'} content = template.render(context) self.assertIn('&lt;script&gt;', content) self.assertNotIn('<script>', content) def test_django_html_escaping(self): if self.backend_name == 'dummy': self.skipTest("test doesn't apply to dummy backend") class TestForm(Form): test_field = CharField() media = Media(js=['my-script.js']) form = TestForm() template = self.engine.get_template('template_backends/django_escaping.html') content = template.render({'media': media, 'test_form': form}) expected = '{}\n\n{}\n\n{}'.format(media, form, form['test_field']) self.assertHTMLEqual(content, expected) def check_tokens_equivalent(self, token1, token2): self.assertEqual(len(token1), CSRF_TOKEN_LENGTH) self.assertEqual(len(token2), CSRF_TOKEN_LENGTH) token1, token2 = map(_unmask_cipher_token, (token1, token2)) self.assertEqual(token1, token2) def test_csrf_token(self): request = HttpRequest() CsrfViewMiddleware(lambda req: HttpResponse()).process_view(request, lambda r: None, (), {}) template = self.engine.get_template('template_backends/csrf.html') content = template.render(request=request) expected = '<input type="hidden" name="csrfmiddlewaretoken" value="([^"]+)">' match = re.match(expected, content) or re.match(expected.replace('"', "'"), content) self.assertTrue(match, "hidden csrftoken field not found in output") self.check_tokens_equivalent(match[1], get_token(request)) def test_no_directory_traversal(self): with self.assertRaises(TemplateDoesNotExist): self.engine.get_template('../forbidden/template_backends/hello.html') def test_non_ascii_characters(self): template = self.engine.get_template('template_backends/hello.html') content = template.render({'name': 'Jérôme'}) self.assertEqual(content, "Hello Jérôme!\n")
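# Illustrative sketch (not part of the original test module): constructing the
# dummy string-template backend directly, mirroring the params dict built in
# TemplateStringsTests.setUpClass(). The dummy backend substitutes $-style
# placeholders (string.Template syntax); no template files are needed for
# from_string().
from django.template.backends.dummy import TemplateStrings

engine = TemplateStrings({
    'DIRS': [],
    'APP_DIRS': True,
    'NAME': 'dummy',
    'OPTIONS': {},
})
template = engine.from_string('Hello $name!\n')
assert template.render({'name': 'world'}) == 'Hello world!\n'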
from datetime import datetime from django.core.exceptions import ValidationError from django.db import models from django.db.models.functions import Lower def validate_answer_to_universe(value): if value != 42: raise ValidationError('This is not the answer to life, universe and everything!', code='not42') class ModelToValidate(models.Model): name = models.CharField(max_length=100) created = models.DateTimeField(default=datetime.now) number = models.IntegerField(db_column='number_val') parent = models.ForeignKey( 'self', models.SET_NULL, blank=True, null=True, limit_choices_to={'number': 10}, ) email = models.EmailField(blank=True) ufm = models.ForeignKey( 'UniqueFieldsModel', models.SET_NULL, to_field='unique_charfield', blank=True, null=True, ) url = models.URLField(blank=True) f_with_custom_validator = models.IntegerField(blank=True, null=True, validators=[validate_answer_to_universe]) f_with_iterable_of_validators = models.IntegerField(blank=True, null=True, validators=(validate_answer_to_universe,)) slug = models.SlugField(blank=True) def clean(self): super().clean() if self.number == 11: raise ValidationError('Invalid number supplied!') class UniqueFieldsModel(models.Model): unique_charfield = models.CharField(max_length=100, unique=True) unique_integerfield = models.IntegerField(unique=True) non_unique_field = models.IntegerField() class CustomPKModel(models.Model): my_pk_field = models.CharField(max_length=100, primary_key=True) class UniqueTogetherModel(models.Model): cfield = models.CharField(max_length=100) ifield = models.IntegerField() efield = models.EmailField() class Meta: unique_together = (('ifield', 'cfield',), ['ifield', 'efield']) class UniqueForDateModel(models.Model): start_date = models.DateField() end_date = models.DateTimeField() count = models.IntegerField(unique_for_date="start_date", unique_for_year="end_date") order = models.IntegerField(unique_for_month="end_date") name = models.CharField(max_length=100) class CustomMessagesModel(models.Model): other = models.IntegerField(blank=True, null=True) number = models.IntegerField( db_column='number_val', error_messages={'null': 'NULL', 'not42': 'AAARGH', 'not_equal': '%s != me'}, validators=[validate_answer_to_universe] ) class AuthorManager(models.Manager): def get_queryset(self): qs = super().get_queryset() return qs.filter(archived=False) class Author(models.Model): name = models.CharField(max_length=100) archived = models.BooleanField(default=False) objects = AuthorManager() class Article(models.Model): title = models.CharField(max_length=100) author = models.ForeignKey(Author, models.CASCADE) pub_date = models.DateTimeField(blank=True) def clean(self): if self.pub_date is None: self.pub_date = datetime.now() class Post(models.Model): title = models.CharField(max_length=50, unique_for_date='posted', blank=True) slug = models.CharField(max_length=50, unique_for_year='posted', blank=True) subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True) posted = models.DateField() class FlexibleDatePost(models.Model): title = models.CharField(max_length=50, unique_for_date='posted', blank=True) slug = models.CharField(max_length=50, unique_for_year='posted', blank=True) subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True) posted = models.DateField(blank=True, null=True) class UniqueErrorsModel(models.Model): name = models.CharField(max_length=100, unique=True, error_messages={'unique': 'Custom unique name message.'}) no = models.IntegerField(unique=True, 
error_messages={'unique': 'Custom unique number message.'}) class GenericIPAddressTestModel(models.Model): generic_ip = models.GenericIPAddressField(blank=True, null=True, unique=True) v4_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv4") v6_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv6") ip_verbose_name = models.GenericIPAddressField("IP Address Verbose", blank=True, null=True) class GenericIPAddrUnpackUniqueTest(models.Model): generic_v4unpack_ip = models.GenericIPAddressField(null=True, blank=True, unique=True, unpack_ipv4=True) class UniqueFuncConstraintModel(models.Model): field = models.CharField(max_length=255) class Meta: required_db_features = {'supports_expression_indexes'} constraints = [ models.UniqueConstraint(Lower('field'), name='func_lower_field_uq'), ]
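# Illustrative sketch (not part of the original models module): how the custom
# clean() hook on ModelToValidate surfaces through full_clean(). Errors raised
# in Model.clean() are collected under the non-field key ('__all__'). Assumes
# this module is importable inside a configured Django project.
from django.core.exceptions import ValidationError

instance = ModelToValidate(number=11, name='Some Name')
try:
    instance.full_clean()
except ValidationError as e:
    assert e.message_dict['__all__'] == ['Invalid number supplied!']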
import datetime import unittest from django.apps.registry import Apps from django.core.exceptions import ValidationError from django.db import models from django.test import TestCase from .models import ( CustomPKModel, FlexibleDatePost, ModelToValidate, Post, UniqueErrorsModel, UniqueFieldsModel, UniqueForDateModel, UniqueFuncConstraintModel, UniqueTogetherModel, ) class GetUniqueCheckTests(unittest.TestCase): def test_unique_fields_get_collected(self): m = UniqueFieldsModel() self.assertEqual( ([(UniqueFieldsModel, ('id',)), (UniqueFieldsModel, ('unique_charfield',)), (UniqueFieldsModel, ('unique_integerfield',))], []), m._get_unique_checks() ) def test_unique_together_gets_picked_up_and_converted_to_tuple(self): m = UniqueTogetherModel() self.assertEqual( ([(UniqueTogetherModel, ('ifield', 'cfield')), (UniqueTogetherModel, ('ifield', 'efield')), (UniqueTogetherModel, ('id',))], []), m._get_unique_checks() ) def test_unique_together_normalization(self): """ Test the Meta.unique_together normalization with different sorts of objects. """ data = { '2-tuple': (('foo', 'bar'), (('foo', 'bar'),)), 'list': (['foo', 'bar'], (('foo', 'bar'),)), 'already normalized': ((('foo', 'bar'), ('bar', 'baz')), (('foo', 'bar'), ('bar', 'baz'))), 'set': ({('foo', 'bar'), ('bar', 'baz')}, # Ref #21469 (('foo', 'bar'), ('bar', 'baz'))), } for unique_together, normalized in data.values(): class M(models.Model): foo = models.IntegerField() bar = models.IntegerField() baz = models.IntegerField() Meta = type('Meta', (), { 'unique_together': unique_together, 'apps': Apps() }) checks, _ = M()._get_unique_checks() for t in normalized: check = (M, t) self.assertIn(check, checks) def test_primary_key_is_considered_unique(self): m = CustomPKModel() self.assertEqual(([(CustomPKModel, ('my_pk_field',))], []), m._get_unique_checks()) def test_unique_for_date_gets_picked_up(self): m = UniqueForDateModel() self.assertEqual(( [(UniqueForDateModel, ('id',))], [(UniqueForDateModel, 'date', 'count', 'start_date'), (UniqueForDateModel, 'year', 'count', 'end_date'), (UniqueForDateModel, 'month', 'order', 'end_date')] ), m._get_unique_checks() ) def test_unique_for_date_exclusion(self): m = UniqueForDateModel() self.assertEqual(( [(UniqueForDateModel, ('id',))], [(UniqueForDateModel, 'year', 'count', 'end_date'), (UniqueForDateModel, 'month', 'order', 'end_date')] ), m._get_unique_checks(exclude='start_date') ) def test_func_unique_constraint_ignored(self): m = UniqueFuncConstraintModel() self.assertEqual( m._get_unique_checks(), ([(UniqueFuncConstraintModel, ('id',))], []), ) class PerformUniqueChecksTest(TestCase): def test_primary_key_unique_check_not_performed_when_adding_and_pk_not_specified(self): # Regression test for #12560 with self.assertNumQueries(0): mtv = ModelToValidate(number=10, name='Some Name') setattr(mtv, '_adding', True) mtv.full_clean() def test_primary_key_unique_check_performed_when_adding_and_pk_specified(self): # Regression test for #12560 with self.assertNumQueries(1): mtv = ModelToValidate(number=10, name='Some Name', id=123) setattr(mtv, '_adding', True) mtv.full_clean() def test_primary_key_unique_check_not_performed_when_not_adding(self): # Regression test for #12132 with self.assertNumQueries(0): mtv = ModelToValidate(number=10, name='Some Name') mtv.full_clean() def test_func_unique_check_not_performed(self): with self.assertNumQueries(0): UniqueFuncConstraintModel(field='some name').full_clean() def test_unique_for_date(self): Post.objects.create( title="Django 1.0 is released", slug="Django 1.0", 
subtitle="Finally", posted=datetime.date(2008, 9, 3), ) p = Post(title="Django 1.0 is released", posted=datetime.date(2008, 9, 3)) with self.assertRaises(ValidationError) as cm: p.full_clean() self.assertEqual(cm.exception.message_dict, {'title': ['Title must be unique for Posted date.']}) # Should work without errors p = Post(title="Work on Django 1.1 begins", posted=datetime.date(2008, 9, 3)) p.full_clean() # Should work without errors p = Post(title="Django 1.0 is released", posted=datetime.datetime(2008, 9, 4)) p.full_clean() p = Post(slug="Django 1.0", posted=datetime.datetime(2008, 1, 1)) with self.assertRaises(ValidationError) as cm: p.full_clean() self.assertEqual(cm.exception.message_dict, {'slug': ['Slug must be unique for Posted year.']}) p = Post(subtitle="Finally", posted=datetime.datetime(2008, 9, 30)) with self.assertRaises(ValidationError) as cm: p.full_clean() self.assertEqual(cm.exception.message_dict, {'subtitle': ['Subtitle must be unique for Posted month.']}) p = Post(title="Django 1.0 is released") with self.assertRaises(ValidationError) as cm: p.full_clean() self.assertEqual(cm.exception.message_dict, {'posted': ['This field cannot be null.']}) def test_unique_for_date_with_nullable_date(self): """ unique_for_date/year/month checks shouldn't trigger when the associated DateField is None. """ FlexibleDatePost.objects.create( title="Django 1.0 is released", slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3), ) p = FlexibleDatePost(title="Django 1.0 is released") p.full_clean() p = FlexibleDatePost(slug="Django 1.0") p.full_clean() p = FlexibleDatePost(subtitle="Finally") p.full_clean() def test_unique_errors(self): UniqueErrorsModel.objects.create(name='Some Name', no=10) m = UniqueErrorsModel(name='Some Name', no=11) with self.assertRaises(ValidationError) as cm: m.full_clean() self.assertEqual(cm.exception.message_dict, {'name': ['Custom unique name message.']}) m = UniqueErrorsModel(name='Some Other Name', no=10) with self.assertRaises(ValidationError) as cm: m.full_clean() self.assertEqual(cm.exception.message_dict, {'no': ['Custom unique number message.']})
import os import sys import unittest from types import ModuleType, SimpleNamespace from unittest import mock from django.conf import ( ENVIRONMENT_VARIABLE, USE_DEPRECATED_PYTZ_DEPRECATED_MSG, LazySettings, Settings, settings, ) from django.core.exceptions import ImproperlyConfigured from django.http import HttpRequest from django.test import ( SimpleTestCase, TestCase, TransactionTestCase, modify_settings, override_settings, signals, ) from django.test.utils import requires_tz_support from django.urls import clear_script_prefix, set_script_prefix from django.utils.deprecation import RemovedInDjango50Warning @modify_settings(ITEMS={ 'prepend': ['b'], 'append': ['d'], 'remove': ['a', 'e'] }) @override_settings(ITEMS=['a', 'c', 'e'], ITEMS_OUTER=[1, 2, 3], TEST='override', TEST_OUTER='outer') class FullyDecoratedTranTestCase(TransactionTestCase): available_apps = [] def test_override(self): self.assertEqual(settings.ITEMS, ['b', 'c', 'd']) self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3]) self.assertEqual(settings.TEST, 'override') self.assertEqual(settings.TEST_OUTER, 'outer') @modify_settings(ITEMS={ 'append': ['e', 'f'], 'prepend': ['a'], 'remove': ['d', 'c'], }) def test_method_list_override(self): self.assertEqual(settings.ITEMS, ['a', 'b', 'e', 'f']) self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3]) @modify_settings(ITEMS={ 'append': ['b'], 'prepend': ['d'], 'remove': ['a', 'c', 'e'], }) def test_method_list_override_no_ops(self): self.assertEqual(settings.ITEMS, ['b', 'd']) @modify_settings(ITEMS={ 'append': 'e', 'prepend': 'a', 'remove': 'c', }) def test_method_list_override_strings(self): self.assertEqual(settings.ITEMS, ['a', 'b', 'd', 'e']) @modify_settings(ITEMS={'remove': ['b', 'd']}) @modify_settings(ITEMS={'append': ['b'], 'prepend': ['d']}) def test_method_list_override_nested_order(self): self.assertEqual(settings.ITEMS, ['d', 'c', 'b']) @override_settings(TEST='override2') def test_method_override(self): self.assertEqual(settings.TEST, 'override2') self.assertEqual(settings.TEST_OUTER, 'outer') def test_decorated_testcase_name(self): self.assertEqual(FullyDecoratedTranTestCase.__name__, 'FullyDecoratedTranTestCase') def test_decorated_testcase_module(self): self.assertEqual(FullyDecoratedTranTestCase.__module__, __name__) @modify_settings(ITEMS={ 'prepend': ['b'], 'append': ['d'], 'remove': ['a', 'e'] }) @override_settings(ITEMS=['a', 'c', 'e'], TEST='override') class FullyDecoratedTestCase(TestCase): def test_override(self): self.assertEqual(settings.ITEMS, ['b', 'c', 'd']) self.assertEqual(settings.TEST, 'override') @modify_settings(ITEMS={ 'append': 'e', 'prepend': 'a', 'remove': 'c', }) @override_settings(TEST='override2') def test_method_override(self): self.assertEqual(settings.ITEMS, ['a', 'b', 'd', 'e']) self.assertEqual(settings.TEST, 'override2') class ClassDecoratedTestCaseSuper(TestCase): """ Dummy class for testing max recursion error in child class call to super(). Refs #17011. 
""" def test_max_recursion_error(self): pass @override_settings(TEST='override') class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper): @classmethod def setUpClass(cls): super().setUpClass() cls.foo = getattr(settings, 'TEST', 'BUG') def test_override(self): self.assertEqual(settings.TEST, 'override') def test_setupclass_override(self): """Settings are overridden within setUpClass (#21281).""" self.assertEqual(self.foo, 'override') @override_settings(TEST='override2') def test_method_override(self): self.assertEqual(settings.TEST, 'override2') def test_max_recursion_error(self): """ Overriding a method on a super class and then calling that method on the super class should not trigger infinite recursion. See #17011. """ super().test_max_recursion_error() @modify_settings(ITEMS={'append': 'mother'}) @override_settings(ITEMS=['father'], TEST='override-parent') class ParentDecoratedTestCase(TestCase): pass @modify_settings(ITEMS={'append': ['child']}) @override_settings(TEST='override-child') class ChildDecoratedTestCase(ParentDecoratedTestCase): def test_override_settings_inheritance(self): self.assertEqual(settings.ITEMS, ['father', 'mother', 'child']) self.assertEqual(settings.TEST, 'override-child') class SettingsTests(SimpleTestCase): def setUp(self): self.testvalue = None signals.setting_changed.connect(self.signal_callback) def tearDown(self): signals.setting_changed.disconnect(self.signal_callback) def signal_callback(self, sender, setting, value, **kwargs): if setting == 'TEST': self.testvalue = value def test_override(self): settings.TEST = 'test' self.assertEqual('test', settings.TEST) with self.settings(TEST='override'): self.assertEqual('override', settings.TEST) self.assertEqual('test', settings.TEST) del settings.TEST def test_override_change(self): settings.TEST = 'test' self.assertEqual('test', settings.TEST) with self.settings(TEST='override'): self.assertEqual('override', settings.TEST) settings.TEST = 'test2' self.assertEqual('test', settings.TEST) del settings.TEST def test_override_doesnt_leak(self): with self.assertRaises(AttributeError): getattr(settings, 'TEST') with self.settings(TEST='override'): self.assertEqual('override', settings.TEST) settings.TEST = 'test' with self.assertRaises(AttributeError): getattr(settings, 'TEST') @override_settings(TEST='override') def test_decorator(self): self.assertEqual('override', settings.TEST) def test_context_manager(self): with self.assertRaises(AttributeError): getattr(settings, 'TEST') override = override_settings(TEST='override') with self.assertRaises(AttributeError): getattr(settings, 'TEST') override.enable() self.assertEqual('override', settings.TEST) override.disable() with self.assertRaises(AttributeError): getattr(settings, 'TEST') def test_class_decorator(self): # SimpleTestCase can be decorated by override_settings, but not ut.TestCase class SimpleTestCaseSubclass(SimpleTestCase): pass class UnittestTestCaseSubclass(unittest.TestCase): pass decorated = override_settings(TEST='override')(SimpleTestCaseSubclass) self.assertIsInstance(decorated, type) self.assertTrue(issubclass(decorated, SimpleTestCase)) with self.assertRaisesMessage(Exception, "Only subclasses of Django SimpleTestCase"): decorated = override_settings(TEST='override')(UnittestTestCaseSubclass) def test_signal_callback_context_manager(self): with self.assertRaises(AttributeError): getattr(settings, 'TEST') with self.settings(TEST='override'): self.assertEqual(self.testvalue, 'override') self.assertIsNone(self.testvalue) 
@override_settings(TEST='override') def test_signal_callback_decorator(self): self.assertEqual(self.testvalue, 'override') # # Regression tests for #10130: deleting settings. # def test_settings_delete(self): settings.TEST = 'test' self.assertEqual('test', settings.TEST) del settings.TEST msg = "'Settings' object has no attribute 'TEST'" with self.assertRaisesMessage(AttributeError, msg): getattr(settings, 'TEST') def test_settings_delete_wrapped(self): with self.assertRaisesMessage(TypeError, "can't delete _wrapped."): delattr(settings, '_wrapped') def test_override_settings_delete(self): """ Allow deletion of a setting in an overridden settings set (#18824) """ previous_i18n = settings.USE_I18N previous_tz = settings.USE_TZ with self.settings(USE_I18N=False): del settings.USE_I18N with self.assertRaises(AttributeError): getattr(settings, 'USE_I18N') # Should also work for a non-overridden setting del settings.USE_TZ with self.assertRaises(AttributeError): getattr(settings, 'USE_TZ') self.assertNotIn('USE_I18N', dir(settings)) self.assertNotIn('USE_TZ', dir(settings)) self.assertEqual(settings.USE_I18N, previous_i18n) self.assertEqual(settings.USE_TZ, previous_tz) def test_override_settings_nested(self): """ override_settings uses the actual _wrapped attribute at runtime, not when it was instantiated. """ with self.assertRaises(AttributeError): getattr(settings, 'TEST') with self.assertRaises(AttributeError): getattr(settings, 'TEST2') inner = override_settings(TEST2='override') with override_settings(TEST='override'): self.assertEqual('override', settings.TEST) with inner: self.assertEqual('override', settings.TEST) self.assertEqual('override', settings.TEST2) # inner's __exit__ should have restored the settings of the outer # context manager, not those when the class was instantiated self.assertEqual('override', settings.TEST) with self.assertRaises(AttributeError): getattr(settings, 'TEST2') with self.assertRaises(AttributeError): getattr(settings, 'TEST') with self.assertRaises(AttributeError): getattr(settings, 'TEST2') @override_settings(SECRET_KEY='') def test_no_secret_key(self): msg = 'The SECRET_KEY setting must not be empty.' with self.assertRaisesMessage(ImproperlyConfigured, msg): settings.SECRET_KEY def test_no_settings_module(self): msg = ( 'Requested setting%s, but settings are not configured. You ' 'must either define the environment variable DJANGO_SETTINGS_MODULE ' 'or call settings.configure() before accessing settings.' 
) orig_settings = os.environ[ENVIRONMENT_VARIABLE] os.environ[ENVIRONMENT_VARIABLE] = '' try: with self.assertRaisesMessage(ImproperlyConfigured, msg % 's'): settings._setup() with self.assertRaisesMessage(ImproperlyConfigured, msg % ' TEST'): settings._setup('TEST') finally: os.environ[ENVIRONMENT_VARIABLE] = orig_settings def test_already_configured(self): with self.assertRaisesMessage(RuntimeError, 'Settings already configured.'): settings.configure() def test_nonupper_settings_prohibited_in_configure(self): s = LazySettings() with self.assertRaisesMessage(TypeError, "Setting 'foo' must be uppercase."): s.configure(foo='bar') def test_nonupper_settings_ignored_in_default_settings(self): s = LazySettings() s.configure(SimpleNamespace(foo='bar')) with self.assertRaises(AttributeError): getattr(s, 'foo') @requires_tz_support @mock.patch('django.conf.global_settings.TIME_ZONE', 'test') def test_incorrect_timezone(self): with self.assertRaisesMessage(ValueError, 'Incorrect timezone setting: test'): settings._setup() def test_use_tz_false_deprecation(self): settings_module = ModuleType('fake_settings_module') settings_module.SECRET_KEY = 'foo' sys.modules['fake_settings_module'] = settings_module msg = ( 'The default value of USE_TZ will change from False to True in ' 'Django 5.0. Set USE_TZ to False in your project settings if you ' 'want to keep the current default behavior.' ) try: with self.assertRaisesMessage(RemovedInDjango50Warning, msg): Settings('fake_settings_module') finally: del sys.modules['fake_settings_module'] def test_use_deprecated_pytz_deprecation(self): settings_module = ModuleType('fake_settings_module') settings_module.USE_DEPRECATED_PYTZ = True settings_module.USE_TZ = True sys.modules['fake_settings_module'] = settings_module try: with self.assertRaisesMessage(RemovedInDjango50Warning, USE_DEPRECATED_PYTZ_DEPRECATED_MSG): Settings('fake_settings_module') finally: del sys.modules['fake_settings_module'] holder = LazySettings() with self.assertRaisesMessage(RemovedInDjango50Warning, USE_DEPRECATED_PYTZ_DEPRECATED_MSG): holder.configure(USE_DEPRECATED_PYTZ=True) class TestComplexSettingOverride(SimpleTestCase): def setUp(self): self.old_warn_override_settings = signals.COMPLEX_OVERRIDE_SETTINGS.copy() signals.COMPLEX_OVERRIDE_SETTINGS.add('TEST_WARN') def tearDown(self): signals.COMPLEX_OVERRIDE_SETTINGS = self.old_warn_override_settings self.assertNotIn('TEST_WARN', signals.COMPLEX_OVERRIDE_SETTINGS) def test_complex_override_warning(self): """Regression test for #19031""" msg = 'Overriding setting TEST_WARN can lead to unexpected behavior.' 
with self.assertWarnsMessage(UserWarning, msg) as cm: with override_settings(TEST_WARN='override'): self.assertEqual(settings.TEST_WARN, 'override') self.assertEqual(cm.filename, __file__) class SecureProxySslHeaderTest(SimpleTestCase): @override_settings(SECURE_PROXY_SSL_HEADER=None) def test_none(self): req = HttpRequest() self.assertIs(req.is_secure(), False) @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')) def test_set_without_xheader(self): req = HttpRequest() self.assertIs(req.is_secure(), False) @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')) def test_set_with_xheader_wrong(self): req = HttpRequest() req.META['HTTP_X_FORWARDED_PROTO'] = 'wrongvalue' self.assertIs(req.is_secure(), False) @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')) def test_set_with_xheader_right(self): req = HttpRequest() req.META['HTTP_X_FORWARDED_PROTO'] = 'https' self.assertIs(req.is_secure(), True) @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')) def test_xheader_preferred_to_underlying_request(self): class ProxyRequest(HttpRequest): def _get_scheme(self): """Proxy always connecting via HTTPS""" return 'https' # Client connects via HTTP. req = ProxyRequest() req.META['HTTP_X_FORWARDED_PROTO'] = 'http' self.assertIs(req.is_secure(), False) class IsOverriddenTest(SimpleTestCase): def test_configure(self): s = LazySettings() s.configure(SECRET_KEY='foo') self.assertTrue(s.is_overridden('SECRET_KEY')) def test_module(self): settings_module = ModuleType('fake_settings_module') settings_module.SECRET_KEY = 'foo' settings_module.USE_TZ = False sys.modules['fake_settings_module'] = settings_module try: s = Settings('fake_settings_module') self.assertTrue(s.is_overridden('SECRET_KEY')) self.assertFalse(s.is_overridden('ALLOWED_HOSTS')) finally: del sys.modules['fake_settings_module'] def test_override(self): self.assertFalse(settings.is_overridden('ALLOWED_HOSTS')) with override_settings(ALLOWED_HOSTS=[]): self.assertTrue(settings.is_overridden('ALLOWED_HOSTS')) def test_unevaluated_lazysettings_repr(self): lazy_settings = LazySettings() expected = '<LazySettings [Unevaluated]>' self.assertEqual(repr(lazy_settings), expected) def test_evaluated_lazysettings_repr(self): lazy_settings = LazySettings() module = os.environ.get(ENVIRONMENT_VARIABLE) expected = '<LazySettings "%s">' % module # Force evaluation of the lazy object. lazy_settings.APPEND_SLASH self.assertEqual(repr(lazy_settings), expected) def test_usersettingsholder_repr(self): lazy_settings = LazySettings() lazy_settings.configure(APPEND_SLASH=False) expected = '<UserSettingsHolder>' self.assertEqual(repr(lazy_settings._wrapped), expected) def test_settings_repr(self): module = os.environ.get(ENVIRONMENT_VARIABLE) lazy_settings = Settings(module) expected = '<Settings "%s">' % module self.assertEqual(repr(lazy_settings), expected) class TestListSettings(SimpleTestCase): """ Make sure settings that should be lists or tuples throw ImproperlyConfigured if they are set to a string instead of a list or tuple. """ list_or_tuple_settings = ( 'ALLOWED_HOSTS', "INSTALLED_APPS", "TEMPLATE_DIRS", "LOCALE_PATHS", ) def test_tuple_settings(self): settings_module = ModuleType('fake_settings_module') settings_module.SECRET_KEY = 'foo' msg = 'The %s setting must be a list or a tuple.' 
for setting in self.list_or_tuple_settings: setattr(settings_module, setting, ('non_list_or_tuple_value')) sys.modules['fake_settings_module'] = settings_module try: with self.assertRaisesMessage(ImproperlyConfigured, msg % setting): Settings('fake_settings_module') finally: del sys.modules['fake_settings_module'] delattr(settings_module, setting) class SettingChangeEnterException(Exception): pass class SettingChangeExitException(Exception): pass class OverrideSettingsIsolationOnExceptionTests(SimpleTestCase): """ The override_settings context manager restore settings if one of the receivers of "setting_changed" signal fails. Check the three cases of receiver failure detailed in receiver(). In each case, ALL receivers are called when exiting the context manager. """ def setUp(self): signals.setting_changed.connect(self.receiver) self.addCleanup(signals.setting_changed.disconnect, self.receiver) # Create a spy that's connected to the `setting_changed` signal and # executed AFTER `self.receiver`. self.spy_receiver = mock.Mock() signals.setting_changed.connect(self.spy_receiver) self.addCleanup(signals.setting_changed.disconnect, self.spy_receiver) def receiver(self, **kwargs): """ A receiver that fails while certain settings are being changed. - SETTING_BOTH raises an error while receiving the signal on both entering and exiting the context manager. - SETTING_ENTER raises an error only on enter. - SETTING_EXIT raises an error only on exit. """ setting = kwargs['setting'] enter = kwargs['enter'] if setting in ('SETTING_BOTH', 'SETTING_ENTER') and enter: raise SettingChangeEnterException if setting in ('SETTING_BOTH', 'SETTING_EXIT') and not enter: raise SettingChangeExitException def check_settings(self): """Assert that settings for these tests aren't present.""" self.assertFalse(hasattr(settings, 'SETTING_BOTH')) self.assertFalse(hasattr(settings, 'SETTING_ENTER')) self.assertFalse(hasattr(settings, 'SETTING_EXIT')) self.assertFalse(hasattr(settings, 'SETTING_PASS')) def check_spy_receiver_exit_calls(self, call_count): """ Assert that `self.spy_receiver` was called exactly `call_count` times with the ``enter=False`` keyword argument. """ kwargs_with_exit = [ kwargs for args, kwargs in self.spy_receiver.call_args_list if ('enter', False) in kwargs.items() ] self.assertEqual(len(kwargs_with_exit), call_count) def test_override_settings_both(self): """Receiver fails on both enter and exit.""" with self.assertRaises(SettingChangeEnterException): with override_settings(SETTING_PASS='BOTH', SETTING_BOTH='BOTH'): pass self.check_settings() # Two settings were touched, so expect two calls of `spy_receiver`. self.check_spy_receiver_exit_calls(call_count=2) def test_override_settings_enter(self): """Receiver fails on enter only.""" with self.assertRaises(SettingChangeEnterException): with override_settings(SETTING_PASS='ENTER', SETTING_ENTER='ENTER'): pass self.check_settings() # Two settings were touched, so expect two calls of `spy_receiver`. self.check_spy_receiver_exit_calls(call_count=2) def test_override_settings_exit(self): """Receiver fails on exit only.""" with self.assertRaises(SettingChangeExitException): with override_settings(SETTING_PASS='EXIT', SETTING_EXIT='EXIT'): pass self.check_settings() # Two settings were touched, so expect two calls of `spy_receiver`. self.check_spy_receiver_exit_calls(call_count=2) def test_override_settings_reusable_on_enter(self): """ Error is raised correctly when reusing the same override_settings instance. 
""" @override_settings(SETTING_ENTER='ENTER') def decorated_function(): pass with self.assertRaises(SettingChangeEnterException): decorated_function() signals.setting_changed.disconnect(self.receiver) # This call shouldn't raise any errors. decorated_function() class MediaURLStaticURLPrefixTest(SimpleTestCase): def set_script_name(self, val): clear_script_prefix() if val is not None: set_script_prefix(val) def test_not_prefixed(self): # Don't add SCRIPT_NAME prefix to absolute paths, URLs, or None. tests = ( '/path/', 'http://myhost.com/path/', 'http://myhost/path/', 'https://myhost/path/', None, ) for setting in ('MEDIA_URL', 'STATIC_URL'): for path in tests: new_settings = {setting: path} with self.settings(**new_settings): for script_name in ['/somesubpath', '/somesubpath/', '/', '', None]: with self.subTest(script_name=script_name, **new_settings): try: self.set_script_name(script_name) self.assertEqual(getattr(settings, setting), path) finally: clear_script_prefix() def test_add_script_name_prefix(self): tests = ( # Relative paths. ('/somesubpath', 'path/', '/somesubpath/path/'), ('/somesubpath/', 'path/', '/somesubpath/path/'), ('/', 'path/', '/path/'), # Invalid URLs. ('/somesubpath/', 'htp://myhost.com/path/', '/somesubpath/htp://myhost.com/path/'), # Blank settings. ('/somesubpath/', '', '/somesubpath/'), ) for setting in ('MEDIA_URL', 'STATIC_URL'): for script_name, path, expected_path in tests: new_settings = {setting: path} with self.settings(**new_settings): with self.subTest(script_name=script_name, **new_settings): try: self.set_script_name(script_name) self.assertEqual(getattr(settings, setting), expected_path) finally: clear_script_prefix()
from django.test import TestCase

from .models import Person


class PropertyTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        cls.a = Person.objects.create(first_name='John', last_name='Lennon')

    def test_getter(self):
        self.assertEqual(self.a.full_name, 'John Lennon')

    def test_setter(self):
        # The "full_name" property hasn't provided a "set" method.
        with self.assertRaises(AttributeError):
            setattr(self.a, 'full_name', 'Paul McCartney')
        # And cannot be used to initialize the class.
        with self.assertRaises(AttributeError):
            Person(full_name='Paul McCartney')
        # But "full_name_2" has, and it can be used to initialize the class.
        a2 = Person(full_name_2='Paul McCartney')
        a2.save()
        self.assertEqual(a2.first_name, 'Paul')
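# The companion .models module for these property tests is not included in
# this dump. Reconstructed from the assertions above (a sketch, not the
# verbatim source), it would look roughly like this: "full_name" is a
# read-only property, while "full_name_2" also defines a setter and can
# therefore be passed to __init__.
from django.db import models


class Person(models.Model):
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=30)

    def _get_full_name(self):
        return '%s %s' % (self.first_name, self.last_name)

    def _set_full_name(self, combined_name):
        self.first_name, self.last_name = combined_name.split(' ', 1)

    full_name = property(_get_full_name)
    full_name_2 = property(_get_full_name, _set_full_name)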
import builtins import getpass import os import sys from datetime import date from io import StringIO from unittest import mock from django.apps import apps from django.contrib.auth import get_permission_codename, management from django.contrib.auth.management import ( create_permissions, get_default_username, ) from django.contrib.auth.management.commands import ( changepassword, createsuperuser, ) from django.contrib.auth.models import Group, Permission, User from django.contrib.contenttypes.models import ContentType from django.core.management import call_command from django.core.management.base import CommandError from django.db import migrations from django.test import TestCase, override_settings from django.utils.translation import gettext_lazy as _ from .models import ( CustomUser, CustomUserNonUniqueUsername, CustomUserWithFK, CustomUserWithM2M, Email, Organization, UserProxy, ) MOCK_INPUT_KEY_TO_PROMPTS = { # @mock_inputs dict key: [expected prompt messages], 'bypass': ['Bypass password validation and create user anyway? [y/N]: '], 'email': ['Email address: '], 'date_of_birth': ['Date of birth: '], 'first_name': ['First name: '], 'username': ['Username: ', lambda: "Username (leave blank to use '%s'): " % get_default_username()], } def mock_inputs(inputs): """ Decorator to temporarily replace input/getpass to allow interactive createsuperuser. """ def inner(test_func): def wrapped(*args): class mock_getpass: @staticmethod def getpass(prompt=b'Password: ', stream=None): if callable(inputs['password']): return inputs['password']() return inputs['password'] def mock_input(prompt): assert '__proxy__' not in prompt response = None for key, val in inputs.items(): if val == 'KeyboardInterrupt': raise KeyboardInterrupt # get() fallback because sometimes 'key' is the actual # prompt rather than a shortcut name. prompt_msgs = MOCK_INPUT_KEY_TO_PROMPTS.get(key, key) if isinstance(prompt_msgs, list): prompt_msgs = [msg() if callable(msg) else msg for msg in prompt_msgs] if prompt in prompt_msgs: if callable(val): response = val() else: response = val break if response is None: raise ValueError('Mock input for %r not found.' % prompt) return response old_getpass = createsuperuser.getpass old_input = builtins.input createsuperuser.getpass = mock_getpass builtins.input = mock_input try: test_func(*args) finally: createsuperuser.getpass = old_getpass builtins.input = old_input return wrapped return inner class MockTTY: """ A fake stdin object that pretends to be a TTY to be used in conjunction with mock_inputs. 
""" def isatty(self): return True class MockInputTests(TestCase): @mock_inputs({'username': 'alice'}) def test_input_not_found(self): with self.assertRaisesMessage(ValueError, "Mock input for 'Email address: ' not found."): call_command('createsuperuser', stdin=MockTTY()) class GetDefaultUsernameTestCase(TestCase): databases = {'default', 'other'} def setUp(self): self.old_get_system_username = management.get_system_username def tearDown(self): management.get_system_username = self.old_get_system_username def test_actual_implementation(self): self.assertIsInstance(management.get_system_username(), str) def test_simple(self): management.get_system_username = lambda: 'joe' self.assertEqual(management.get_default_username(), 'joe') def test_existing(self): User.objects.create(username='joe') management.get_system_username = lambda: 'joe' self.assertEqual(management.get_default_username(), '') self.assertEqual( management.get_default_username(check_db=False), 'joe') def test_i18n(self): # 'Julia' with accented 'u': management.get_system_username = lambda: 'J\xfalia' self.assertEqual(management.get_default_username(), 'julia') def test_with_database(self): User.objects.create(username='joe') management.get_system_username = lambda: 'joe' self.assertEqual(management.get_default_username(), '') self.assertEqual(management.get_default_username(database='other'), 'joe') User.objects.using('other').create(username='joe') self.assertEqual(management.get_default_username(database='other'), '') @override_settings(AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}, ]) class ChangepasswordManagementCommandTestCase(TestCase): @classmethod def setUpTestData(cls): cls.user = User.objects.create_user(username='joe', password='qwerty') def setUp(self): self.stdout = StringIO() self.stderr = StringIO() def tearDown(self): self.stdout.close() self.stderr.close() @mock.patch.object(getpass, 'getpass', return_value='password') def test_get_pass(self, mock_get_pass): call_command('changepassword', username='joe', stdout=self.stdout) self.assertIs(User.objects.get(username='joe').check_password('password'), True) @mock.patch.object(getpass, 'getpass', return_value='') def test_get_pass_no_input(self, mock_get_pass): with self.assertRaisesMessage(CommandError, 'aborted'): call_command('changepassword', username='joe', stdout=self.stdout) @mock.patch.object(changepassword.Command, '_get_pass', return_value='new_password') def test_system_username(self, mock_get_pass): """The system username is used if --username isn't provided.""" username = getpass.getuser() User.objects.create_user(username=username, password='qwerty') call_command('changepassword', stdout=self.stdout) self.assertIs(User.objects.get(username=username).check_password('new_password'), True) def test_nonexistent_username(self): with self.assertRaisesMessage(CommandError, "user 'test' does not exist"): call_command('changepassword', username='test', stdout=self.stdout) @mock.patch.object(changepassword.Command, '_get_pass', return_value='not qwerty') def test_that_changepassword_command_changes_joes_password(self, mock_get_pass): "Executing the changepassword management command should change joe's password" self.assertTrue(self.user.check_password('qwerty')) call_command('changepassword', username='joe', stdout=self.stdout) command_output = self.stdout.getvalue().strip() self.assertEqual( command_output, "Changing password for user 'joe'\nPassword changed successfully for user 'joe'" ) 
self.assertTrue(User.objects.get(username="joe").check_password("not qwerty")) @mock.patch.object(changepassword.Command, '_get_pass', side_effect=lambda *args: str(args)) def test_that_max_tries_exits_1(self, mock_get_pass): """ A CommandError should be thrown by handle() if the user enters in mismatched passwords three times. """ msg = "Aborting password change for user 'joe' after 3 attempts" with self.assertRaisesMessage(CommandError, msg): call_command('changepassword', username='joe', stdout=self.stdout, stderr=self.stderr) @mock.patch.object(changepassword.Command, '_get_pass', return_value='1234567890') def test_password_validation(self, mock_get_pass): """ A CommandError should be raised if the user enters in passwords which fail validation three times. """ abort_msg = "Aborting password change for user 'joe' after 3 attempts" with self.assertRaisesMessage(CommandError, abort_msg): call_command('changepassword', username='joe', stdout=self.stdout, stderr=self.stderr) self.assertIn('This password is entirely numeric.', self.stderr.getvalue()) @mock.patch.object(changepassword.Command, '_get_pass', return_value='not qwerty') def test_that_changepassword_command_works_with_nonascii_output(self, mock_get_pass): """ #21627 -- Executing the changepassword management command should allow non-ASCII characters from the User object representation. """ # 'Julia' with accented 'u': User.objects.create_user(username='J\xfalia', password='qwerty') call_command('changepassword', username='J\xfalia', stdout=self.stdout) class MultiDBChangepasswordManagementCommandTestCase(TestCase): databases = {'default', 'other'} @mock.patch.object(changepassword.Command, '_get_pass', return_value='not qwerty') def test_that_changepassword_command_with_database_option_uses_given_db(self, mock_get_pass): """ changepassword --database should operate on the specified DB. 
""" user = User.objects.db_manager('other').create_user(username='joe', password='qwerty') self.assertTrue(user.check_password('qwerty')) out = StringIO() call_command('changepassword', username='joe', database='other', stdout=out) command_output = out.getvalue().strip() self.assertEqual( command_output, "Changing password for user 'joe'\nPassword changed successfully for user 'joe'" ) self.assertTrue(User.objects.using('other').get(username="joe").check_password('not qwerty')) @override_settings( SILENCED_SYSTEM_CHECKS=['fields.W342'], # ForeignKey(unique=True) AUTH_PASSWORD_VALIDATORS=[{'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}], ) class CreatesuperuserManagementCommandTestCase(TestCase): def test_no_email_argument(self): new_io = StringIO() with self.assertRaisesMessage(CommandError, 'You must use --email with --noinput.'): call_command('createsuperuser', interactive=False, username='joe', stdout=new_io) def test_basic_usage(self): "Check the operation of the createsuperuser management command" # We can use the management command to create a superuser new_io = StringIO() call_command( "createsuperuser", interactive=False, username="joe", email="[email protected]", stdout=new_io ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') u = User.objects.get(username="joe") self.assertEqual(u.email, '[email protected]') # created password should be unusable self.assertFalse(u.has_usable_password()) def test_non_ascii_verbose_name(self): @mock_inputs({ 'password': "nopasswd", "Uživatel (leave blank to use '%s'): " % get_default_username(): 'foo', # username (cz) 'email': '[email protected]', }) def test(self): username_field = User._meta.get_field('username') old_verbose_name = username_field.verbose_name username_field.verbose_name = _('u\u017eivatel') new_io = StringIO() try: call_command( "createsuperuser", interactive=True, stdout=new_io, stdin=MockTTY(), ) finally: username_field.verbose_name = old_verbose_name command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') test(self) def test_verbosity_zero(self): # We can suppress output on the management command new_io = StringIO() call_command( "createsuperuser", interactive=False, username="joe2", email="[email protected]", verbosity=0, stdout=new_io ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, '') u = User.objects.get(username="joe2") self.assertEqual(u.email, '[email protected]') self.assertFalse(u.has_usable_password()) def test_email_in_username(self): call_command( "createsuperuser", interactive=False, username="[email protected]", email="[email protected]", verbosity=0, ) u = User._default_manager.get(username="[email protected]") self.assertEqual(u.email, '[email protected]') self.assertFalse(u.has_usable_password()) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUser') def test_swappable_user(self): "A superuser can be created when a custom user model is in use" # We can use the management command to create a superuser # We skip validation because the temporary substitution of the # swappable User model messes with validation. 
new_io = StringIO() call_command( "createsuperuser", interactive=False, email="[email protected]", date_of_birth="1976-04-01", first_name='Joe', stdout=new_io, ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') u = CustomUser._default_manager.get(email="[email protected]") self.assertEqual(u.date_of_birth, date(1976, 4, 1)) # created password should be unusable self.assertFalse(u.has_usable_password()) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUser') def test_swappable_user_missing_required_field(self): "A Custom superuser won't be created when a required field isn't provided" # We can use the management command to create a superuser # We skip validation because the temporary substitution of the # swappable User model messes with validation. new_io = StringIO() with self.assertRaisesMessage(CommandError, 'You must use --email with --noinput.'): call_command( "createsuperuser", interactive=False, stdout=new_io, stderr=new_io, ) self.assertEqual(CustomUser._default_manager.count(), 0) @override_settings( AUTH_USER_MODEL='auth_tests.CustomUserNonUniqueUsername', AUTHENTICATION_BACKENDS=['my.custom.backend'], ) def test_swappable_user_username_non_unique(self): @mock_inputs({ 'username': 'joe', 'password': 'nopasswd', }) def createsuperuser(): new_io = StringIO() call_command( "createsuperuser", interactive=True, email="[email protected]", stdout=new_io, stdin=MockTTY(), ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') for i in range(2): createsuperuser() users = CustomUserNonUniqueUsername.objects.filter(username="joe") self.assertEqual(users.count(), 2) def test_skip_if_not_in_TTY(self): """ If the command is not called from a TTY, it should be skipped and a message should be displayed (#7423). """ class FakeStdin: """A fake stdin object that has isatty() return False.""" def isatty(self): return False out = StringIO() call_command( "createsuperuser", stdin=FakeStdin(), stdout=out, interactive=True, ) self.assertEqual(User._default_manager.count(), 0) self.assertIn("Superuser creation skipped", out.getvalue()) def test_passing_stdin(self): """ You can pass a stdin object as an option and it should be available on self.stdin. If no such option is passed, it defaults to sys.stdin. """ sentinel = object() command = createsuperuser.Command() call_command( command, stdin=sentinel, interactive=False, verbosity=0, username='janet', email='[email protected]', ) self.assertIs(command.stdin, sentinel) command = createsuperuser.Command() call_command( command, interactive=False, verbosity=0, username='joe', email='[email protected]', ) self.assertIs(command.stdin, sys.stdin) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithFK') def test_fields_with_fk(self): new_io = StringIO() group = Group.objects.create(name='mygroup') email = Email.objects.create(email='[email protected]') call_command( 'createsuperuser', interactive=False, username=email.pk, email=email.email, group=group.pk, stdout=new_io, ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') u = CustomUserWithFK._default_manager.get(email=email) self.assertEqual(u.username, email) self.assertEqual(u.group, group) non_existent_email = '[email protected]' msg = 'email instance with email %r does not exist.' 
% non_existent_email with self.assertRaisesMessage(CommandError, msg): call_command( 'createsuperuser', interactive=False, username=email.pk, email=non_existent_email, stdout=new_io, ) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithFK') def test_fields_with_fk_interactive(self): new_io = StringIO() group = Group.objects.create(name='mygroup') email = Email.objects.create(email='[email protected]') @mock_inputs({ 'password': 'nopasswd', 'Username (Email.id): ': email.pk, 'Email (Email.email): ': email.email, 'Group (Group.id): ': group.pk, }) def test(self): call_command( 'createsuperuser', interactive=True, stdout=new_io, stdin=MockTTY(), ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') u = CustomUserWithFK._default_manager.get(email=email) self.assertEqual(u.username, email) self.assertEqual(u.group, group) test(self) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithFK') def test_fields_with_fk_via_option_interactive(self): new_io = StringIO() group = Group.objects.create(name='mygroup') email = Email.objects.create(email='[email protected]') @mock_inputs({'password': 'nopasswd'}) def test(self): call_command( 'createsuperuser', interactive=True, username=email.pk, email=email.email, group=group.pk, stdout=new_io, stdin=MockTTY(), ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') u = CustomUserWithFK._default_manager.get(email=email) self.assertEqual(u.username, email) self.assertEqual(u.group, group) test(self) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithFK') def test_validate_fk(self): email = Email.objects.create(email='[email protected]') Group.objects.all().delete() nonexistent_group_id = 1 msg = f'group instance with id {nonexistent_group_id} does not exist.' with self.assertRaisesMessage(CommandError, msg): call_command( 'createsuperuser', interactive=False, username=email.pk, email=email.email, group=nonexistent_group_id, verbosity=0, ) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithFK') def test_validate_fk_environment_variable(self): email = Email.objects.create(email='[email protected]') Group.objects.all().delete() nonexistent_group_id = 1 msg = f'group instance with id {nonexistent_group_id} does not exist.' with mock.patch.dict( os.environ, {'DJANGO_SUPERUSER_GROUP': str(nonexistent_group_id)}, ): with self.assertRaisesMessage(CommandError, msg): call_command( 'createsuperuser', interactive=False, username=email.pk, email=email.email, verbosity=0, ) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithFK') def test_validate_fk_via_option_interactive(self): email = Email.objects.create(email='[email protected]') Group.objects.all().delete() nonexistent_group_id = 1 msg = f'group instance with id {nonexistent_group_id} does not exist.' 
@mock_inputs({ 'password': 'nopasswd', 'Username (Email.id): ': email.pk, 'Email (Email.email): ': email.email, }) def test(self): with self.assertRaisesMessage(CommandError, msg): call_command( 'createsuperuser', group=nonexistent_group_id, stdin=MockTTY(), verbosity=0, ) test(self) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithM2m') def test_fields_with_m2m(self): new_io = StringIO() org_id_1 = Organization.objects.create(name='Organization 1').pk org_id_2 = Organization.objects.create(name='Organization 2').pk call_command( 'createsuperuser', interactive=False, username='joe', orgs=[org_id_1, org_id_2], stdout=new_io, ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') user = CustomUserWithM2M._default_manager.get(username='joe') self.assertEqual(user.orgs.count(), 2) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithM2M') def test_fields_with_m2m_interactive(self): new_io = StringIO() org_id_1 = Organization.objects.create(name='Organization 1').pk org_id_2 = Organization.objects.create(name='Organization 2').pk @mock_inputs({ 'password': 'nopasswd', 'Username: ': 'joe', 'Orgs (Organization.id): ': '%s, %s' % (org_id_1, org_id_2), }) def test(self): call_command( 'createsuperuser', interactive=True, stdout=new_io, stdin=MockTTY(), ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') user = CustomUserWithM2M._default_manager.get(username='joe') self.assertEqual(user.orgs.count(), 2) test(self) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithM2M') def test_fields_with_m2m_interactive_blank(self): new_io = StringIO() org_id = Organization.objects.create(name='Organization').pk entered_orgs = [str(org_id), ' '] def return_orgs(): return entered_orgs.pop() @mock_inputs({ 'password': 'nopasswd', 'Username: ': 'joe', 'Orgs (Organization.id): ': return_orgs, }) def test(self): call_command( 'createsuperuser', interactive=True, stdout=new_io, stderr=new_io, stdin=MockTTY(), ) self.assertEqual( new_io.getvalue().strip(), 'Error: This field cannot be blank.\n' 'Superuser created successfully.', ) test(self) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithM2MThrough') def test_fields_with_m2m_and_through(self): msg = ( "Required field 'orgs' specifies a many-to-many relation through " "model, which is not supported." ) with self.assertRaisesMessage(CommandError, msg): call_command('createsuperuser') def test_default_username(self): """createsuperuser uses a default username when one isn't provided.""" # Get the default username before creating a user. default_username = get_default_username() new_io = StringIO() entered_passwords = ['password', 'password'] def return_passwords(): return entered_passwords.pop(0) @mock_inputs({'password': return_passwords, 'username': '', 'email': ''}) def test(self): call_command( 'createsuperuser', interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual(new_io.getvalue().strip(), 'Superuser created successfully.') self.assertTrue(User.objects.filter(username=default_username).exists()) test(self) def test_password_validation(self): """ Creation should fail if the password fails validation. 
""" new_io = StringIO() entered_passwords = ['1234567890', '1234567890', 'password', 'password'] def bad_then_good_password(): return entered_passwords.pop(0) @mock_inputs({ 'password': bad_then_good_password, 'username': 'joe1234567890', 'email': '', 'bypass': 'n', }) def test(self): call_command( "createsuperuser", interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual( new_io.getvalue().strip(), "This password is entirely numeric.\n" "Superuser created successfully." ) test(self) @override_settings(AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'}, ]) def test_validate_password_against_username(self): new_io = StringIO() username = 'supremelycomplex' entered_passwords = [username, username, 'superduperunguessablepassword', 'superduperunguessablepassword'] def bad_then_good_password(): return entered_passwords.pop(0) @mock_inputs({ 'password': bad_then_good_password, 'username': username, 'email': '', 'bypass': 'n', }) def test(self): call_command( 'createsuperuser', interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual( new_io.getvalue().strip(), 'The password is too similar to the username.\n' 'Superuser created successfully.' ) test(self) @override_settings( AUTH_USER_MODEL='auth_tests.CustomUser', AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'}, ] ) def test_validate_password_against_required_fields(self): new_io = StringIO() first_name = 'josephine' entered_passwords = [first_name, first_name, 'superduperunguessablepassword', 'superduperunguessablepassword'] def bad_then_good_password(): return entered_passwords.pop(0) @mock_inputs({ 'password': bad_then_good_password, 'username': 'whatever', 'first_name': first_name, 'date_of_birth': '1970-01-01', 'email': '[email protected]', 'bypass': 'n', }) def test(self): call_command( 'createsuperuser', interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual( new_io.getvalue().strip(), "The password is too similar to the first name.\n" "Superuser created successfully." ) test(self) @override_settings( AUTH_USER_MODEL='auth_tests.CustomUser', AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'}, ] ) def test_validate_password_against_required_fields_via_option(self): new_io = StringIO() first_name = 'josephine' entered_passwords = [ first_name, first_name, 'superduperunguessablepassword', 'superduperunguessablepassword', ] def bad_then_good_password(): return entered_passwords.pop(0) @mock_inputs({ 'password': bad_then_good_password, 'bypass': 'n', }) def test(self): call_command( 'createsuperuser', interactive=True, first_name=first_name, date_of_birth='1970-01-01', email='[email protected]', stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual( new_io.getvalue().strip(), 'The password is too similar to the first name.\n' 'Superuser created successfully.' 
) test(self) def test_blank_username(self): """Creation fails if --username is blank.""" new_io = StringIO() with self.assertRaisesMessage(CommandError, 'Username cannot be blank.'): call_command( 'createsuperuser', username='', stdin=MockTTY(), stdout=new_io, stderr=new_io, ) def test_blank_username_non_interactive(self): new_io = StringIO() with self.assertRaisesMessage(CommandError, 'Username cannot be blank.'): call_command( 'createsuperuser', username='', interactive=False, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) def test_password_validation_bypass(self): """ Password validation can be bypassed by entering 'y' at the prompt. """ new_io = StringIO() @mock_inputs({ 'password': '1234567890', 'username': 'joe1234567890', 'email': '', 'bypass': 'y', }) def test(self): call_command( 'createsuperuser', interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual( new_io.getvalue().strip(), 'This password is entirely numeric.\n' 'Superuser created successfully.' ) test(self) def test_invalid_username(self): """Creation fails if the username fails validation.""" user_field = User._meta.get_field(User.USERNAME_FIELD) new_io = StringIO() entered_passwords = ['password', 'password'] # Enter an invalid (too long) username first and then a valid one. invalid_username = ('x' * user_field.max_length) + 'y' entered_usernames = [invalid_username, 'janet'] def return_passwords(): return entered_passwords.pop(0) def return_usernames(): return entered_usernames.pop(0) @mock_inputs({'password': return_passwords, 'username': return_usernames, 'email': ''}) def test(self): call_command( 'createsuperuser', interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual( new_io.getvalue().strip(), 'Error: Ensure this value has at most %s characters (it has %s).\n' 'Superuser created successfully.' % (user_field.max_length, len(invalid_username)) ) test(self) @mock_inputs({'username': 'KeyboardInterrupt'}) def test_keyboard_interrupt(self): new_io = StringIO() with self.assertRaises(SystemExit): call_command( 'createsuperuser', interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual(new_io.getvalue(), '\nOperation cancelled.\n') def test_existing_username(self): """Creation fails if the username already exists.""" user = User.objects.create(username='janet') new_io = StringIO() entered_passwords = ['password', 'password'] # Enter the existing username first and then a new one. entered_usernames = [user.username, 'joe'] def return_passwords(): return entered_passwords.pop(0) def return_usernames(): return entered_usernames.pop(0) @mock_inputs({'password': return_passwords, 'username': return_usernames, 'email': ''}) def test(self): call_command( 'createsuperuser', interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual( new_io.getvalue().strip(), 'Error: That username is already taken.\n' 'Superuser created successfully.' 
) test(self) def test_existing_username_non_interactive(self): """Creation fails if the username already exists.""" User.objects.create(username='janet') new_io = StringIO() with self.assertRaisesMessage(CommandError, "Error: That username is already taken."): call_command( 'createsuperuser', username='janet', email='', interactive=False, stdout=new_io, ) def test_existing_username_provided_via_option_and_interactive(self): """call_command() gets username='janet' and interactive=True.""" new_io = StringIO() entered_passwords = ['password', 'password'] User.objects.create(username='janet') def return_passwords(): return entered_passwords.pop(0) @mock_inputs({ 'password': return_passwords, 'username': 'janet1', 'email': '[email protected]' }) def test(self): call_command( 'createsuperuser', username='janet', interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) msg = 'Error: That username is already taken.\nSuperuser created successfully.' self.assertEqual(new_io.getvalue().strip(), msg) test(self) def test_validation_mismatched_passwords(self): """ Creation should fail if the user enters mismatched passwords. """ new_io = StringIO() # The first two passwords do not match, but the second two do match and # are valid. entered_passwords = ["password", "not password", "password2", "password2"] def mismatched_passwords_then_matched(): return entered_passwords.pop(0) @mock_inputs({ 'password': mismatched_passwords_then_matched, 'username': 'joe1234567890', 'email': '', }) def test(self): call_command( "createsuperuser", interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual( new_io.getvalue().strip(), "Error: Your passwords didn't match.\n" "Superuser created successfully." ) test(self) def test_validation_blank_password_entered(self): """ Creation should fail if the user enters blank passwords. """ new_io = StringIO() # The first two passwords are empty strings, but the second two are # valid. entered_passwords = ["", "", "password2", "password2"] def blank_passwords_then_valid(): return entered_passwords.pop(0) @mock_inputs({ 'password': blank_passwords_then_valid, 'username': 'joe1234567890', 'email': '', }) def test(self): call_command( "createsuperuser", interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual( new_io.getvalue().strip(), "Error: Blank passwords aren't allowed.\n" "Superuser created successfully." 
) test(self) @override_settings(AUTH_USER_MODEL='auth_tests.NoPasswordUser') def test_usermodel_without_password(self): new_io = StringIO() call_command( 'createsuperuser', interactive=False, stdin=MockTTY(), stdout=new_io, stderr=new_io, username='username', ) self.assertEqual(new_io.getvalue().strip(), 'Superuser created successfully.') @override_settings(AUTH_USER_MODEL='auth_tests.NoPasswordUser') def test_usermodel_without_password_interactive(self): new_io = StringIO() @mock_inputs({'username': 'username'}) def test(self): call_command( 'createsuperuser', interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual(new_io.getvalue().strip(), 'Superuser created successfully.') test(self) @mock.patch.dict(os.environ, { 'DJANGO_SUPERUSER_PASSWORD': 'test_password', 'DJANGO_SUPERUSER_USERNAME': 'test_superuser', 'DJANGO_SUPERUSER_EMAIL': '[email protected]', 'DJANGO_SUPERUSER_FIRST_NAME': 'ignored_first_name', }) def test_environment_variable_non_interactive(self): call_command('createsuperuser', interactive=False, verbosity=0) user = User.objects.get(username='test_superuser') self.assertEqual(user.email, '[email protected]') self.assertTrue(user.check_password('test_password')) # Environment variables are ignored for non-required fields. self.assertEqual(user.first_name, '') @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithM2m') def test_environment_variable_m2m_non_interactive(self): new_io = StringIO() org_id_1 = Organization.objects.create(name='Organization 1').pk org_id_2 = Organization.objects.create(name='Organization 2').pk with mock.patch.dict(os.environ, { 'DJANGO_SUPERUSER_ORGS': f'{org_id_1},{org_id_2}', }): call_command( 'createsuperuser', interactive=False, username='joe', stdout=new_io, ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') user = CustomUserWithM2M._default_manager.get(username='joe') self.assertEqual(user.orgs.count(), 2) @mock.patch.dict(os.environ, { 'DJANGO_SUPERUSER_USERNAME': 'test_superuser', 'DJANGO_SUPERUSER_EMAIL': '[email protected]', }) def test_ignore_environment_variable_non_interactive(self): # Environment variables are ignored in non-interactive mode, if # provided by a command line arguments. call_command( 'createsuperuser', interactive=False, username='cmd_superuser', email='[email protected]', verbosity=0, ) user = User.objects.get(username='cmd_superuser') self.assertEqual(user.email, '[email protected]') self.assertFalse(user.has_usable_password()) @mock.patch.dict(os.environ, { 'DJANGO_SUPERUSER_PASSWORD': 'test_password', 'DJANGO_SUPERUSER_USERNAME': 'test_superuser', 'DJANGO_SUPERUSER_EMAIL': '[email protected]', }) def test_ignore_environment_variable_interactive(self): # Environment variables are ignored in interactive mode. @mock_inputs({'password': 'cmd_password'}) def test(self): call_command( 'createsuperuser', interactive=True, username='cmd_superuser', email='[email protected]', stdin=MockTTY(), verbosity=0, ) user = User.objects.get(username='cmd_superuser') self.assertEqual(user.email, '[email protected]') self.assertTrue(user.check_password('cmd_password')) test(self) class MultiDBCreatesuperuserTestCase(TestCase): databases = {'default', 'other'} def test_createsuperuser_command_with_database_option(self): """ createsuperuser --database should operate on the specified DB. 
""" new_io = StringIO() call_command( 'createsuperuser', interactive=False, username='joe', email='[email protected]', database='other', stdout=new_io, ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') user = User.objects.using('other').get(username='joe') self.assertEqual(user.email, '[email protected]') def test_createsuperuser_command_suggested_username_with_database_option(self): default_username = get_default_username(database='other') qs = User.objects.using('other') @mock_inputs({'password': 'nopasswd', 'username': '', 'email': ''}) def test_other_create_with_suggested_username(self): call_command( 'createsuperuser', interactive=True, stdin=MockTTY(), verbosity=0, database='other', ) self.assertIs(qs.filter(username=default_username).exists(), True) test_other_create_with_suggested_username(self) @mock_inputs({'password': 'nopasswd', 'Username: ': 'other', 'email': ''}) def test_other_no_suggestion(self): call_command( 'createsuperuser', interactive=True, stdin=MockTTY(), verbosity=0, database='other', ) self.assertIs(qs.filter(username='other').exists(), True) test_other_no_suggestion(self) class CreatePermissionsTests(TestCase): def setUp(self): self._original_permissions = Permission._meta.permissions[:] self._original_default_permissions = Permission._meta.default_permissions self.app_config = apps.get_app_config('auth') def tearDown(self): Permission._meta.permissions = self._original_permissions Permission._meta.default_permissions = self._original_default_permissions ContentType.objects.clear_cache() def test_default_permissions(self): permission_content_type = ContentType.objects.get_by_natural_key('auth', 'permission') Permission._meta.permissions = [ ('my_custom_permission', 'Some permission'), ] create_permissions(self.app_config, verbosity=0) # view/add/change/delete permission by default + custom permission self.assertEqual(Permission.objects.filter( content_type=permission_content_type, ).count(), 5) Permission.objects.filter(content_type=permission_content_type).delete() Permission._meta.default_permissions = [] create_permissions(self.app_config, verbosity=0) # custom permission only since default permissions is empty self.assertEqual(Permission.objects.filter( content_type=permission_content_type, ).count(), 1) def test_unavailable_models(self): """ #24075 - Permissions shouldn't be created or deleted if the ContentType or Permission models aren't available. """ state = migrations.state.ProjectState() # Unavailable contenttypes.ContentType with self.assertNumQueries(0): create_permissions(self.app_config, verbosity=0, apps=state.apps) # Unavailable auth.Permission state = migrations.state.ProjectState(real_apps={'contenttypes'}) with self.assertNumQueries(0): create_permissions(self.app_config, verbosity=0, apps=state.apps) def test_create_permissions_checks_contenttypes_created(self): """ `post_migrate` handler ordering isn't guaranteed. Simulate a case where create_permissions() is called before create_contenttypes(). """ # Warm the manager cache. ContentType.objects.get_for_model(Group) # Apply a deletion as if e.g. a database 'flush' had been executed. ContentType.objects.filter(app_label='auth', model='group').delete() # This fails with a foreign key constraint without the fix. 
create_permissions(apps.get_app_config('auth'), interactive=False, verbosity=0) def test_permission_with_proxy_content_type_created(self): """ A proxy model's permissions use its own content type rather than the content type of the concrete model. """ opts = UserProxy._meta codename = get_permission_codename('add', opts) self.assertTrue( Permission.objects.filter( content_type__model=opts.model_name, content_type__app_label=opts.app_label, codename=codename, ).exists() )
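# Illustrative sketch, not part of the upstream test module: the permissions
# created and queried above follow Django's "<action>_<model_name>" codename
# convention. Only get_permission_codename() is real Django API here; the
# helper name below is hypothetical and added purely for clarity.
from django.contrib.auth import get_permission_codename


def _default_permission_codenames(opts):
    # For a model whose _meta options are `opts`, create_permissions()
    # generates one permission per default action, e.g. 'add_group',
    # 'change_group', 'delete_group' and 'view_group' for the Group model.
    return [
        get_permission_codename(action, opts)
        for action in ('add', 'change', 'delete', 'view')
    ]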
354195f2cff203a55b3da26a5f74a501d7531a51f746394e96fdf3669c651089
import sys from datetime import date from unittest import mock from django.contrib.auth import ( BACKEND_SESSION_KEY, SESSION_KEY, _clean_credentials, authenticate, get_user, signals, ) from django.contrib.auth.backends import BaseBackend, ModelBackend from django.contrib.auth.hashers import MD5PasswordHasher from django.contrib.auth.models import AnonymousUser, Group, Permission, User from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ImproperlyConfigured, PermissionDenied from django.http import HttpRequest from django.test import ( RequestFactory, SimpleTestCase, TestCase, modify_settings, override_settings, ) from django.views.debug import technical_500_response from django.views.decorators.debug import sensitive_variables from .models import ( CustomPermissionsUser, CustomUser, CustomUserWithoutIsActiveField, ExtensionUser, UUIDUser, ) class SimpleBackend(BaseBackend): def get_user_permissions(self, user_obj, obj=None): return ['user_perm'] def get_group_permissions(self, user_obj, obj=None): return ['group_perm'] @override_settings(AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.SimpleBackend']) class BaseBackendTest(TestCase): @classmethod def setUpTestData(cls): cls.user = User.objects.create_user('test', '[email protected]', 'test') def test_get_user_permissions(self): self.assertEqual(self.user.get_user_permissions(), {'user_perm'}) def test_get_group_permissions(self): self.assertEqual(self.user.get_group_permissions(), {'group_perm'}) def test_get_all_permissions(self): self.assertEqual(self.user.get_all_permissions(), {'user_perm', 'group_perm'}) def test_has_perm(self): self.assertIs(self.user.has_perm('user_perm'), True) self.assertIs(self.user.has_perm('group_perm'), True) self.assertIs(self.user.has_perm('other_perm', TestObj()), False) def test_has_perms_perm_list_invalid(self): msg = 'perm_list must be an iterable of permissions.' with self.assertRaisesMessage(ValueError, msg): self.user.has_perms('user_perm') with self.assertRaisesMessage(ValueError, msg): self.user.has_perms(object()) class CountingMD5PasswordHasher(MD5PasswordHasher): """Hasher that counts how many times it computes a hash.""" calls = 0 def encode(self, *args, **kwargs): type(self).calls += 1 return super().encode(*args, **kwargs) class BaseModelBackendTest: """ A base class for tests that need to validate the ModelBackend with different User models. Subclasses should define a class level UserModel attribute, and a create_users() method to construct two users for test purposes. 
""" backend = 'django.contrib.auth.backends.ModelBackend' def setUp(self): self.patched_settings = modify_settings( AUTHENTICATION_BACKENDS={'append': self.backend}, ) self.patched_settings.enable() self.create_users() def tearDown(self): self.patched_settings.disable() # The custom_perms test messes with ContentTypes, which will # be cached; flush the cache to ensure there are no side effects # Refs #14975, #14925 ContentType.objects.clear_cache() def test_has_perm(self): user = self.UserModel._default_manager.get(pk=self.user.pk) self.assertIs(user.has_perm('auth.test'), False) user.is_staff = True user.save() self.assertIs(user.has_perm('auth.test'), False) user.is_superuser = True user.save() self.assertIs(user.has_perm('auth.test'), True) user.is_staff = True user.is_superuser = True user.is_active = False user.save() self.assertIs(user.has_perm('auth.test'), False) def test_custom_perms(self): user = self.UserModel._default_manager.get(pk=self.user.pk) content_type = ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') user.user_permissions.add(perm) # reloading user to purge the _perm_cache user = self.UserModel._default_manager.get(pk=self.user.pk) self.assertEqual(user.get_all_permissions(), {'auth.test'}) self.assertEqual(user.get_user_permissions(), {'auth.test'}) self.assertEqual(user.get_group_permissions(), set()) self.assertIs(user.has_module_perms('Group'), False) self.assertIs(user.has_module_perms('auth'), True) perm = Permission.objects.create(name='test2', content_type=content_type, codename='test2') user.user_permissions.add(perm) perm = Permission.objects.create(name='test3', content_type=content_type, codename='test3') user.user_permissions.add(perm) user = self.UserModel._default_manager.get(pk=self.user.pk) expected_user_perms = {'auth.test2', 'auth.test', 'auth.test3'} self.assertEqual(user.get_all_permissions(), expected_user_perms) self.assertIs(user.has_perm('test'), False) self.assertIs(user.has_perm('auth.test'), True) self.assertIs(user.has_perms(['auth.test2', 'auth.test3']), True) perm = Permission.objects.create(name='test_group', content_type=content_type, codename='test_group') group = Group.objects.create(name='test_group') group.permissions.add(perm) user.groups.add(group) user = self.UserModel._default_manager.get(pk=self.user.pk) self.assertEqual(user.get_all_permissions(), {*expected_user_perms, 'auth.test_group'}) self.assertEqual(user.get_user_permissions(), expected_user_perms) self.assertEqual(user.get_group_permissions(), {'auth.test_group'}) self.assertIs(user.has_perms(['auth.test3', 'auth.test_group']), True) user = AnonymousUser() self.assertIs(user.has_perm('test'), False) self.assertIs(user.has_perms(['auth.test2', 'auth.test3']), False) def test_has_no_object_perm(self): """Regressiontest for #12462""" user = self.UserModel._default_manager.get(pk=self.user.pk) content_type = ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') user.user_permissions.add(perm) self.assertIs(user.has_perm('auth.test', 'object'), False) self.assertEqual(user.get_all_permissions('object'), set()) self.assertIs(user.has_perm('auth.test'), True) self.assertEqual(user.get_all_permissions(), {'auth.test'}) def test_anonymous_has_no_permissions(self): """ #17903 -- Anonymous users shouldn't have permissions in ModelBackend.get_(all|user|group)_permissions(). 
""" backend = ModelBackend() user = self.UserModel._default_manager.get(pk=self.user.pk) content_type = ContentType.objects.get_for_model(Group) user_perm = Permission.objects.create(name='test', content_type=content_type, codename='test_user') group_perm = Permission.objects.create(name='test2', content_type=content_type, codename='test_group') user.user_permissions.add(user_perm) group = Group.objects.create(name='test_group') user.groups.add(group) group.permissions.add(group_perm) self.assertEqual(backend.get_all_permissions(user), {'auth.test_user', 'auth.test_group'}) self.assertEqual(backend.get_user_permissions(user), {'auth.test_user'}) self.assertEqual(backend.get_group_permissions(user), {'auth.test_group'}) with mock.patch.object(self.UserModel, 'is_anonymous', True): self.assertEqual(backend.get_all_permissions(user), set()) self.assertEqual(backend.get_user_permissions(user), set()) self.assertEqual(backend.get_group_permissions(user), set()) def test_inactive_has_no_permissions(self): """ #17903 -- Inactive users shouldn't have permissions in ModelBackend.get_(all|user|group)_permissions(). """ backend = ModelBackend() user = self.UserModel._default_manager.get(pk=self.user.pk) content_type = ContentType.objects.get_for_model(Group) user_perm = Permission.objects.create(name='test', content_type=content_type, codename='test_user') group_perm = Permission.objects.create(name='test2', content_type=content_type, codename='test_group') user.user_permissions.add(user_perm) group = Group.objects.create(name='test_group') user.groups.add(group) group.permissions.add(group_perm) self.assertEqual(backend.get_all_permissions(user), {'auth.test_user', 'auth.test_group'}) self.assertEqual(backend.get_user_permissions(user), {'auth.test_user'}) self.assertEqual(backend.get_group_permissions(user), {'auth.test_group'}) user.is_active = False user.save() self.assertEqual(backend.get_all_permissions(user), set()) self.assertEqual(backend.get_user_permissions(user), set()) self.assertEqual(backend.get_group_permissions(user), set()) def test_get_all_superuser_permissions(self): """A superuser has all permissions. Refs #14795.""" user = self.UserModel._default_manager.get(pk=self.superuser.pk) self.assertEqual(len(user.get_all_permissions()), len(Permission.objects.all())) @override_settings(PASSWORD_HASHERS=['auth_tests.test_auth_backends.CountingMD5PasswordHasher']) def test_authentication_timing(self): """Hasher is run once regardless of whether the user exists. Refs #20760.""" # Re-set the password, because this tests overrides PASSWORD_HASHERS self.user.set_password('test') self.user.save() CountingMD5PasswordHasher.calls = 0 username = getattr(self.user, self.UserModel.USERNAME_FIELD) authenticate(username=username, password='test') self.assertEqual(CountingMD5PasswordHasher.calls, 1) CountingMD5PasswordHasher.calls = 0 authenticate(username='no_such_user', password='test') self.assertEqual(CountingMD5PasswordHasher.calls, 1) @override_settings(PASSWORD_HASHERS=['auth_tests.test_auth_backends.CountingMD5PasswordHasher']) def test_authentication_without_credentials(self): CountingMD5PasswordHasher.calls = 0 for credentials in ( {}, {'username': getattr(self.user, self.UserModel.USERNAME_FIELD)}, {'password': 'test'}, ): with self.subTest(credentials=credentials): with self.assertNumQueries(0): authenticate(**credentials) self.assertEqual(CountingMD5PasswordHasher.calls, 0) class ModelBackendTest(BaseModelBackendTest, TestCase): """ Tests for the ModelBackend using the default User model. 
""" UserModel = User user_credentials = {'username': 'test', 'password': 'test'} def create_users(self): self.user = User.objects.create_user(email='[email protected]', **self.user_credentials) self.superuser = User.objects.create_superuser( username='test2', email='[email protected]', password='test', ) def test_authenticate_inactive(self): """ An inactive user can't authenticate. """ self.assertEqual(authenticate(**self.user_credentials), self.user) self.user.is_active = False self.user.save() self.assertIsNone(authenticate(**self.user_credentials)) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithoutIsActiveField') def test_authenticate_user_without_is_active_field(self): """ A custom user without an `is_active` field is allowed to authenticate. """ user = CustomUserWithoutIsActiveField.objects._create_user( username='test', email='[email protected]', password='test', ) self.assertEqual(authenticate(username='test', password='test'), user) @override_settings(AUTH_USER_MODEL='auth_tests.ExtensionUser') class ExtensionUserModelBackendTest(BaseModelBackendTest, TestCase): """ Tests for the ModelBackend using the custom ExtensionUser model. This isn't a perfect test, because both the User and ExtensionUser are synchronized to the database, which wouldn't ordinary happen in production. As a result, it doesn't catch errors caused by the non- existence of the User table. The specific problem is queries on .filter(groups__user) et al, which makes an implicit assumption that the user model is called 'User'. In production, the auth.User table won't exist, so the requested join won't exist either; in testing, the auth.User *does* exist, and so does the join. However, the join table won't contain any useful data; for testing, we check that the data we expect actually does exist. """ UserModel = ExtensionUser def create_users(self): self.user = ExtensionUser._default_manager.create_user( username='test', email='[email protected]', password='test', date_of_birth=date(2006, 4, 25) ) self.superuser = ExtensionUser._default_manager.create_superuser( username='test2', email='[email protected]', password='test', date_of_birth=date(1976, 11, 8) ) @override_settings(AUTH_USER_MODEL='auth_tests.CustomPermissionsUser') class CustomPermissionsUserModelBackendTest(BaseModelBackendTest, TestCase): """ Tests for the ModelBackend using the CustomPermissionsUser model. As with the ExtensionUser test, this isn't a perfect test, because both the User and CustomPermissionsUser are synchronized to the database, which wouldn't ordinary happen in production. """ UserModel = CustomPermissionsUser def create_users(self): self.user = CustomPermissionsUser._default_manager.create_user( email='[email protected]', password='test', date_of_birth=date(2006, 4, 25) ) self.superuser = CustomPermissionsUser._default_manager.create_superuser( email='[email protected]', password='test', date_of_birth=date(1976, 11, 8) ) @override_settings(AUTH_USER_MODEL='auth_tests.CustomUser') class CustomUserModelBackendAuthenticateTest(TestCase): """ The model backend can accept a credentials kwarg labeled with custom user model's USERNAME_FIELD. 
""" def test_authenticate(self): test_user = CustomUser._default_manager.create_user( email='[email protected]', password='test', date_of_birth=date(2006, 4, 25) ) authenticated_user = authenticate(email='[email protected]', password='test') self.assertEqual(test_user, authenticated_user) @override_settings(AUTH_USER_MODEL='auth_tests.UUIDUser') class UUIDUserTests(TestCase): def test_login(self): """ A custom user with a UUID primary key should be able to login. """ user = UUIDUser.objects.create_user(username='uuid', password='test') self.assertTrue(self.client.login(username='uuid', password='test')) self.assertEqual(UUIDUser.objects.get(pk=self.client.session[SESSION_KEY]), user) class TestObj: pass class SimpleRowlevelBackend: def has_perm(self, user, perm, obj=None): if not obj: return # We only support row level perms if isinstance(obj, TestObj): if user.username == 'test2': return True elif user.is_anonymous and perm == 'anon': return True elif not user.is_active and perm == 'inactive': return True return False def has_module_perms(self, user, app_label): return (user.is_anonymous or user.is_active) and app_label == 'app1' def get_all_permissions(self, user, obj=None): if not obj: return [] # We only support row level perms if not isinstance(obj, TestObj): return ['none'] if user.is_anonymous: return ['anon'] if user.username == 'test2': return ['simple', 'advanced'] else: return ['simple'] def get_group_permissions(self, user, obj=None): if not obj: return # We only support row level perms if not isinstance(obj, TestObj): return ['none'] if 'test_group' in [group.name for group in user.groups.all()]: return ['group_perm'] else: return ['none'] @modify_settings(AUTHENTICATION_BACKENDS={ 'append': 'auth_tests.test_auth_backends.SimpleRowlevelBackend', }) class RowlevelBackendTest(TestCase): """ Tests for auth backend that supports object level permissions """ @classmethod def setUpTestData(cls): cls.user1 = User.objects.create_user('test', '[email protected]', 'test') cls.user2 = User.objects.create_user('test2', '[email protected]', 'test') cls.user3 = User.objects.create_user('test3', '[email protected]', 'test') def tearDown(self): # The get_group_permissions test messes with ContentTypes, which will # be cached; flush the cache to ensure there are no side effects # Refs #14975, #14925 ContentType.objects.clear_cache() def test_has_perm(self): self.assertIs(self.user1.has_perm('perm', TestObj()), False) self.assertIs(self.user2.has_perm('perm', TestObj()), True) self.assertIs(self.user2.has_perm('perm'), False) self.assertIs(self.user2.has_perms(['simple', 'advanced'], TestObj()), True) self.assertIs(self.user3.has_perm('perm', TestObj()), False) self.assertIs(self.user3.has_perm('anon', TestObj()), False) self.assertIs(self.user3.has_perms(['simple', 'advanced'], TestObj()), False) def test_get_all_permissions(self): self.assertEqual(self.user1.get_all_permissions(TestObj()), {'simple'}) self.assertEqual(self.user2.get_all_permissions(TestObj()), {'simple', 'advanced'}) self.assertEqual(self.user2.get_all_permissions(), set()) def test_get_group_permissions(self): group = Group.objects.create(name='test_group') self.user3.groups.add(group) self.assertEqual(self.user3.get_group_permissions(TestObj()), {'group_perm'}) @override_settings( AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.SimpleRowlevelBackend'], ) class AnonymousUserBackendTest(SimpleTestCase): """ Tests for AnonymousUser delegating to backend. 
""" def setUp(self): self.user1 = AnonymousUser() def test_has_perm(self): self.assertIs(self.user1.has_perm('perm', TestObj()), False) self.assertIs(self.user1.has_perm('anon', TestObj()), True) def test_has_perms(self): self.assertIs(self.user1.has_perms(['anon'], TestObj()), True) self.assertIs(self.user1.has_perms(['anon', 'perm'], TestObj()), False) def test_has_perms_perm_list_invalid(self): msg = 'perm_list must be an iterable of permissions.' with self.assertRaisesMessage(ValueError, msg): self.user1.has_perms('perm') with self.assertRaisesMessage(ValueError, msg): self.user1.has_perms(object()) def test_has_module_perms(self): self.assertIs(self.user1.has_module_perms("app1"), True) self.assertIs(self.user1.has_module_perms("app2"), False) def test_get_all_permissions(self): self.assertEqual(self.user1.get_all_permissions(TestObj()), {'anon'}) @override_settings(AUTHENTICATION_BACKENDS=[]) class NoBackendsTest(TestCase): """ An appropriate error is raised if no auth backends are provided. """ @classmethod def setUpTestData(cls): cls.user = User.objects.create_user('test', '[email protected]', 'test') def test_raises_exception(self): msg = ( 'No authentication backends have been defined. ' 'Does AUTHENTICATION_BACKENDS contain anything?' ) with self.assertRaisesMessage(ImproperlyConfigured, msg): self.user.has_perm(('perm', TestObj())) @override_settings(AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.SimpleRowlevelBackend']) class InActiveUserBackendTest(TestCase): """ Tests for an inactive user """ @classmethod def setUpTestData(cls): cls.user1 = User.objects.create_user('test', '[email protected]', 'test') cls.user1.is_active = False cls.user1.save() def test_has_perm(self): self.assertIs(self.user1.has_perm('perm', TestObj()), False) self.assertIs(self.user1.has_perm('inactive', TestObj()), True) def test_has_module_perms(self): self.assertIs(self.user1.has_module_perms("app1"), False) self.assertIs(self.user1.has_module_perms("app2"), False) class PermissionDeniedBackend: """ Always raises PermissionDenied in `authenticate`, `has_perm` and `has_module_perms`. """ def authenticate(self, request, username=None, password=None): raise PermissionDenied def has_perm(self, user_obj, perm, obj=None): raise PermissionDenied def has_module_perms(self, user_obj, app_label): raise PermissionDenied class PermissionDeniedBackendTest(TestCase): """ Other backends are not checked once a backend raises PermissionDenied """ backend = 'auth_tests.test_auth_backends.PermissionDeniedBackend' @classmethod def setUpTestData(cls): cls.user1 = User.objects.create_user('test', '[email protected]', 'test') def setUp(self): self.user_login_failed = [] signals.user_login_failed.connect(self.user_login_failed_listener) def tearDown(self): signals.user_login_failed.disconnect(self.user_login_failed_listener) def user_login_failed_listener(self, sender, credentials, **kwargs): self.user_login_failed.append(credentials) @modify_settings(AUTHENTICATION_BACKENDS={'prepend': backend}) def test_permission_denied(self): "user is not authenticated after a backend raises permission denied #2550" self.assertIsNone(authenticate(username='test', password='test')) # user_login_failed signal is sent. 
self.assertEqual(self.user_login_failed, [{'password': '********************', 'username': 'test'}]) @modify_settings(AUTHENTICATION_BACKENDS={'append': backend}) def test_authenticates(self): self.assertEqual(authenticate(username='test', password='test'), self.user1) @modify_settings(AUTHENTICATION_BACKENDS={'prepend': backend}) def test_has_perm_denied(self): content_type = ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') self.user1.user_permissions.add(perm) self.assertIs(self.user1.has_perm('auth.test'), False) self.assertIs(self.user1.has_module_perms('auth'), False) @modify_settings(AUTHENTICATION_BACKENDS={'append': backend}) def test_has_perm(self): content_type = ContentType.objects.get_for_model(Group) perm = Permission.objects.create(name='test', content_type=content_type, codename='test') self.user1.user_permissions.add(perm) self.assertIs(self.user1.has_perm('auth.test'), True) self.assertIs(self.user1.has_module_perms('auth'), True) class NewModelBackend(ModelBackend): pass class ChangedBackendSettingsTest(TestCase): """ Tests for changes in the settings.AUTHENTICATION_BACKENDS """ backend = 'auth_tests.test_auth_backends.NewModelBackend' TEST_USERNAME = 'test_user' TEST_PASSWORD = 'test_password' TEST_EMAIL = '[email protected]' @classmethod def setUpTestData(cls): User.objects.create_user(cls.TEST_USERNAME, cls.TEST_EMAIL, cls.TEST_PASSWORD) @override_settings(AUTHENTICATION_BACKENDS=[backend]) def test_changed_backend_settings(self): """ Removing a backend configured in AUTHENTICATION_BACKENDS makes already logged-in users disconnect. """ # Get a session for the test user self.assertTrue(self.client.login( username=self.TEST_USERNAME, password=self.TEST_PASSWORD, )) # Prepare a request object request = HttpRequest() request.session = self.client.session # Remove NewModelBackend with self.settings(AUTHENTICATION_BACKENDS=[ 'django.contrib.auth.backends.ModelBackend']): # Get the user from the request user = get_user(request) # Assert that the user retrieval is successful and the user is # anonymous as the backend is not longer available. self.assertIsNotNone(user) self.assertTrue(user.is_anonymous) class TypeErrorBackend: """ Always raises TypeError. """ @sensitive_variables('password') def authenticate(self, request, username=None, password=None): raise TypeError class SkippedBackend: def authenticate(self): # Doesn't accept any credentials so is skipped by authenticate(). 
pass class SkippedBackendWithDecoratedMethod: @sensitive_variables() def authenticate(self): pass class AuthenticateTests(TestCase): @classmethod def setUpTestData(cls): cls.user1 = User.objects.create_user('test', '[email protected]', 'test') def setUp(self): self.sensitive_password = 'mypassword' @override_settings(AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.TypeErrorBackend']) def test_type_error_raised(self): """A TypeError within a backend is propagated properly (#18171).""" with self.assertRaises(TypeError): authenticate(username='test', password='test') @override_settings(AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.TypeErrorBackend']) def test_authenticate_sensitive_variables(self): try: authenticate(username='testusername', password=self.sensitive_password) except TypeError: exc_info = sys.exc_info() rf = RequestFactory() response = technical_500_response(rf.get('/'), *exc_info) self.assertNotContains(response, self.sensitive_password, status_code=500) self.assertContains(response, 'TypeErrorBackend', status_code=500) self.assertContains( response, '<tr><td>credentials</td><td class="code">' '<pre>&#39;********************&#39;</pre></td></tr>', html=True, status_code=500, ) def test_clean_credentials_sensitive_variables(self): try: # Passing in a list to cause an exception _clean_credentials([1, self.sensitive_password]) except TypeError: exc_info = sys.exc_info() rf = RequestFactory() response = technical_500_response(rf.get('/'), *exc_info) self.assertNotContains(response, self.sensitive_password, status_code=500) self.assertContains( response, '<tr><td>credentials</td><td class="code">' '<pre>&#39;********************&#39;</pre></td></tr>', html=True, status_code=500, ) @override_settings(AUTHENTICATION_BACKENDS=( 'auth_tests.test_auth_backends.SkippedBackend', 'django.contrib.auth.backends.ModelBackend', )) def test_skips_backends_without_arguments(self): """ A backend (SkippedBackend) is ignored if it doesn't accept the credentials as arguments. """ self.assertEqual(authenticate(username='test', password='test'), self.user1) @override_settings(AUTHENTICATION_BACKENDS=( 'auth_tests.test_auth_backends.SkippedBackendWithDecoratedMethod', 'django.contrib.auth.backends.ModelBackend', )) def test_skips_backends_with_decorated_method(self): self.assertEqual(authenticate(username='test', password='test'), self.user1) class ImproperlyConfiguredUserModelTest(TestCase): """ An exception from within get_user_model() is propagated and doesn't raise an UnboundLocalError (#21439). """ @classmethod def setUpTestData(cls): cls.user1 = User.objects.create_user('test', '[email protected]', 'test') def setUp(self): self.client.login(username='test', password='test') @override_settings(AUTH_USER_MODEL='thismodel.doesntexist') def test_does_not_shadow_exception(self): # Prepare a request object request = HttpRequest() request.session = self.client.session msg = ( "AUTH_USER_MODEL refers to model 'thismodel.doesntexist' " "that has not been installed" ) with self.assertRaisesMessage(ImproperlyConfigured, msg): get_user(request) class ImportedModelBackend(ModelBackend): pass class CustomModelBackend(ModelBackend): pass class OtherModelBackend(ModelBackend): pass class ImportedBackendTests(TestCase): """ #23925 - The backend path added to the session should be the same as the one defined in AUTHENTICATION_BACKENDS setting. 
""" backend = 'auth_tests.backend_alias.ImportedModelBackend' @override_settings(AUTHENTICATION_BACKENDS=[backend]) def test_backend_path(self): username = 'username' password = 'password' User.objects.create_user(username, 'email', password) self.assertTrue(self.client.login(username=username, password=password)) request = HttpRequest() request.session = self.client.session self.assertEqual(request.session[BACKEND_SESSION_KEY], self.backend) class SelectingBackendTests(TestCase): backend = 'auth_tests.test_auth_backends.CustomModelBackend' other_backend = 'auth_tests.test_auth_backends.OtherModelBackend' username = 'username' password = 'password' def assertBackendInSession(self, backend): request = HttpRequest() request.session = self.client.session self.assertEqual(request.session[BACKEND_SESSION_KEY], backend) @override_settings(AUTHENTICATION_BACKENDS=[backend]) def test_backend_path_login_without_authenticate_single_backend(self): user = User.objects.create_user(self.username, 'email', self.password) self.client._login(user) self.assertBackendInSession(self.backend) @override_settings(AUTHENTICATION_BACKENDS=[backend, other_backend]) def test_backend_path_login_without_authenticate_multiple_backends(self): user = User.objects.create_user(self.username, 'email', self.password) expected_message = ( 'You have multiple authentication backends configured and ' 'therefore must provide the `backend` argument or set the ' '`backend` attribute on the user.' ) with self.assertRaisesMessage(ValueError, expected_message): self.client._login(user) def test_non_string_backend(self): user = User.objects.create_user(self.username, 'email', self.password) expected_message = ( 'backend must be a dotted import path string (got ' '<class \'django.contrib.auth.backends.ModelBackend\'>).' ) with self.assertRaisesMessage(TypeError, expected_message): self.client._login(user, backend=ModelBackend) @override_settings(AUTHENTICATION_BACKENDS=[backend, other_backend]) def test_backend_path_login_with_explicit_backends(self): user = User.objects.create_user(self.username, 'email', self.password) self.client._login(user, self.other_backend) self.assertBackendInSession(self.other_backend) @override_settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend']) class AllowAllUsersModelBackendTest(TestCase): """ Inactive users may authenticate with the AllowAllUsersModelBackend. """ user_credentials = {'username': 'test', 'password': 'test'} @classmethod def setUpTestData(cls): cls.user = User.objects.create_user( email='[email protected]', is_active=False, **cls.user_credentials ) def test_authenticate(self): self.assertFalse(self.user.is_active) self.assertEqual(authenticate(**self.user_credentials), self.user) def test_get_user(self): self.client.force_login(self.user) request = HttpRequest() request.session = self.client.session user = get_user(request) self.assertEqual(user, self.user)
638b8463cb1ef579b0129db1829de227be79c3d49b32aab0112f0bf068e27972
import os from django.contrib.auth import validators from django.contrib.auth.models import User from django.contrib.auth.password_validation import ( CommonPasswordValidator, MinimumLengthValidator, NumericPasswordValidator, UserAttributeSimilarityValidator, get_default_password_validators, get_password_validators, password_changed, password_validators_help_text_html, password_validators_help_texts, validate_password, ) from django.core.exceptions import ValidationError from django.db import models from django.test import SimpleTestCase, TestCase, override_settings from django.test.utils import isolate_apps from django.utils.html import conditional_escape @override_settings(AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'}, {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': { 'min_length': 12, }}, ]) class PasswordValidationTest(SimpleTestCase): def test_get_default_password_validators(self): validators = get_default_password_validators() self.assertEqual(len(validators), 2) self.assertEqual(validators[0].__class__.__name__, 'CommonPasswordValidator') self.assertEqual(validators[1].__class__.__name__, 'MinimumLengthValidator') self.assertEqual(validators[1].min_length, 12) def test_get_password_validators_custom(self): validator_config = [{'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'}] validators = get_password_validators(validator_config) self.assertEqual(len(validators), 1) self.assertEqual(validators[0].__class__.__name__, 'CommonPasswordValidator') self.assertEqual(get_password_validators([]), []) def test_validate_password(self): self.assertIsNone(validate_password('sufficiently-long')) msg_too_short = 'This password is too short. It must contain at least 12 characters.' 
with self.assertRaises(ValidationError) as cm: validate_password('django4242') self.assertEqual(cm.exception.messages, [msg_too_short]) self.assertEqual(cm.exception.error_list[0].code, 'password_too_short') with self.assertRaises(ValidationError) as cm: validate_password('password') self.assertEqual(cm.exception.messages, ['This password is too common.', msg_too_short]) self.assertEqual(cm.exception.error_list[0].code, 'password_too_common') self.assertIsNone(validate_password('password', password_validators=[])) def test_password_changed(self): self.assertIsNone(password_changed('password')) def test_password_changed_with_custom_validator(self): class Validator: def password_changed(self, password, user): self.password = password self.user = user user = object() validator = Validator() password_changed('password', user=user, password_validators=(validator,)) self.assertIs(validator.user, user) self.assertEqual(validator.password, 'password') def test_password_validators_help_texts(self): help_texts = password_validators_help_texts() self.assertEqual(len(help_texts), 2) self.assertIn('12 characters', help_texts[1]) self.assertEqual(password_validators_help_texts(password_validators=[]), []) def test_password_validators_help_text_html(self): help_text = password_validators_help_text_html() self.assertEqual(help_text.count('<li>'), 2) self.assertIn('12 characters', help_text) def test_password_validators_help_text_html_escaping(self): class AmpersandValidator: def get_help_text(self): return 'Must contain &' help_text = password_validators_help_text_html([AmpersandValidator()]) self.assertEqual(help_text, '<ul><li>Must contain &amp;</li></ul>') # help_text is marked safe and therefore unchanged by conditional_escape(). self.assertEqual(help_text, conditional_escape(help_text)) @override_settings(AUTH_PASSWORD_VALIDATORS=[]) def test_empty_password_validator_help_text_html(self): self.assertEqual(password_validators_help_text_html(), '') class MinimumLengthValidatorTest(SimpleTestCase): def test_validate(self): expected_error = "This password is too short. It must contain at least %d characters." self.assertIsNone(MinimumLengthValidator().validate('12345678')) self.assertIsNone(MinimumLengthValidator(min_length=3).validate('123')) with self.assertRaises(ValidationError) as cm: MinimumLengthValidator().validate('1234567') self.assertEqual(cm.exception.messages, [expected_error % 8]) self.assertEqual(cm.exception.error_list[0].code, 'password_too_short') with self.assertRaises(ValidationError) as cm: MinimumLengthValidator(min_length=3).validate('12') self.assertEqual(cm.exception.messages, [expected_error % 3]) def test_help_text(self): self.assertEqual( MinimumLengthValidator().get_help_text(), "Your password must contain at least 8 characters." ) class UserAttributeSimilarityValidatorTest(TestCase): def test_validate(self): user = User.objects.create_user( username='testclient', password='password', email='[email protected]', first_name='Test', last_name='Client', ) expected_error = "The password is too similar to the %s." 
self.assertIsNone(UserAttributeSimilarityValidator().validate('testclient')) with self.assertRaises(ValidationError) as cm: UserAttributeSimilarityValidator().validate('testclient', user=user), self.assertEqual(cm.exception.messages, [expected_error % "username"]) self.assertEqual(cm.exception.error_list[0].code, 'password_too_similar') with self.assertRaises(ValidationError) as cm: UserAttributeSimilarityValidator().validate('example.com', user=user), self.assertEqual(cm.exception.messages, [expected_error % "email address"]) with self.assertRaises(ValidationError) as cm: UserAttributeSimilarityValidator( user_attributes=['first_name'], max_similarity=0.3, ).validate('testclient', user=user) self.assertEqual(cm.exception.messages, [expected_error % "first name"]) # max_similarity=1 doesn't allow passwords that are identical to the # attribute's value. with self.assertRaises(ValidationError) as cm: UserAttributeSimilarityValidator( user_attributes=['first_name'], max_similarity=1, ).validate(user.first_name, user=user) self.assertEqual(cm.exception.messages, [expected_error % "first name"]) # Very low max_similarity is rejected. msg = 'max_similarity must be at least 0.1' with self.assertRaisesMessage(ValueError, msg): UserAttributeSimilarityValidator(max_similarity=0.09) # Passes validation. self.assertIsNone( UserAttributeSimilarityValidator(user_attributes=['first_name']).validate('testclient', user=user) ) @isolate_apps('auth_tests') def test_validate_property(self): class TestUser(models.Model): pass @property def username(self): return 'foobar' with self.assertRaises(ValidationError) as cm: UserAttributeSimilarityValidator().validate('foobar', user=TestUser()), self.assertEqual(cm.exception.messages, ['The password is too similar to the username.']) def test_help_text(self): self.assertEqual( UserAttributeSimilarityValidator().get_help_text(), 'Your password can’t be too similar to your other personal information.' ) class CommonPasswordValidatorTest(SimpleTestCase): def test_validate(self): expected_error = "This password is too common." self.assertIsNone(CommonPasswordValidator().validate('a-safe-password')) with self.assertRaises(ValidationError) as cm: CommonPasswordValidator().validate('godzilla') self.assertEqual(cm.exception.messages, [expected_error]) def test_validate_custom_list(self): path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'common-passwords-custom.txt') validator = CommonPasswordValidator(password_list_path=path) expected_error = "This password is too common." self.assertIsNone(validator.validate('a-safe-password')) with self.assertRaises(ValidationError) as cm: validator.validate('from-my-custom-list') self.assertEqual(cm.exception.messages, [expected_error]) self.assertEqual(cm.exception.error_list[0].code, 'password_too_common') def test_validate_django_supplied_file(self): validator = CommonPasswordValidator() for password in validator.passwords: self.assertEqual(password, password.lower()) def test_help_text(self): self.assertEqual( CommonPasswordValidator().get_help_text(), 'Your password can’t be a commonly used password.' ) class NumericPasswordValidatorTest(SimpleTestCase): def test_validate(self): expected_error = "This password is entirely numeric." 
self.assertIsNone(NumericPasswordValidator().validate('a-safe-password')) with self.assertRaises(ValidationError) as cm: NumericPasswordValidator().validate('42424242') self.assertEqual(cm.exception.messages, [expected_error]) self.assertEqual(cm.exception.error_list[0].code, 'password_entirely_numeric') def test_help_text(self): self.assertEqual( NumericPasswordValidator().get_help_text(), 'Your password can’t be entirely numeric.' ) class UsernameValidatorsTests(SimpleTestCase): def test_unicode_validator(self): valid_usernames = ['joe', 'René', 'ᴮᴵᴳᴮᴵᴿᴰ', 'أحمد'] invalid_usernames = [ "o'connell", "عبد ال", "zerowidth\u200Bspace", "nonbreaking\u00A0space", "en\u2013dash", 'trailingnewline\u000A', ] v = validators.UnicodeUsernameValidator() for valid in valid_usernames: with self.subTest(valid=valid): v(valid) for invalid in invalid_usernames: with self.subTest(invalid=invalid): with self.assertRaises(ValidationError): v(invalid) def test_ascii_validator(self): valid_usernames = ['glenn', 'GLEnN', 'jean-marc'] invalid_usernames = ["o'connell", 'Éric', 'jean marc', "أحمد", 'trailingnewline\n'] v = validators.ASCIIUsernameValidator() for valid in valid_usernames: with self.subTest(valid=valid): v(valid) for invalid in invalid_usernames: with self.subTest(invalid=invalid): with self.assertRaises(ValidationError): v(invalid)
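# A minimal sketch of a custom password validator following the interface the
# tests above exercise: validate(password, user=None) raising ValidationError
# with an error code, plus get_help_text(). The class name, the digit rule,
# and the settings path in the trailing comment are illustrative assumptions,
# not part of this test suite or of Django itself.
from django.core.exceptions import ValidationError


class ContainsDigitValidator:
    """Illustrative rule: require at least one digit in the password."""

    def validate(self, password, user=None):
        if not any(ch.isdigit() for ch in password):
            raise ValidationError(
                'This password does not contain any digits.',
                code='password_no_digits',
            )

    def get_help_text(self):
        return 'Your password must contain at least one digit.'


# Presumed wiring in a project's settings (module path is hypothetical):
# AUTH_PASSWORD_VALIDATORS = [
#     {'NAME': 'myproject.validators.ContainsDigitValidator'},
# ]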
223f72472eb2456ee97f5d7126279f0af51891c0bc38b8f423e3b31031954afe
from unittest import mock, skipUnless from django.conf.global_settings import PASSWORD_HASHERS from django.contrib.auth.hashers import ( UNUSABLE_PASSWORD_PREFIX, UNUSABLE_PASSWORD_SUFFIX_LENGTH, BasePasswordHasher, BCryptPasswordHasher, BCryptSHA256PasswordHasher, MD5PasswordHasher, PBKDF2PasswordHasher, PBKDF2SHA1PasswordHasher, ScryptPasswordHasher, SHA1PasswordHasher, check_password, get_hasher, identify_hasher, is_password_usable, make_password, ) from django.test import SimpleTestCase from django.test.utils import override_settings try: import crypt except ImportError: crypt = None else: # On some platforms (e.g. OpenBSD), crypt.crypt() always return None. if crypt.crypt('') is None: crypt = None try: import bcrypt except ImportError: bcrypt = None try: import argon2 except ImportError: argon2 = None class PBKDF2SingleIterationHasher(PBKDF2PasswordHasher): iterations = 1 @override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS) class TestUtilsHashPass(SimpleTestCase): def test_simple(self): encoded = make_password('lètmein') self.assertTrue(encoded.startswith('pbkdf2_sha256$')) self.assertTrue(is_password_usable(encoded)) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) # Blank passwords blank_encoded = make_password('') self.assertTrue(blank_encoded.startswith('pbkdf2_sha256$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) def test_bytes(self): encoded = make_password(b'bytes_password') self.assertTrue(encoded.startswith('pbkdf2_sha256$')) self.assertIs(is_password_usable(encoded), True) self.assertIs(check_password(b'bytes_password', encoded), True) def test_invalid_password(self): msg = 'Password must be a string or bytes, got int.' with self.assertRaisesMessage(TypeError, msg): make_password(1) def test_pbkdf2(self): encoded = make_password('lètmein', 'seasalt', 'pbkdf2_sha256') self.assertEqual(encoded, 'pbkdf2_sha256$390000$seasalt$8xBlGd3jVgvJ+92hWPxi5ww0uuAuAnKgC45eudxro7c=') self.assertTrue(is_password_usable(encoded)) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "pbkdf2_sha256") # Blank passwords blank_encoded = make_password('', 'seasalt', 'pbkdf2_sha256') self.assertTrue(blank_encoded.startswith('pbkdf2_sha256$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) # Salt entropy check. 
hasher = get_hasher('pbkdf2_sha256') encoded_weak_salt = make_password('lètmein', 'iodizedsalt', 'pbkdf2_sha256') encoded_strong_salt = make_password('lètmein', hasher.salt(), 'pbkdf2_sha256') self.assertIs(hasher.must_update(encoded_weak_salt), True) self.assertIs(hasher.must_update(encoded_strong_salt), False) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher']) def test_sha1(self): encoded = make_password('lètmein', 'seasalt', 'sha1') self.assertEqual(encoded, 'sha1$seasalt$cff36ea83f5706ce9aa7454e63e431fc726b2dc8') self.assertTrue(is_password_usable(encoded)) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "sha1") # Blank passwords blank_encoded = make_password('', 'seasalt', 'sha1') self.assertTrue(blank_encoded.startswith('sha1$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) # Salt entropy check. hasher = get_hasher('sha1') encoded_weak_salt = make_password('lètmein', 'iodizedsalt', 'sha1') encoded_strong_salt = make_password('lètmein', hasher.salt(), 'sha1') self.assertIs(hasher.must_update(encoded_weak_salt), True) self.assertIs(hasher.must_update(encoded_strong_salt), False) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.MD5PasswordHasher']) def test_md5(self): encoded = make_password('lètmein', 'seasalt', 'md5') self.assertEqual(encoded, 'md5$seasalt$3f86d0d3d465b7b458c231bf3555c0e3') self.assertTrue(is_password_usable(encoded)) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "md5") # Blank passwords blank_encoded = make_password('', 'seasalt', 'md5') self.assertTrue(blank_encoded.startswith('md5$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) # Salt entropy check. hasher = get_hasher('md5') encoded_weak_salt = make_password('lètmein', 'iodizedsalt', 'md5') encoded_strong_salt = make_password('lètmein', hasher.salt(), 'md5') self.assertIs(hasher.must_update(encoded_weak_salt), True) self.assertIs(hasher.must_update(encoded_strong_salt), False) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.UnsaltedMD5PasswordHasher']) def test_unsalted_md5(self): encoded = make_password('lètmein', '', 'unsalted_md5') self.assertEqual(encoded, '88a434c88cca4e900f7874cd98123f43') self.assertTrue(is_password_usable(encoded)) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "unsalted_md5") # Alternate unsalted syntax alt_encoded = "md5$$%s" % encoded self.assertTrue(is_password_usable(alt_encoded)) self.assertTrue(check_password('lètmein', alt_encoded)) self.assertFalse(check_password('lètmeinz', alt_encoded)) # Blank passwords blank_encoded = make_password('', '', 'unsalted_md5') self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.UnsaltedMD5PasswordHasher']) def test_unsalted_md5_encode_invalid_salt(self): hasher = get_hasher('unsalted_md5') msg = 'salt must be empty.' 
with self.assertRaisesMessage(ValueError, msg): hasher.encode('password', salt='salt') @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.UnsaltedSHA1PasswordHasher']) def test_unsalted_sha1(self): encoded = make_password('lètmein', '', 'unsalted_sha1') self.assertEqual(encoded, 'sha1$$6d138ca3ae545631b3abd71a4f076ce759c5700b') self.assertTrue(is_password_usable(encoded)) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "unsalted_sha1") # Raw SHA1 isn't acceptable alt_encoded = encoded[6:] self.assertFalse(check_password('lètmein', alt_encoded)) # Blank passwords blank_encoded = make_password('', '', 'unsalted_sha1') self.assertTrue(blank_encoded.startswith('sha1$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.UnsaltedSHA1PasswordHasher']) def test_unsalted_sha1_encode_invalid_salt(self): hasher = get_hasher('unsalted_sha1') msg = 'salt must be empty.' with self.assertRaisesMessage(ValueError, msg): hasher.encode('password', salt='salt') @skipUnless(crypt, "no crypt module to generate password.") @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.CryptPasswordHasher']) def test_crypt(self): encoded = make_password('lètmei', 'ab', 'crypt') self.assertEqual(encoded, 'crypt$$ab1Hv2Lg7ltQo') self.assertTrue(is_password_usable(encoded)) self.assertTrue(check_password('lètmei', encoded)) self.assertFalse(check_password('lètmeiz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "crypt") # Blank passwords blank_encoded = make_password('', 'ab', 'crypt') self.assertTrue(blank_encoded.startswith('crypt$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) @skipUnless(crypt, 'no crypt module to generate password.') @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.CryptPasswordHasher']) def test_crypt_encode_invalid_salt(self): hasher = get_hasher('crypt') msg = 'salt must be of length 2.' with self.assertRaisesMessage(ValueError, msg): hasher.encode('password', salt='a') @skipUnless(crypt, 'no crypt module to generate password.') @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.CryptPasswordHasher']) def test_crypt_encode_invalid_hash(self): hasher = get_hasher('crypt') msg = 'hash must be provided.' 
with mock.patch('crypt.crypt', return_value=None): with self.assertRaisesMessage(TypeError, msg): hasher.encode('password', salt='ab') @skipUnless(bcrypt, "bcrypt not installed") def test_bcrypt_sha256(self): encoded = make_password('lètmein', hasher='bcrypt_sha256') self.assertTrue(is_password_usable(encoded)) self.assertTrue(encoded.startswith('bcrypt_sha256$')) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "bcrypt_sha256") # password truncation no longer works password = ( 'VSK0UYV6FFQVZ0KG88DYN9WADAADZO1CTSIVDJUNZSUML6IBX7LN7ZS3R5' 'JGB3RGZ7VI7G7DJQ9NI8BQFSRPTG6UWTTVESA5ZPUN' ) encoded = make_password(password, hasher='bcrypt_sha256') self.assertTrue(check_password(password, encoded)) self.assertFalse(check_password(password[:72], encoded)) # Blank passwords blank_encoded = make_password('', hasher='bcrypt_sha256') self.assertTrue(blank_encoded.startswith('bcrypt_sha256$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) @skipUnless(bcrypt, "bcrypt not installed") @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.BCryptPasswordHasher']) def test_bcrypt(self): encoded = make_password('lètmein', hasher='bcrypt') self.assertTrue(is_password_usable(encoded)) self.assertTrue(encoded.startswith('bcrypt$')) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "bcrypt") # Blank passwords blank_encoded = make_password('', hasher='bcrypt') self.assertTrue(blank_encoded.startswith('bcrypt$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) @skipUnless(bcrypt, "bcrypt not installed") @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.BCryptPasswordHasher']) def test_bcrypt_upgrade(self): hasher = get_hasher('bcrypt') self.assertEqual('bcrypt', hasher.algorithm) self.assertNotEqual(hasher.rounds, 4) old_rounds = hasher.rounds try: # Generate a password with 4 rounds. hasher.rounds = 4 encoded = make_password('letmein', hasher='bcrypt') rounds = hasher.safe_summary(encoded)['work factor'] self.assertEqual(rounds, 4) state = {'upgraded': False} def setter(password): state['upgraded'] = True # No upgrade is triggered. self.assertTrue(check_password('letmein', encoded, setter, 'bcrypt')) self.assertFalse(state['upgraded']) # Revert to the old rounds count and ... hasher.rounds = old_rounds # ... check if the password would get updated to the new count. 
self.assertTrue(check_password('letmein', encoded, setter, 'bcrypt')) self.assertTrue(state['upgraded']) finally: hasher.rounds = old_rounds @skipUnless(bcrypt, "bcrypt not installed") @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.BCryptPasswordHasher']) def test_bcrypt_harden_runtime(self): hasher = get_hasher('bcrypt') self.assertEqual('bcrypt', hasher.algorithm) with mock.patch.object(hasher, 'rounds', 4): encoded = make_password('letmein', hasher='bcrypt') with mock.patch.object(hasher, 'rounds', 6), \ mock.patch.object(hasher, 'encode', side_effect=hasher.encode): hasher.harden_runtime('wrong_password', encoded) # Increasing rounds from 4 to 6 means an increase of 4 in workload, # therefore hardening should run 3 times to make the timing the # same (the original encode() call already ran once). self.assertEqual(hasher.encode.call_count, 3) # Get the original salt (includes the original workload factor) algorithm, data = encoded.split('$', 1) expected_call = (('wrong_password', data[:29].encode()),) self.assertEqual(hasher.encode.call_args_list, [expected_call] * 3) def test_unusable(self): encoded = make_password(None) self.assertEqual(len(encoded), len(UNUSABLE_PASSWORD_PREFIX) + UNUSABLE_PASSWORD_SUFFIX_LENGTH) self.assertFalse(is_password_usable(encoded)) self.assertFalse(check_password(None, encoded)) self.assertFalse(check_password(encoded, encoded)) self.assertFalse(check_password(UNUSABLE_PASSWORD_PREFIX, encoded)) self.assertFalse(check_password('', encoded)) self.assertFalse(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) with self.assertRaisesMessage(ValueError, 'Unknown password hashing algorith'): identify_hasher(encoded) # Assert that the unusable passwords actually contain a random part. # This might fail one day due to a hash collision. self.assertNotEqual(encoded, make_password(None), "Random password collision?") def test_unspecified_password(self): """ Makes sure specifying no plain password with a valid encoded password returns `False`. """ self.assertFalse(check_password(None, make_password('lètmein'))) def test_bad_algorithm(self): msg = ( "Unknown password hashing algorithm '%s'. Did you specify it in " "the PASSWORD_HASHERS setting?" 
) with self.assertRaisesMessage(ValueError, msg % 'lolcat'): make_password('lètmein', hasher='lolcat') with self.assertRaisesMessage(ValueError, msg % 'lolcat'): identify_hasher('lolcat$salt$hash') def test_is_password_usable(self): passwords = ('lètmein_badencoded', '', None) for password in passwords: with self.subTest(password=password): self.assertIs(is_password_usable(password), True) def test_low_level_pbkdf2(self): hasher = PBKDF2PasswordHasher() encoded = hasher.encode('lètmein', 'seasalt2') self.assertEqual(encoded, 'pbkdf2_sha256$390000$seasalt2$geC/uZ92nRXDSjSxeoiBqYyRcrLzMm8xK3ro1QS1uo8=') self.assertTrue(hasher.verify('lètmein', encoded)) def test_low_level_pbkdf2_sha1(self): hasher = PBKDF2SHA1PasswordHasher() encoded = hasher.encode('lètmein', 'seasalt2') self.assertEqual(encoded, 'pbkdf2_sha1$390000$seasalt2$aDapRanzW8aHTz97v2TcfHzWD+I=') self.assertTrue(hasher.verify('lètmein', encoded)) @skipUnless(bcrypt, 'bcrypt not installed') def test_bcrypt_salt_check(self): hasher = BCryptPasswordHasher() encoded = hasher.encode('lètmein', hasher.salt()) self.assertIs(hasher.must_update(encoded), False) @skipUnless(bcrypt, 'bcrypt not installed') def test_bcryptsha256_salt_check(self): hasher = BCryptSHA256PasswordHasher() encoded = hasher.encode('lètmein', hasher.salt()) self.assertIs(hasher.must_update(encoded), False) @override_settings( PASSWORD_HASHERS=[ 'django.contrib.auth.hashers.PBKDF2PasswordHasher', 'django.contrib.auth.hashers.SHA1PasswordHasher', 'django.contrib.auth.hashers.MD5PasswordHasher', ], ) def test_upgrade(self): self.assertEqual('pbkdf2_sha256', get_hasher('default').algorithm) for algo in ('sha1', 'md5'): with self.subTest(algo=algo): encoded = make_password('lètmein', hasher=algo) state = {'upgraded': False} def setter(password): state['upgraded'] = True self.assertTrue(check_password('lètmein', encoded, setter)) self.assertTrue(state['upgraded']) def test_no_upgrade(self): encoded = make_password('lètmein') state = {'upgraded': False} def setter(): state['upgraded'] = True self.assertFalse(check_password('WRONG', encoded, setter)) self.assertFalse(state['upgraded']) @override_settings( PASSWORD_HASHERS=[ 'django.contrib.auth.hashers.PBKDF2PasswordHasher', 'django.contrib.auth.hashers.SHA1PasswordHasher', 'django.contrib.auth.hashers.MD5PasswordHasher', ], ) def test_no_upgrade_on_incorrect_pass(self): self.assertEqual('pbkdf2_sha256', get_hasher('default').algorithm) for algo in ('sha1', 'md5'): with self.subTest(algo=algo): encoded = make_password('lètmein', hasher=algo) state = {'upgraded': False} def setter(): state['upgraded'] = True self.assertFalse(check_password('WRONG', encoded, setter)) self.assertFalse(state['upgraded']) def test_pbkdf2_upgrade(self): hasher = get_hasher('default') self.assertEqual('pbkdf2_sha256', hasher.algorithm) self.assertNotEqual(hasher.iterations, 1) old_iterations = hasher.iterations try: # Generate a password with 1 iteration. hasher.iterations = 1 encoded = make_password('letmein') algo, iterations, salt, hash = encoded.split('$', 3) self.assertEqual(iterations, '1') state = {'upgraded': False} def setter(password): state['upgraded'] = True # No upgrade is triggered self.assertTrue(check_password('letmein', encoded, setter)) self.assertFalse(state['upgraded']) # Revert to the old iteration count and ... hasher.iterations = old_iterations # ... check if the password would get updated to the new iteration count. 
self.assertTrue(check_password('letmein', encoded, setter)) self.assertTrue(state['upgraded']) finally: hasher.iterations = old_iterations def test_pbkdf2_harden_runtime(self): hasher = get_hasher('default') self.assertEqual('pbkdf2_sha256', hasher.algorithm) with mock.patch.object(hasher, 'iterations', 1): encoded = make_password('letmein') with mock.patch.object(hasher, 'iterations', 6), \ mock.patch.object(hasher, 'encode', side_effect=hasher.encode): hasher.harden_runtime('wrong_password', encoded) # Encode should get called once ... self.assertEqual(hasher.encode.call_count, 1) # ... with the original salt and 5 iterations. algorithm, iterations, salt, hash = encoded.split('$', 3) expected_call = (('wrong_password', salt, 5),) self.assertEqual(hasher.encode.call_args, expected_call) def test_pbkdf2_upgrade_new_hasher(self): hasher = get_hasher('default') self.assertEqual('pbkdf2_sha256', hasher.algorithm) self.assertNotEqual(hasher.iterations, 1) state = {'upgraded': False} def setter(password): state['upgraded'] = True with self.settings(PASSWORD_HASHERS=[ 'auth_tests.test_hashers.PBKDF2SingleIterationHasher']): encoded = make_password('letmein') algo, iterations, salt, hash = encoded.split('$', 3) self.assertEqual(iterations, '1') # No upgrade is triggered self.assertTrue(check_password('letmein', encoded, setter)) self.assertFalse(state['upgraded']) # Revert to the old iteration count and check if the password would get # updated to the new iteration count. with self.settings(PASSWORD_HASHERS=[ 'django.contrib.auth.hashers.PBKDF2PasswordHasher', 'auth_tests.test_hashers.PBKDF2SingleIterationHasher']): self.assertTrue(check_password('letmein', encoded, setter)) self.assertTrue(state['upgraded']) def test_check_password_calls_harden_runtime(self): hasher = get_hasher('default') encoded = make_password('letmein') with mock.patch.object(hasher, 'harden_runtime'), \ mock.patch.object(hasher, 'must_update', return_value=True): # Correct password supplied, no hardening needed check_password('letmein', encoded) self.assertEqual(hasher.harden_runtime.call_count, 0) # Wrong password supplied, hardening needed check_password('wrong_password', encoded) self.assertEqual(hasher.harden_runtime.call_count, 1) def test_encode_invalid_salt(self): hasher_classes = [ MD5PasswordHasher, PBKDF2PasswordHasher, PBKDF2SHA1PasswordHasher, ScryptPasswordHasher, SHA1PasswordHasher, ] msg = 'salt must be provided and cannot contain $.' for hasher_class in hasher_classes: hasher = hasher_class() for salt in [None, '', 'sea$salt']: with self.subTest(hasher_class.__name__, salt=salt): with self.assertRaisesMessage(ValueError, msg): hasher.encode('password', salt) def test_encode_password_required(self): hasher_classes = [ MD5PasswordHasher, PBKDF2PasswordHasher, PBKDF2SHA1PasswordHasher, ScryptPasswordHasher, SHA1PasswordHasher, ] msg = 'password must be provided.' 
for hasher_class in hasher_classes: hasher = hasher_class() with self.subTest(hasher_class.__name__): with self.assertRaisesMessage(TypeError, msg): hasher.encode(None, 'seasalt') class BasePasswordHasherTests(SimpleTestCase): not_implemented_msg = 'subclasses of BasePasswordHasher must provide %s() method' def setUp(self): self.hasher = BasePasswordHasher() def test_load_library_no_algorithm(self): msg = "Hasher 'BasePasswordHasher' doesn't specify a library attribute" with self.assertRaisesMessage(ValueError, msg): self.hasher._load_library() def test_load_library_importerror(self): PlainHasher = type('PlainHasher', (BasePasswordHasher,), {'algorithm': 'plain', 'library': 'plain'}) msg = "Couldn't load 'PlainHasher' algorithm library: No module named 'plain'" with self.assertRaisesMessage(ValueError, msg): PlainHasher()._load_library() def test_attributes(self): self.assertIsNone(self.hasher.algorithm) self.assertIsNone(self.hasher.library) def test_encode(self): msg = self.not_implemented_msg % 'an encode' with self.assertRaisesMessage(NotImplementedError, msg): self.hasher.encode('password', 'salt') def test_decode(self): msg = self.not_implemented_msg % 'a decode' with self.assertRaisesMessage(NotImplementedError, msg): self.hasher.decode('encoded') def test_harden_runtime(self): msg = 'subclasses of BasePasswordHasher should provide a harden_runtime() method' with self.assertWarnsMessage(Warning, msg): self.hasher.harden_runtime('password', 'encoded') def test_must_update(self): self.assertIs(self.hasher.must_update('encoded'), False) def test_safe_summary(self): msg = self.not_implemented_msg % 'a safe_summary' with self.assertRaisesMessage(NotImplementedError, msg): self.hasher.safe_summary('encoded') def test_verify(self): msg = self.not_implemented_msg % 'a verify' with self.assertRaisesMessage(NotImplementedError, msg): self.hasher.verify('password', 'encoded') @skipUnless(argon2, "argon2-cffi not installed") @override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS) class TestUtilsHashPassArgon2(SimpleTestCase): def test_argon2(self): encoded = make_password('lètmein', hasher='argon2') self.assertTrue(is_password_usable(encoded)) self.assertTrue(encoded.startswith('argon2$argon2id$')) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, 'argon2') # Blank passwords blank_encoded = make_password('', hasher='argon2') self.assertTrue(blank_encoded.startswith('argon2$argon2id$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) # Old hashes without version attribute encoded = ( 'argon2$argon2i$m=8,t=1,p=1$c29tZXNhbHQ$gwQOXSNhxiOxPOA0+PY10P9QFO' '4NAYysnqRt1GSQLE55m+2GYDt9FEjPMHhP2Cuf0nOEXXMocVrsJAtNSsKyfg' ) self.assertTrue(check_password('secret', encoded)) self.assertFalse(check_password('wrong', encoded)) # Old hashes with version attribute. encoded = ( 'argon2$argon2i$v=19$m=8,t=1,p=1$c2FsdHNhbHQ$YC9+jJCrQhs5R6db7LlN8Q' ) self.assertIs(check_password('secret', encoded), True) self.assertIs(check_password('wrong', encoded), False) # Salt entropy check. 
hasher = get_hasher('argon2') encoded_weak_salt = make_password('lètmein', 'iodizedsalt', 'argon2') encoded_strong_salt = make_password('lètmein', hasher.salt(), 'argon2') self.assertIs(hasher.must_update(encoded_weak_salt), True) self.assertIs(hasher.must_update(encoded_strong_salt), False) def test_argon2_decode(self): salt = 'abcdefghijk' encoded = make_password('lètmein', salt=salt, hasher='argon2') hasher = get_hasher('argon2') decoded = hasher.decode(encoded) self.assertEqual(decoded['memory_cost'], hasher.memory_cost) self.assertEqual(decoded['parallelism'], hasher.parallelism) self.assertEqual(decoded['salt'], salt) self.assertEqual(decoded['time_cost'], hasher.time_cost) def test_argon2_upgrade(self): self._test_argon2_upgrade('time_cost', 'time cost', 1) self._test_argon2_upgrade('memory_cost', 'memory cost', 64) self._test_argon2_upgrade('parallelism', 'parallelism', 1) def test_argon2_version_upgrade(self): hasher = get_hasher('argon2') state = {'upgraded': False} encoded = ( 'argon2$argon2id$v=19$m=102400,t=2,p=8$Y041dExhNkljRUUy$TMa6A8fPJh' 'CAUXRhJXCXdw' ) def setter(password): state['upgraded'] = True old_m = hasher.memory_cost old_t = hasher.time_cost old_p = hasher.parallelism try: hasher.memory_cost = 8 hasher.time_cost = 1 hasher.parallelism = 1 self.assertTrue(check_password('secret', encoded, setter, 'argon2')) self.assertTrue(state['upgraded']) finally: hasher.memory_cost = old_m hasher.time_cost = old_t hasher.parallelism = old_p def _test_argon2_upgrade(self, attr, summary_key, new_value): hasher = get_hasher('argon2') self.assertEqual('argon2', hasher.algorithm) self.assertNotEqual(getattr(hasher, attr), new_value) old_value = getattr(hasher, attr) try: # Generate hash with attr set to 1 setattr(hasher, attr, new_value) encoded = make_password('letmein', hasher='argon2') attr_value = hasher.safe_summary(encoded)[summary_key] self.assertEqual(attr_value, new_value) state = {'upgraded': False} def setter(password): state['upgraded'] = True # No upgrade is triggered. self.assertTrue(check_password('letmein', encoded, setter, 'argon2')) self.assertFalse(state['upgraded']) # Revert to the old rounds count and ... setattr(hasher, attr, old_value) # ... check if the password would get updated to the new count. self.assertTrue(check_password('letmein', encoded, setter, 'argon2')) self.assertTrue(state['upgraded']) finally: setattr(hasher, attr, old_value) @override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS) class TestUtilsHashPassScrypt(SimpleTestCase): def test_scrypt(self): encoded = make_password('lètmein', 'seasalt', 'scrypt') self.assertEqual( encoded, 'scrypt$16384$seasalt$8$1$Qj3+9PPyRjSJIebHnG81TMjsqtaIGxNQG/aEB/NY' 'afTJ7tibgfYz71m0ldQESkXFRkdVCBhhY8mx7rQwite/Pw==' ) self.assertIs(is_password_usable(encoded), True) self.assertIs(check_password('lètmein', encoded), True) self.assertIs(check_password('lètmeinz', encoded), False) self.assertEqual(identify_hasher(encoded).algorithm, "scrypt") # Blank passwords. 
        blank_encoded = make_password('', 'seasalt', 'scrypt')
        self.assertIs(blank_encoded.startswith('scrypt$'), True)
        self.assertIs(is_password_usable(blank_encoded), True)
        self.assertIs(check_password('', blank_encoded), True)
        self.assertIs(check_password(' ', blank_encoded), False)

    def test_scrypt_decode(self):
        encoded = make_password('lètmein', 'seasalt', 'scrypt')
        hasher = get_hasher('scrypt')
        decoded = hasher.decode(encoded)
        tests = [
            ('block_size', hasher.block_size),
            ('parallelism', hasher.parallelism),
            ('salt', 'seasalt'),
            ('work_factor', hasher.work_factor),
        ]
        for key, expected in tests:
            with self.subTest(key=key):
                self.assertEqual(decoded[key], expected)

    def _test_scrypt_upgrade(self, attr, summary_key, new_value):
        hasher = get_hasher('scrypt')
        self.assertEqual(hasher.algorithm, 'scrypt')
        self.assertNotEqual(getattr(hasher, attr), new_value)
        old_value = getattr(hasher, attr)
        try:
            # Generate hash with attr set to the new value.
            setattr(hasher, attr, new_value)
            encoded = make_password('lètmein', 'seasalt', 'scrypt')
            attr_value = hasher.safe_summary(encoded)[summary_key]
            self.assertEqual(attr_value, new_value)

            state = {'upgraded': False}

            def setter(password):
                state['upgraded'] = True

            # No update is triggered.
            self.assertIs(check_password('lètmein', encoded, setter, 'scrypt'), True)
            self.assertIs(state['upgraded'], False)
            # Revert to the old value.
            setattr(hasher, attr, old_value)
            # Password is updated.
            self.assertIs(check_password('lètmein', encoded, setter, 'scrypt'), True)
            self.assertIs(state['upgraded'], True)
        finally:
            setattr(hasher, attr, old_value)

    def test_scrypt_upgrade(self):
        tests = [
            ('work_factor', 'work factor', 2 ** 11),
            ('block_size', 'block size', 10),
            ('parallelism', 'parallelism', 2),
        ]
        for attr, summary_key, new_value in tests:
            with self.subTest(attr=attr):
                self._test_scrypt_upgrade(attr, summary_key, new_value)
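# A minimal sketch of the upgrade-on-verify pattern that several tests above
# exercise (test_upgrade, test_bcrypt_upgrade, test_pbkdf2_upgrade): pass a
# setter to check_password() so an outdated hash is re-encoded with the
# current default hasher once the correct password is seen. The function and
# callback names are illustrative, not Django APIs.
from django.contrib.auth.hashers import check_password, make_password


def verify_and_maybe_upgrade(raw_password, stored_hash, save_hash):
    """Return True if raw_password matches stored_hash, upgrading stale hashes."""
    def setter(password):
        # check_password() calls this only when the password is correct and
        # the stored hash uses an outdated algorithm, work factor, or salt.
        save_hash(make_password(password))

    return check_password(raw_password, stored_hash, setter)


# Hypothetical usage: verify_and_maybe_upgrade(submitted, user.password,
# lambda encoded: persist(encoded)), where persist() writes the new hash back.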
fc50e70f9dc52ad5a27ff9c424c63eb75177ff09084015e0331e8ed0bfee90d7
import os from django.core.exceptions import SuspiciousFileOperation from django.core.files.base import ContentFile from django.core.files.storage import FileSystemStorage, Storage from django.db.models import FileField from django.test import SimpleTestCase class AWSS3Storage(Storage): """ Simulate an AWS S3 storage which uses Unix-like paths and allows any characters in file names but where there aren't actual folders but just keys. """ prefix = 'mys3folder/' def _save(self, name, content): """ This method is important to test that Storage.save() doesn't replace '\' with '/' (rather FileSystemStorage.save() does). """ return name def get_valid_name(self, name): return name def get_available_name(self, name, max_length=None): return name def generate_filename(self, filename): """ This is the method that's important to override when using S3 so that os.path() isn't called, which would break S3 keys. """ return self.prefix + self.get_valid_name(filename) class GenerateFilenameStorageTests(SimpleTestCase): def test_storage_dangerous_paths(self): candidates = [ ('/tmp/..', '..'), ('/tmp/.', '.'), ('', ''), ] s = FileSystemStorage() msg = "Could not derive file name from '%s'" for file_name, base_name in candidates: with self.subTest(file_name=file_name): with self.assertRaisesMessage(SuspiciousFileOperation, msg % base_name): s.get_available_name(file_name) with self.assertRaisesMessage(SuspiciousFileOperation, msg % base_name): s.generate_filename(file_name) def test_storage_dangerous_paths_dir_name(self): candidates = [ ('tmp/../path', 'tmp/..'), ('tmp\\..\\path', 'tmp/..'), ('/tmp/../path', '/tmp/..'), ('\\tmp\\..\\path', '/tmp/..'), ] s = FileSystemStorage() for file_name, path in candidates: msg = "Detected path traversal attempt in '%s'" % path with self.subTest(file_name=file_name): with self.assertRaisesMessage(SuspiciousFileOperation, msg): s.get_available_name(file_name) with self.assertRaisesMessage(SuspiciousFileOperation, msg): s.generate_filename(file_name) def test_filefield_dangerous_filename(self): candidates = [ ('..', 'some/folder/..'), ('.', 'some/folder/.'), ('', 'some/folder/'), ('???', '???'), ('$.$.$', '$.$.$'), ] f = FileField(upload_to='some/folder/') for file_name, msg_file_name in candidates: msg = f"Could not derive file name from '{msg_file_name}'" with self.subTest(file_name=file_name): with self.assertRaisesMessage(SuspiciousFileOperation, msg): f.generate_filename(None, file_name) def test_filefield_dangerous_filename_dot_segments(self): f = FileField(upload_to='some/folder/') msg = "Detected path traversal attempt in 'some/folder/../path'" with self.assertRaisesMessage(SuspiciousFileOperation, msg): f.generate_filename(None, '../path') def test_filefield_generate_filename_absolute_path(self): f = FileField(upload_to='some/folder/') candidates = [ '/tmp/path', '/tmp/../path', ] for file_name in candidates: msg = f"Detected path traversal attempt in '{file_name}'" with self.subTest(file_name=file_name): with self.assertRaisesMessage(SuspiciousFileOperation, msg): f.generate_filename(None, file_name) def test_filefield_generate_filename(self): f = FileField(upload_to='some/folder/') self.assertEqual( f.generate_filename(None, 'test with space.txt'), os.path.normpath('some/folder/test_with_space.txt') ) def test_filefield_generate_filename_with_upload_to(self): def upload_to(instance, filename): return 'some/folder/' + filename f = FileField(upload_to=upload_to) self.assertEqual( f.generate_filename(None, 'test with space.txt'), 
os.path.normpath('some/folder/test_with_space.txt') ) def test_filefield_generate_filename_upload_to_overrides_dangerous_filename(self): def upload_to(instance, filename): return 'test.txt' f = FileField(upload_to=upload_to) candidates = [ '/tmp/.', '/tmp/..', '/tmp/../path', '/tmp/path', 'some/folder/', 'some/folder/.', 'some/folder/..', 'some/folder/???', 'some/folder/$.$.$', 'some/../test.txt', '', ] for file_name in candidates: with self.subTest(file_name=file_name): self.assertEqual(f.generate_filename(None, file_name), 'test.txt') def test_filefield_generate_filename_upload_to_absolute_path(self): def upload_to(instance, filename): return '/tmp/' + filename f = FileField(upload_to=upload_to) candidates = [ 'path', '../path', '???', '$.$.$', ] for file_name in candidates: msg = f"Detected path traversal attempt in '/tmp/{file_name}'" with self.subTest(file_name=file_name): with self.assertRaisesMessage(SuspiciousFileOperation, msg): f.generate_filename(None, file_name) def test_filefield_generate_filename_upload_to_dangerous_filename(self): def upload_to(instance, filename): return '/tmp/' + filename f = FileField(upload_to=upload_to) candidates = ['..', '.', ''] for file_name in candidates: msg = f"Could not derive file name from '/tmp/{file_name}'" with self.subTest(file_name=file_name): with self.assertRaisesMessage(SuspiciousFileOperation, msg): f.generate_filename(None, file_name) def test_filefield_awss3_storage(self): """ Simulate a FileField with an S3 storage which uses keys rather than folders and names. FileField and Storage shouldn't have any os.path() calls that break the key. """ storage = AWSS3Storage() folder = 'not/a/folder/' f = FileField(upload_to=folder, storage=storage) key = 'my-file-key\\with odd characters' data = ContentFile('test') expected_key = AWSS3Storage.prefix + folder + key # Simulate call to f.save() result_key = f.generate_filename(None, key) self.assertEqual(result_key, expected_key) result_key = storage.save(result_key, data) self.assertEqual(result_key, expected_key) # Repeat test with a callable. def upload_to(instance, filename): # Return a non-normalized path on purpose. return folder + filename f = FileField(upload_to=upload_to, storage=storage) # Simulate call to f.save() result_key = f.generate_filename(None, key) self.assertEqual(result_key, expected_key) result_key = storage.save(result_key, data) self.assertEqual(result_key, expected_key)
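# A minimal sketch of a key-based storage in the spirit of the AWSS3Storage
# stub above: generate_filename() avoids os.path helpers so arbitrary key
# characters survive, and get_valid_name() leaves names untouched. The class
# name and prefix are illustrative; a usable backend would also need to
# implement _open(), _save(), exists(), url(), and friends.
from django.core.files.storage import Storage


class KeyPrefixStorage(Storage):
    prefix = 'uploads/'

    def get_valid_name(self, name):
        # Keep the key exactly as given; no character munging.
        return name

    def generate_filename(self, filename):
        # Plain string concatenation instead of os.path.join(), so backslashes
        # and other unusual characters remain part of the key.
        return self.prefix + self.get_valid_name(filename)


# Hypothetical field wiring, mirroring the FileField(...) calls in the tests:
# attachment = FileField(upload_to='attachments/', storage=KeyPrefixStorage())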
c638db089a21f1c7b9fd9200c5c50e09ba6638be7cfded5aa0abcb5e83dbe979
import os import shutil import sys import tempfile import threading import time import unittest from datetime import datetime, timedelta from io import StringIO from pathlib import Path from urllib.request import urlopen from django.core.cache import cache from django.core.exceptions import SuspiciousFileOperation from django.core.files.base import ContentFile, File from django.core.files.storage import ( FileSystemStorage, Storage as BaseStorage, default_storage, get_storage_class, ) from django.core.files.uploadedfile import ( InMemoryUploadedFile, SimpleUploadedFile, TemporaryUploadedFile, ) from django.db.models import FileField from django.db.models.fields.files import FileDescriptor from django.test import ( LiveServerTestCase, SimpleTestCase, TestCase, override_settings, ) from django.test.utils import requires_tz_support from django.urls import NoReverseMatch, reverse_lazy from django.utils import timezone from django.utils._os import symlinks_supported from .models import ( Storage, callable_storage, temp_storage, temp_storage_location, ) FILE_SUFFIX_REGEX = '[A-Za-z0-9]{7}' class GetStorageClassTests(SimpleTestCase): def test_get_filesystem_storage(self): """ get_storage_class returns the class for a storage backend name/path. """ self.assertEqual( get_storage_class('django.core.files.storage.FileSystemStorage'), FileSystemStorage) def test_get_invalid_storage_module(self): """ get_storage_class raises an error if the requested import don't exist. """ with self.assertRaisesMessage(ImportError, "No module named 'storage'"): get_storage_class('storage.NonexistentStorage') def test_get_nonexistent_storage_class(self): """ get_storage_class raises an error if the requested class don't exist. """ with self.assertRaises(ImportError): get_storage_class('django.core.files.storage.NonexistentStorage') def test_get_nonexistent_storage_module(self): """ get_storage_class raises an error if the requested module don't exist. """ with self.assertRaisesMessage(ImportError, "No module named 'django.core.files.nonexistent_storage'"): get_storage_class('django.core.files.nonexistent_storage.NonexistentStorage') class FileSystemStorageTests(unittest.TestCase): def test_deconstruction(self): path, args, kwargs = temp_storage.deconstruct() self.assertEqual(path, "django.core.files.storage.FileSystemStorage") self.assertEqual(args, ()) self.assertEqual(kwargs, {'location': temp_storage_location}) kwargs_orig = { 'location': temp_storage_location, 'base_url': 'http://myfiles.example.com/' } storage = FileSystemStorage(**kwargs_orig) path, args, kwargs = storage.deconstruct() self.assertEqual(kwargs, kwargs_orig) def test_lazy_base_url_init(self): """ FileSystemStorage.__init__() shouldn't evaluate base_url. """ storage = FileSystemStorage(base_url=reverse_lazy('app:url')) with self.assertRaises(NoReverseMatch): storage.url(storage.base_url) class FileStorageTests(SimpleTestCase): storage_class = FileSystemStorage def setUp(self): self.temp_dir = tempfile.mkdtemp() self.storage = self.storage_class(location=self.temp_dir, base_url='/test_media_url/') # Set up a second temporary directory which is ensured to have a mixed # case name. 
self.temp_dir2 = tempfile.mkdtemp(suffix='aBc') def tearDown(self): shutil.rmtree(self.temp_dir) shutil.rmtree(self.temp_dir2) def test_empty_location(self): """ Makes sure an exception is raised if the location is empty """ storage = self.storage_class(location='') self.assertEqual(storage.base_location, '') self.assertEqual(storage.location, os.getcwd()) def test_file_access_options(self): """ Standard file access options are available, and work as expected. """ self.assertFalse(self.storage.exists('storage_test')) f = self.storage.open('storage_test', 'w') f.write('storage contents') f.close() self.assertTrue(self.storage.exists('storage_test')) f = self.storage.open('storage_test', 'r') self.assertEqual(f.read(), 'storage contents') f.close() self.storage.delete('storage_test') self.assertFalse(self.storage.exists('storage_test')) def _test_file_time_getter(self, getter): # Check for correct behavior under both USE_TZ=True and USE_TZ=False. # The tests are similar since they both set up a situation where the # system time zone, Django's TIME_ZONE, and UTC are distinct. self._test_file_time_getter_tz_handling_on(getter) self._test_file_time_getter_tz_handling_off(getter) @override_settings(USE_TZ=True, TIME_ZONE='Africa/Algiers') def _test_file_time_getter_tz_handling_on(self, getter): # Django's TZ (and hence the system TZ) is set to Africa/Algiers which # is UTC+1 and has no DST change. We can set the Django TZ to something # else so that UTC, Django's TIME_ZONE, and the system timezone are all # different. now_in_algiers = timezone.make_aware(datetime.now()) with timezone.override(timezone.get_fixed_timezone(-300)): # At this point the system TZ is +1 and the Django TZ # is -5. The following will be aware in UTC. now = timezone.now() self.assertFalse(self.storage.exists('test.file.tz.on')) f = ContentFile('custom contents') f_name = self.storage.save('test.file.tz.on', f) self.addCleanup(self.storage.delete, f_name) dt = getter(f_name) # dt should be aware, in UTC self.assertTrue(timezone.is_aware(dt)) self.assertEqual(now.tzname(), dt.tzname()) # The three timezones are indeed distinct. naive_now = datetime.now() algiers_offset = now_in_algiers.tzinfo.utcoffset(naive_now) django_offset = timezone.get_current_timezone().utcoffset(naive_now) utc_offset = timezone.utc.utcoffset(naive_now) self.assertGreater(algiers_offset, utc_offset) self.assertLess(django_offset, utc_offset) # dt and now should be the same effective time. self.assertLess(abs(dt - now), timedelta(seconds=2)) @override_settings(USE_TZ=False, TIME_ZONE='Africa/Algiers') def _test_file_time_getter_tz_handling_off(self, getter): # Django's TZ (and hence the system TZ) is set to Africa/Algiers which # is UTC+1 and has no DST change. We can set the Django TZ to something # else so that UTC, Django's TIME_ZONE, and the system timezone are all # different. now_in_algiers = timezone.make_aware(datetime.now()) with timezone.override(timezone.get_fixed_timezone(-300)): # At this point the system TZ is +1 and the Django TZ # is -5. self.assertFalse(self.storage.exists('test.file.tz.off')) f = ContentFile('custom contents') f_name = self.storage.save('test.file.tz.off', f) self.addCleanup(self.storage.delete, f_name) dt = getter(f_name) # dt should be naive, in system (+1) TZ self.assertTrue(timezone.is_naive(dt)) # The three timezones are indeed distinct. 
naive_now = datetime.now() algiers_offset = now_in_algiers.tzinfo.utcoffset(naive_now) django_offset = timezone.get_current_timezone().utcoffset(naive_now) utc_offset = timezone.utc.utcoffset(naive_now) self.assertGreater(algiers_offset, utc_offset) self.assertLess(django_offset, utc_offset) # dt and naive_now should be the same effective time. self.assertLess(abs(dt - naive_now), timedelta(seconds=2)) # If we convert dt to an aware object using the Algiers # timezone then it should be the same effective time to # now_in_algiers. _dt = timezone.make_aware(dt, now_in_algiers.tzinfo) self.assertLess(abs(_dt - now_in_algiers), timedelta(seconds=2)) def test_file_get_accessed_time(self): """ File storage returns a Datetime object for the last accessed time of a file. """ self.assertFalse(self.storage.exists('test.file')) f = ContentFile('custom contents') f_name = self.storage.save('test.file', f) self.addCleanup(self.storage.delete, f_name) atime = self.storage.get_accessed_time(f_name) self.assertEqual(atime, datetime.fromtimestamp(os.path.getatime(self.storage.path(f_name)))) self.assertLess(timezone.now() - self.storage.get_accessed_time(f_name), timedelta(seconds=2)) @requires_tz_support def test_file_get_accessed_time_timezone(self): self._test_file_time_getter(self.storage.get_accessed_time) def test_file_get_created_time(self): """ File storage returns a datetime for the creation time of a file. """ self.assertFalse(self.storage.exists('test.file')) f = ContentFile('custom contents') f_name = self.storage.save('test.file', f) self.addCleanup(self.storage.delete, f_name) ctime = self.storage.get_created_time(f_name) self.assertEqual(ctime, datetime.fromtimestamp(os.path.getctime(self.storage.path(f_name)))) self.assertLess(timezone.now() - self.storage.get_created_time(f_name), timedelta(seconds=2)) @requires_tz_support def test_file_get_created_time_timezone(self): self._test_file_time_getter(self.storage.get_created_time) def test_file_get_modified_time(self): """ File storage returns a datetime for the last modified time of a file. """ self.assertFalse(self.storage.exists('test.file')) f = ContentFile('custom contents') f_name = self.storage.save('test.file', f) self.addCleanup(self.storage.delete, f_name) mtime = self.storage.get_modified_time(f_name) self.assertEqual(mtime, datetime.fromtimestamp(os.path.getmtime(self.storage.path(f_name)))) self.assertLess(timezone.now() - self.storage.get_modified_time(f_name), timedelta(seconds=2)) @requires_tz_support def test_file_get_modified_time_timezone(self): self._test_file_time_getter(self.storage.get_modified_time) def test_file_save_without_name(self): """ File storage extracts the filename from the content object if no name is given explicitly. """ self.assertFalse(self.storage.exists('test.file')) f = ContentFile('custom contents') f.name = 'test.file' storage_f_name = self.storage.save(None, f) self.assertEqual(storage_f_name, f.name) self.assertTrue(os.path.exists(os.path.join(self.temp_dir, f.name))) self.storage.delete(storage_f_name) def test_file_save_with_path(self): """ Saving a pathname should create intermediate directories as necessary. 
""" self.assertFalse(self.storage.exists('path/to')) self.storage.save('path/to/test.file', ContentFile('file saved with path')) self.assertTrue(self.storage.exists('path/to')) with self.storage.open('path/to/test.file') as f: self.assertEqual(f.read(), b'file saved with path') self.assertTrue(os.path.exists( os.path.join(self.temp_dir, 'path', 'to', 'test.file'))) self.storage.delete('path/to/test.file') def test_file_save_abs_path(self): test_name = 'path/to/test.file' f = ContentFile('file saved with path') f_name = self.storage.save(os.path.join(self.temp_dir, test_name), f) self.assertEqual(f_name, test_name) @unittest.skipUnless(symlinks_supported(), 'Must be able to symlink to run this test.') def test_file_save_broken_symlink(self): """A new path is created on save when a broken symlink is supplied.""" nonexistent_file_path = os.path.join(self.temp_dir, 'nonexistent.txt') broken_symlink_path = os.path.join(self.temp_dir, 'symlink.txt') os.symlink(nonexistent_file_path, broken_symlink_path) f = ContentFile('some content') f_name = self.storage.save(broken_symlink_path, f) self.assertIs(os.path.exists(os.path.join(self.temp_dir, f_name)), True) def test_save_doesnt_close(self): with TemporaryUploadedFile('test', 'text/plain', 1, 'utf8') as file: file.write(b'1') file.seek(0) self.assertFalse(file.closed) self.storage.save('path/to/test.file', file) self.assertFalse(file.closed) self.assertFalse(file.file.closed) file = InMemoryUploadedFile(StringIO('1'), '', 'test', 'text/plain', 1, 'utf8') with file: self.assertFalse(file.closed) self.storage.save('path/to/test.file', file) self.assertFalse(file.closed) self.assertFalse(file.file.closed) def test_file_path(self): """ File storage returns the full path of a file """ self.assertFalse(self.storage.exists('test.file')) f = ContentFile('custom contents') f_name = self.storage.save('test.file', f) self.assertEqual(self.storage.path(f_name), os.path.join(self.temp_dir, f_name)) self.storage.delete(f_name) def test_file_url(self): """ File storage returns a url to access a given file from the web. """ self.assertEqual(self.storage.url('test.file'), self.storage.base_url + 'test.file') # should encode special chars except ~!*()' # like encodeURIComponent() JavaScript function do self.assertEqual( self.storage.url(r"~!*()'@#$%^&*abc`+ =.file"), "/test_media_url/~!*()'%40%23%24%25%5E%26*abc%60%2B%20%3D.file" ) self.assertEqual(self.storage.url("ab\0c"), "/test_media_url/ab%00c") # should translate os path separator(s) to the url path separator self.assertEqual(self.storage.url("""a/b\\c.file"""), "/test_media_url/a/b/c.file") # #25905: remove leading slashes from file names to prevent unsafe url output self.assertEqual(self.storage.url("/evil.com"), "/test_media_url/evil.com") self.assertEqual(self.storage.url(r"\evil.com"), "/test_media_url/evil.com") self.assertEqual(self.storage.url("///evil.com"), "/test_media_url/evil.com") self.assertEqual(self.storage.url(r"\\\evil.com"), "/test_media_url/evil.com") self.assertEqual(self.storage.url(None), "/test_media_url/") def test_base_url(self): """ File storage returns a url even when its base_url is unset or modified. 
""" self.storage.base_url = None with self.assertRaises(ValueError): self.storage.url('test.file') # #22717: missing ending slash in base_url should be auto-corrected storage = self.storage_class(location=self.temp_dir, base_url='/no_ending_slash') self.assertEqual( storage.url('test.file'), '%s%s' % (storage.base_url, 'test.file') ) def test_listdir(self): """ File storage returns a tuple containing directories and files. """ self.assertFalse(self.storage.exists('storage_test_1')) self.assertFalse(self.storage.exists('storage_test_2')) self.assertFalse(self.storage.exists('storage_dir_1')) self.storage.save('storage_test_1', ContentFile('custom content')) self.storage.save('storage_test_2', ContentFile('custom content')) os.mkdir(os.path.join(self.temp_dir, 'storage_dir_1')) self.addCleanup(self.storage.delete, 'storage_test_1') self.addCleanup(self.storage.delete, 'storage_test_2') for directory in ('', Path('')): with self.subTest(directory=directory): dirs, files = self.storage.listdir(directory) self.assertEqual(set(dirs), {'storage_dir_1'}) self.assertEqual(set(files), {'storage_test_1', 'storage_test_2'}) def test_file_storage_prevents_directory_traversal(self): """ File storage prevents directory traversal (files can only be accessed if they're below the storage location). """ with self.assertRaises(SuspiciousFileOperation): self.storage.exists('..') with self.assertRaises(SuspiciousFileOperation): self.storage.exists('/etc/passwd') def test_file_storage_preserves_filename_case(self): """The storage backend should preserve case of filenames.""" # Create a storage backend associated with the mixed case name # directory. other_temp_storage = self.storage_class(location=self.temp_dir2) # Ask that storage backend to store a file with a mixed case filename. mixed_case = 'CaSe_SeNsItIvE' file = other_temp_storage.open(mixed_case, 'w') file.write('storage contents') file.close() self.assertEqual(os.path.join(self.temp_dir2, mixed_case), other_temp_storage.path(mixed_case)) other_temp_storage.delete(mixed_case) def test_makedirs_race_handling(self): """ File storage should be robust against directory creation race conditions. """ real_makedirs = os.makedirs # Monkey-patch os.makedirs, to simulate a normal call, a raced call, # and an error. def fake_makedirs(path, mode=0o777, exist_ok=False): if path == os.path.join(self.temp_dir, 'normal'): real_makedirs(path, mode, exist_ok) elif path == os.path.join(self.temp_dir, 'raced'): real_makedirs(path, mode, exist_ok) if not exist_ok: raise FileExistsError() elif path == os.path.join(self.temp_dir, 'error'): raise PermissionError() else: self.fail('unexpected argument %r' % path) try: os.makedirs = fake_makedirs self.storage.save('normal/test.file', ContentFile('saved normally')) with self.storage.open('normal/test.file') as f: self.assertEqual(f.read(), b'saved normally') self.storage.save('raced/test.file', ContentFile('saved with race')) with self.storage.open('raced/test.file') as f: self.assertEqual(f.read(), b'saved with race') # Exceptions aside from FileExistsError are raised. with self.assertRaises(PermissionError): self.storage.save('error/test.file', ContentFile('not saved')) finally: os.makedirs = real_makedirs def test_remove_race_handling(self): """ File storage should be robust against file removal race conditions. """ real_remove = os.remove # Monkey-patch os.remove, to simulate a normal call, a raced call, # and an error. 
        def fake_remove(path):
            if path == os.path.join(self.temp_dir, 'normal.file'):
                real_remove(path)
            elif path == os.path.join(self.temp_dir, 'raced.file'):
                real_remove(path)
                raise FileNotFoundError()
            elif path == os.path.join(self.temp_dir, 'error.file'):
                raise PermissionError()
            else:
                self.fail('unexpected argument %r' % path)

        try:
            os.remove = fake_remove

            self.storage.save('normal.file', ContentFile('delete normally'))
            self.storage.delete('normal.file')
            self.assertFalse(self.storage.exists('normal.file'))

            self.storage.save('raced.file', ContentFile('delete with race'))
            self.storage.delete('raced.file')
            self.assertFalse(self.storage.exists('raced.file'))

            # Exceptions aside from FileNotFoundError are raised.
            self.storage.save('error.file', ContentFile('delete with error'))
            with self.assertRaises(PermissionError):
                self.storage.delete('error.file')
        finally:
            os.remove = real_remove

    def test_file_chunks_error(self):
        """
        Test behavior when file.chunks() raises an error.
        """
        f1 = ContentFile('chunks fails')

        def failing_chunks():
            raise OSError
        f1.chunks = failing_chunks
        with self.assertRaises(OSError):
            self.storage.save('error.file', f1)

    def test_delete_no_name(self):
        """
        Calling delete with an empty name should not try to remove the base
        storage directory, but fail loudly (#20660).
        """
        msg = 'The name must be given to delete().'
        with self.assertRaisesMessage(ValueError, msg):
            self.storage.delete(None)
        with self.assertRaisesMessage(ValueError, msg):
            self.storage.delete('')

    def test_delete_deletes_directories(self):
        tmp_dir = tempfile.mkdtemp(dir=self.storage.location)
        self.storage.delete(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))

    @override_settings(
        MEDIA_ROOT='media_root',
        MEDIA_URL='media_url/',
        FILE_UPLOAD_PERMISSIONS=0o777,
        FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o777,
    )
    def test_setting_changed(self):
        """
        Properties using settings values as defaults should be updated on
        referenced settings change while specified values should be unchanged.
        """
        storage = self.storage_class(
            location='explicit_location',
            base_url='explicit_base_url/',
            file_permissions_mode=0o666,
            directory_permissions_mode=0o666,
        )
        defaults_storage = self.storage_class()
        settings = {
            'MEDIA_ROOT': 'overridden_media_root',
            'MEDIA_URL': '/overridden_media_url/',
            'FILE_UPLOAD_PERMISSIONS': 0o333,
            'FILE_UPLOAD_DIRECTORY_PERMISSIONS': 0o333,
        }
        with self.settings(**settings):
            self.assertEqual(storage.base_location, 'explicit_location')
            self.assertIn('explicit_location', storage.location)
            self.assertEqual(storage.base_url, 'explicit_base_url/')
            self.assertEqual(storage.file_permissions_mode, 0o666)
            self.assertEqual(storage.directory_permissions_mode, 0o666)
            self.assertEqual(defaults_storage.base_location, settings['MEDIA_ROOT'])
            self.assertIn(settings['MEDIA_ROOT'], defaults_storage.location)
            self.assertEqual(defaults_storage.base_url, settings['MEDIA_URL'])
            self.assertEqual(defaults_storage.file_permissions_mode, settings['FILE_UPLOAD_PERMISSIONS'])
            self.assertEqual(
                defaults_storage.directory_permissions_mode, settings['FILE_UPLOAD_DIRECTORY_PERMISSIONS']
            )

    def test_file_methods_pathlib_path(self):
        p = Path('test.file')
        self.assertFalse(self.storage.exists(p))
        f = ContentFile('custom contents')
        f_name = self.storage.save(p, f)
        # Storage basic methods.
        self.assertEqual(self.storage.path(p), os.path.join(self.temp_dir, p))
        self.assertEqual(self.storage.size(p), 15)
        self.assertEqual(self.storage.url(p), self.storage.base_url + f_name)
        with self.storage.open(p) as f:
            self.assertEqual(f.read(), b'custom contents')
        self.addCleanup(self.storage.delete, p)


class CustomStorage(FileSystemStorage):
    def get_available_name(self, name, max_length=None):
        """
        Append numbers to duplicate files rather than underscores, like Trac.
        """
        basename, *ext = os.path.splitext(name)
        number = 2
        while self.exists(name):
            name = ''.join([basename, '.', str(number)] + ext)
            number += 1
        return name


class CustomStorageTests(FileStorageTests):
    storage_class = CustomStorage

    def test_custom_get_available_name(self):
        first = self.storage.save('custom_storage', ContentFile('custom contents'))
        self.assertEqual(first, 'custom_storage')
        second = self.storage.save('custom_storage', ContentFile('more contents'))
        self.assertEqual(second, 'custom_storage.2')
        self.storage.delete(first)
        self.storage.delete(second)


class OverwritingStorage(FileSystemStorage):
    """
    Overwrite existing files instead of appending a suffix to generate an
    unused name.
    """
    # Mask out O_EXCL so os.open() doesn't raise OSError if the file exists.
    OS_OPEN_FLAGS = FileSystemStorage.OS_OPEN_FLAGS & ~os.O_EXCL

    def get_available_name(self, name, max_length=None):
        """Override the effort to find an unused name."""
        return name


class OverwritingStorageTests(FileStorageTests):
    storage_class = OverwritingStorage

    def test_save_overwrite_behavior(self):
        """Saving to the same file name twice overwrites the first file."""
        name = 'test.file'
        self.assertFalse(self.storage.exists(name))
        content_1 = b'content one'
        content_2 = b'second content'
        f_1 = ContentFile(content_1)
        f_2 = ContentFile(content_2)
        stored_name_1 = self.storage.save(name, f_1)
        try:
            self.assertEqual(stored_name_1, name)
            self.assertTrue(self.storage.exists(name))
            self.assertTrue(os.path.exists(os.path.join(self.temp_dir, name)))
            with self.storage.open(name) as fp:
                self.assertEqual(fp.read(), content_1)
            stored_name_2 = self.storage.save(name, f_2)
            self.assertEqual(stored_name_2, name)
            self.assertTrue(self.storage.exists(name))
            self.assertTrue(os.path.exists(os.path.join(self.temp_dir, name)))
            with self.storage.open(name) as fp:
                self.assertEqual(fp.read(), content_2)
        finally:
            self.storage.delete(name)


class DiscardingFalseContentStorage(FileSystemStorage):
    def _save(self, name, content):
        if content:
            return super()._save(name, content)
        return ''


class DiscardingFalseContentStorageTests(FileStorageTests):
    storage_class = DiscardingFalseContentStorage

    def test_custom_storage_discarding_empty_content(self):
        """
        When Storage.save() wraps a file-like object in File, it should
        include the name argument so that bool(file) evaluates to True
        (#26495).
        """
        output = StringIO('content')
        self.storage.save('tests/stringio', output)
        self.assertTrue(self.storage.exists('tests/stringio'))
        with self.storage.open('tests/stringio') as f:
            self.assertEqual(f.read(), b'content')


class FileFieldStorageTests(TestCase):
    def tearDown(self):
        shutil.rmtree(temp_storage_location)

    def _storage_max_filename_length(self, storage):
        """
        Query filesystem for maximum filename length (e.g. AUFS has 242).
""" dir_to_test = storage.location while not os.path.exists(dir_to_test): dir_to_test = os.path.dirname(dir_to_test) try: return os.pathconf(dir_to_test, 'PC_NAME_MAX') except Exception: return 255 # Should be safe on most backends def test_files(self): self.assertIsInstance(Storage.normal, FileDescriptor) # An object without a file has limited functionality. obj1 = Storage() self.assertEqual(obj1.normal.name, "") with self.assertRaises(ValueError): obj1.normal.size # Saving a file enables full functionality. obj1.normal.save("django_test.txt", ContentFile("content")) self.assertEqual(obj1.normal.name, "tests/django_test.txt") self.assertEqual(obj1.normal.size, 7) self.assertEqual(obj1.normal.read(), b"content") obj1.normal.close() # File objects can be assigned to FileField attributes, but shouldn't # get committed until the model it's attached to is saved. obj1.normal = SimpleUploadedFile("assignment.txt", b"content") dirs, files = temp_storage.listdir("tests") self.assertEqual(dirs, []) self.assertNotIn("assignment.txt", files) obj1.save() dirs, files = temp_storage.listdir("tests") self.assertEqual(sorted(files), ["assignment.txt", "django_test.txt"]) # Save another file with the same name. obj2 = Storage() obj2.normal.save("django_test.txt", ContentFile("more content")) obj2_name = obj2.normal.name self.assertRegex(obj2_name, "tests/django_test_%s.txt" % FILE_SUFFIX_REGEX) self.assertEqual(obj2.normal.size, 12) obj2.normal.close() # Deleting an object does not delete the file it uses. obj2.delete() obj2.normal.save("django_test.txt", ContentFile("more content")) self.assertNotEqual(obj2_name, obj2.normal.name) self.assertRegex(obj2.normal.name, "tests/django_test_%s.txt" % FILE_SUFFIX_REGEX) obj2.normal.close() def test_filefield_read(self): # Files can be read in a little at a time, if necessary. obj = Storage.objects.create( normal=SimpleUploadedFile("assignment.txt", b"content")) obj.normal.open() self.assertEqual(obj.normal.read(3), b"con") self.assertEqual(obj.normal.read(), b"tent") self.assertEqual(list(obj.normal.chunks(chunk_size=2)), [b"co", b"nt", b"en", b"t"]) obj.normal.close() def test_filefield_write(self): # Files can be written to. obj = Storage.objects.create(normal=SimpleUploadedFile('rewritten.txt', b'content')) with obj.normal as normal: normal.open('wb') normal.write(b'updated') obj.refresh_from_db() self.assertEqual(obj.normal.read(), b'updated') obj.normal.close() def test_filefield_reopen(self): obj = Storage.objects.create(normal=SimpleUploadedFile('reopen.txt', b'content')) with obj.normal as normal: normal.open() obj.normal.open() obj.normal.file.seek(0) obj.normal.close() def test_duplicate_filename(self): # Multiple files with the same name get _(7 random chars) appended to them. objs = [Storage() for i in range(2)] for o in objs: o.normal.save("multiple_files.txt", ContentFile("Same Content")) try: names = [o.normal.name for o in objs] self.assertEqual(names[0], "tests/multiple_files.txt") self.assertRegex(names[1], "tests/multiple_files_%s.txt" % FILE_SUFFIX_REGEX) finally: for o in objs: o.delete() def test_file_truncation(self): # Given the max_length is limited, when multiple files get uploaded # under the same name, then the filename get truncated in order to fit # in _(7 random chars). When most of the max_length is taken by # dirname + extension and there are not enough characters in the # filename to truncate, an exception should be raised. 
objs = [Storage() for i in range(2)] filename = 'filename.ext' for o in objs: o.limited_length.save(filename, ContentFile('Same Content')) try: # Testing truncation. names = [o.limited_length.name for o in objs] self.assertEqual(names[0], 'tests/%s' % filename) self.assertRegex(names[1], 'tests/fi_%s.ext' % FILE_SUFFIX_REGEX) # Testing exception is raised when filename is too short to truncate. filename = 'short.longext' objs[0].limited_length.save(filename, ContentFile('Same Content')) with self.assertRaisesMessage(SuspiciousFileOperation, 'Storage can not find an available filename'): objs[1].limited_length.save(*(filename, ContentFile('Same Content'))) finally: for o in objs: o.delete() @unittest.skipIf( sys.platform == 'win32', "Windows supports at most 260 characters in a path.", ) def test_extended_length_storage(self): # Testing FileField with max_length > 255. Most systems have filename # length limitation of 255. Path takes extra chars. filename = (self._storage_max_filename_length(temp_storage) - 4) * 'a' # 4 chars for extension. obj = Storage() obj.extended_length.save('%s.txt' % filename, ContentFile('Same Content')) self.assertEqual(obj.extended_length.name, 'tests/%s.txt' % filename) self.assertEqual(obj.extended_length.read(), b'Same Content') obj.extended_length.close() def test_filefield_default(self): # Default values allow an object to access a single file. temp_storage.save('tests/default.txt', ContentFile('default content')) obj = Storage.objects.create() self.assertEqual(obj.default.name, "tests/default.txt") self.assertEqual(obj.default.read(), b"default content") obj.default.close() # But it shouldn't be deleted, even if there are no more objects using # it. obj.delete() obj = Storage() self.assertEqual(obj.default.read(), b"default content") obj.default.close() def test_empty_upload_to(self): # upload_to can be empty, meaning it does not use subdirectory. obj = Storage() obj.empty.save('django_test.txt', ContentFile('more content')) self.assertEqual(obj.empty.name, "django_test.txt") self.assertEqual(obj.empty.read(), b"more content") obj.empty.close() def test_pathlib_upload_to(self): obj = Storage() obj.pathlib_callable.save('some_file1.txt', ContentFile('some content')) self.assertEqual(obj.pathlib_callable.name, 'bar/some_file1.txt') obj.pathlib_direct.save('some_file2.txt', ContentFile('some content')) self.assertEqual(obj.pathlib_direct.name, 'bar/some_file2.txt') obj.random.close() def test_random_upload_to(self): # Verify the fix for #5655, making sure the directory is only # determined once. obj = Storage() obj.random.save("random_file", ContentFile("random content")) self.assertTrue(obj.random.name.endswith("/random_file")) obj.random.close() def test_custom_valid_name_callable_upload_to(self): """ Storage.get_valid_name() should be called when upload_to is a callable. 
""" obj = Storage() obj.custom_valid_name.save("random_file", ContentFile("random content")) # CustomValidNameStorage.get_valid_name() appends '_valid' to the name self.assertTrue(obj.custom_valid_name.name.endswith("/random_file_valid")) obj.custom_valid_name.close() def test_filefield_pickling(self): # Push an object into the cache to make sure it pickles properly obj = Storage() obj.normal.save("django_test.txt", ContentFile("more content")) obj.normal.close() cache.set("obj", obj) self.assertEqual(cache.get("obj").normal.name, "tests/django_test.txt") def test_file_object(self): # Create sample file temp_storage.save('tests/example.txt', ContentFile('some content')) # Load it as Python file object with open(temp_storage.path('tests/example.txt')) as file_obj: # Save it using storage and read its content temp_storage.save('tests/file_obj', file_obj) self.assertTrue(temp_storage.exists('tests/file_obj')) with temp_storage.open('tests/file_obj') as f: self.assertEqual(f.read(), b'some content') def test_stringio(self): # Test passing StringIO instance as content argument to save output = StringIO() output.write('content') output.seek(0) # Save it and read written file temp_storage.save('tests/stringio', output) self.assertTrue(temp_storage.exists('tests/stringio')) with temp_storage.open('tests/stringio') as f: self.assertEqual(f.read(), b'content') class FieldCallableFileStorageTests(SimpleTestCase): def setUp(self): self.temp_storage_location = tempfile.mkdtemp(suffix='filefield_callable_storage') def tearDown(self): shutil.rmtree(self.temp_storage_location) def test_callable_base_class_error_raises(self): class NotStorage: pass msg = 'FileField.storage must be a subclass/instance of django.core.files.storage.Storage' for invalid_type in (NotStorage, str, list, set, tuple): with self.subTest(invalid_type=invalid_type): with self.assertRaisesMessage(TypeError, msg): FileField(storage=invalid_type) def test_file_field_storage_none_uses_default_storage(self): self.assertEqual(FileField().storage, default_storage) def test_callable_function_storage_file_field(self): storage = FileSystemStorage(location=self.temp_storage_location) def get_storage(): return storage obj = FileField(storage=get_storage) self.assertEqual(obj.storage, storage) self.assertEqual(obj.storage.location, storage.location) def test_callable_class_storage_file_field(self): class GetStorage(FileSystemStorage): pass obj = FileField(storage=GetStorage) self.assertIsInstance(obj.storage, BaseStorage) def test_callable_storage_file_field_in_model(self): obj = Storage() self.assertEqual(obj.storage_callable.storage, temp_storage) self.assertEqual(obj.storage_callable.storage.location, temp_storage_location) self.assertIsInstance(obj.storage_callable_class.storage, BaseStorage) def test_deconstruction(self): """ Deconstructing gives the original callable, not the evaluated value. """ obj = Storage() *_, kwargs = obj._meta.get_field('storage_callable').deconstruct() storage = kwargs['storage'] self.assertIs(storage, callable_storage) # Tests for a race condition on file saving (#4948). # This is written in such a way that it'll always pass on platforms # without threading. 
class SlowFile(ContentFile): def chunks(self): time.sleep(1) return super().chunks() class FileSaveRaceConditionTest(SimpleTestCase): def setUp(self): self.storage_dir = tempfile.mkdtemp() self.storage = FileSystemStorage(self.storage_dir) self.thread = threading.Thread(target=self.save_file, args=['conflict']) def tearDown(self): shutil.rmtree(self.storage_dir) def save_file(self, name): name = self.storage.save(name, SlowFile(b"Data")) def test_race_condition(self): self.thread.start() self.save_file('conflict') self.thread.join() files = sorted(os.listdir(self.storage_dir)) self.assertEqual(files[0], 'conflict') self.assertRegex(files[1], 'conflict_%s' % FILE_SUFFIX_REGEX) @unittest.skipIf(sys.platform == 'win32', "Windows only partially supports umasks and chmod.") class FileStoragePermissions(unittest.TestCase): def setUp(self): self.umask = 0o027 self.old_umask = os.umask(self.umask) self.storage_dir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.storage_dir) os.umask(self.old_umask) @override_settings(FILE_UPLOAD_PERMISSIONS=0o654) def test_file_upload_permissions(self): self.storage = FileSystemStorage(self.storage_dir) name = self.storage.save("the_file", ContentFile("data")) actual_mode = os.stat(self.storage.path(name))[0] & 0o777 self.assertEqual(actual_mode, 0o654) @override_settings(FILE_UPLOAD_PERMISSIONS=None) def test_file_upload_default_permissions(self): self.storage = FileSystemStorage(self.storage_dir) fname = self.storage.save("some_file", ContentFile("data")) mode = os.stat(self.storage.path(fname))[0] & 0o777 self.assertEqual(mode, 0o666 & ~self.umask) @override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765) def test_file_upload_directory_permissions(self): self.storage = FileSystemStorage(self.storage_dir) name = self.storage.save('the_directory/subdir/the_file', ContentFile('data')) file_path = Path(self.storage.path(name)) self.assertEqual(file_path.parent.stat().st_mode & 0o777, 0o765) self.assertEqual(file_path.parent.parent.stat().st_mode & 0o777, 0o765) @override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=None) def test_file_upload_directory_default_permissions(self): self.storage = FileSystemStorage(self.storage_dir) name = self.storage.save('the_directory/subdir/the_file', ContentFile('data')) file_path = Path(self.storage.path(name)) expected_mode = 0o777 & ~self.umask self.assertEqual(file_path.parent.stat().st_mode & 0o777, expected_mode) self.assertEqual(file_path.parent.parent.stat().st_mode & 0o777, expected_mode) class FileStoragePathParsing(SimpleTestCase): def setUp(self): self.storage_dir = tempfile.mkdtemp() self.storage = FileSystemStorage(self.storage_dir) def tearDown(self): shutil.rmtree(self.storage_dir) def test_directory_with_dot(self): """Regression test for #9610. If the directory name contains a dot and the file name doesn't, make sure we still mangle the file name instead of the directory name. """ self.storage.save('dotted.path/test', ContentFile("1")) self.storage.save('dotted.path/test', ContentFile("2")) files = sorted(os.listdir(os.path.join(self.storage_dir, 'dotted.path'))) self.assertFalse(os.path.exists(os.path.join(self.storage_dir, 'dotted_.path'))) self.assertEqual(files[0], 'test') self.assertRegex(files[1], 'test_%s' % FILE_SUFFIX_REGEX) def test_first_character_dot(self): """ File names with a dot as their first character don't have an extension, and the underscore should get added to the end. 
""" self.storage.save('dotted.path/.test', ContentFile("1")) self.storage.save('dotted.path/.test', ContentFile("2")) files = sorted(os.listdir(os.path.join(self.storage_dir, 'dotted.path'))) self.assertFalse(os.path.exists(os.path.join(self.storage_dir, 'dotted_.path'))) self.assertEqual(files[0], '.test') self.assertRegex(files[1], '.test_%s' % FILE_SUFFIX_REGEX) class ContentFileStorageTestCase(unittest.TestCase): def setUp(self): self.storage_dir = tempfile.mkdtemp() self.storage = FileSystemStorage(self.storage_dir) def tearDown(self): shutil.rmtree(self.storage_dir) def test_content_saving(self): """ ContentFile can be saved correctly with the filesystem storage, if it was initialized with either bytes or unicode content. """ self.storage.save('bytes.txt', ContentFile(b"content")) self.storage.save('unicode.txt', ContentFile("español")) @override_settings(ROOT_URLCONF='file_storage.urls') class FileLikeObjectTestCase(LiveServerTestCase): """ Test file-like objects (#15644). """ available_apps = [] def setUp(self): self.temp_dir = tempfile.mkdtemp() self.storage = FileSystemStorage(location=self.temp_dir) def tearDown(self): shutil.rmtree(self.temp_dir) def test_urllib_request_urlopen(self): """ Test the File storage API with a file-like object coming from urllib.request.urlopen(). """ file_like_object = urlopen(self.live_server_url + '/') f = File(file_like_object) stored_filename = self.storage.save("remote_file.html", f) remote_file = urlopen(self.live_server_url + '/') with self.storage.open(stored_filename) as stored_file: self.assertEqual(stored_file.read(), remote_file.read())
652d023d2c37bb88f8df8b84b31be8443319ecc8ba64159d5f6b43b37773e196
import string import uuid from django.core.exceptions import ImproperlyConfigured from django.test import SimpleTestCase from django.test.utils import override_settings from django.urls import ( NoReverseMatch, Resolver404, path, re_path, resolve, reverse, ) from django.views import View from .converters import DynamicConverter from .views import empty_view included_kwargs = {'base': b'hello', 'value': b'world'} converter_test_data = ( # ('url', ('url_name', 'app_name', {kwargs})), # aGVsbG8= is 'hello' encoded in base64. ('/base64/aGVsbG8=/', ('base64', '', {'value': b'hello'})), ('/base64/aGVsbG8=/subpatterns/d29ybGQ=/', ('subpattern-base64', '', included_kwargs)), ('/base64/aGVsbG8=/namespaced/d29ybGQ=/', ('subpattern-base64', 'namespaced-base64', included_kwargs)), ) @override_settings(ROOT_URLCONF='urlpatterns.path_urls') class SimplifiedURLTests(SimpleTestCase): def test_path_lookup_without_parameters(self): match = resolve('/articles/2003/') self.assertEqual(match.url_name, 'articles-2003') self.assertEqual(match.args, ()) self.assertEqual(match.kwargs, {}) self.assertEqual(match.route, 'articles/2003/') def test_path_lookup_with_typed_parameters(self): match = resolve('/articles/2015/') self.assertEqual(match.url_name, 'articles-year') self.assertEqual(match.args, ()) self.assertEqual(match.kwargs, {'year': 2015}) self.assertEqual(match.route, 'articles/<int:year>/') def test_path_lookup_with_multiple_parameters(self): match = resolve('/articles/2015/04/12/') self.assertEqual(match.url_name, 'articles-year-month-day') self.assertEqual(match.args, ()) self.assertEqual(match.kwargs, {'year': 2015, 'month': 4, 'day': 12}) self.assertEqual(match.route, 'articles/<int:year>/<int:month>/<int:day>/') def test_two_variable_at_start_of_path_pattern(self): match = resolve('/en/foo/') self.assertEqual(match.url_name, 'lang-and-path') self.assertEqual(match.kwargs, {'lang': 'en', 'url': 'foo'}) self.assertEqual(match.route, '<lang>/<path:url>/') def test_re_path(self): match = resolve('/regex/1/') self.assertEqual(match.url_name, 'regex') self.assertEqual(match.kwargs, {'pk': '1'}) self.assertEqual(match.route, '^regex/(?P<pk>[0-9]+)/$') def test_re_path_with_optional_parameter(self): for url, kwargs in ( ('/regex_optional/1/2/', {'arg1': '1', 'arg2': '2'}), ('/regex_optional/1/', {'arg1': '1'}), ): with self.subTest(url=url): match = resolve(url) self.assertEqual(match.url_name, 'regex_optional') self.assertEqual(match.kwargs, kwargs) self.assertEqual( match.route, r'^regex_optional/(?P<arg1>\d+)/(?:(?P<arg2>\d+)/)?', ) def test_re_path_with_missing_optional_parameter(self): match = resolve('/regex_only_optional/') self.assertEqual(match.url_name, 'regex_only_optional') self.assertEqual(match.kwargs, {}) self.assertEqual(match.args, ()) self.assertEqual( match.route, r'^regex_only_optional/(?:(?P<arg1>\d+)/)?', ) def test_path_lookup_with_inclusion(self): match = resolve('/included_urls/extra/something/') self.assertEqual(match.url_name, 'inner-extra') self.assertEqual(match.route, 'included_urls/extra/<extra>/') def test_path_lookup_with_empty_string_inclusion(self): match = resolve('/more/99/') self.assertEqual(match.url_name, 'inner-more') self.assertEqual(match.route, r'^more/(?P<extra>\w+)/$') def test_path_lookup_with_double_inclusion(self): match = resolve('/included_urls/more/some_value/') self.assertEqual(match.url_name, 'inner-more') self.assertEqual(match.route, r'included_urls/more/(?P<extra>\w+)/$') def test_path_reverse_without_parameter(self): url = reverse('articles-2003') 
self.assertEqual(url, '/articles/2003/') def test_path_reverse_with_parameter(self): url = reverse('articles-year-month-day', kwargs={'year': 2015, 'month': 4, 'day': 12}) self.assertEqual(url, '/articles/2015/4/12/') @override_settings(ROOT_URLCONF='urlpatterns.path_base64_urls') def test_converter_resolve(self): for url, (url_name, app_name, kwargs) in converter_test_data: with self.subTest(url=url): match = resolve(url) self.assertEqual(match.url_name, url_name) self.assertEqual(match.app_name, app_name) self.assertEqual(match.kwargs, kwargs) @override_settings(ROOT_URLCONF='urlpatterns.path_base64_urls') def test_converter_reverse(self): for expected, (url_name, app_name, kwargs) in converter_test_data: if app_name: url_name = '%s:%s' % (app_name, url_name) with self.subTest(url=url_name): url = reverse(url_name, kwargs=kwargs) self.assertEqual(url, expected) @override_settings(ROOT_URLCONF='urlpatterns.path_base64_urls') def test_converter_reverse_with_second_layer_instance_namespace(self): kwargs = included_kwargs.copy() kwargs['last_value'] = b'world' url = reverse('instance-ns-base64:subsubpattern-base64', kwargs=kwargs) self.assertEqual(url, '/base64/aGVsbG8=/subpatterns/d29ybGQ=/d29ybGQ=/') def test_path_inclusion_is_matchable(self): match = resolve('/included_urls/extra/something/') self.assertEqual(match.url_name, 'inner-extra') self.assertEqual(match.kwargs, {'extra': 'something'}) def test_path_inclusion_is_reversible(self): url = reverse('inner-extra', kwargs={'extra': 'something'}) self.assertEqual(url, '/included_urls/extra/something/') def test_invalid_kwargs(self): msg = 'kwargs argument must be a dict, but got str.' with self.assertRaisesMessage(TypeError, msg): path('hello/', empty_view, 'name') with self.assertRaisesMessage(TypeError, msg): re_path('^hello/$', empty_view, 'name') def test_invalid_converter(self): msg = "URL route 'foo/<nonexistent:var>/' uses invalid converter 'nonexistent'." with self.assertRaisesMessage(ImproperlyConfigured, msg): path('foo/<nonexistent:var>/', empty_view) def test_invalid_view(self): msg = 'view must be a callable or a list/tuple in the case of include().' with self.assertRaisesMessage(TypeError, msg): path('articles/', 'invalid_view') def test_invalid_view_instance(self): class EmptyCBV(View): pass msg = 'view must be a callable, pass EmptyCBV.as_view(), not EmptyCBV().' with self.assertRaisesMessage(TypeError, msg): path('foo', EmptyCBV()) def test_whitespace_in_route(self): msg = ( "URL route 'space/<int:num>/extra/<str:%stest>' cannot contain " "whitespace in angle brackets <…>" ) for whitespace in string.whitespace: with self.subTest(repr(whitespace)): with self.assertRaisesMessage(ImproperlyConfigured, msg % whitespace): path('space/<int:num>/extra/<str:%stest>' % whitespace, empty_view) # Whitespaces are valid in paths. 
p = path('space%s/<int:num>/' % string.whitespace, empty_view) match = p.resolve('space%s/1/' % string.whitespace) self.assertEqual(match.kwargs, {'num': 1}) def test_path_trailing_newlines(self): tests = [ '/articles/2003/\n', '/articles/2010/\n', '/en/foo/\n', '/included_urls/extra/\n', '/regex/1/\n', '/users/1/\n', ] for url in tests: with self.subTest(url=url), self.assertRaises(Resolver404): resolve(url) @override_settings(ROOT_URLCONF='urlpatterns.converter_urls') class ConverterTests(SimpleTestCase): def test_matching_urls(self): def no_converter(x): return x test_data = ( ('int', {'0', '1', '01', 1234567890}, int), ('str', {'abcxyz'}, no_converter), ('path', {'allows.ANY*characters'}, no_converter), ('slug', {'abcxyz-ABCXYZ_01234567890'}, no_converter), ('uuid', {'39da9369-838e-4750-91a5-f7805cd82839'}, uuid.UUID), ) for url_name, url_suffixes, converter in test_data: for url_suffix in url_suffixes: url = '/%s/%s/' % (url_name, url_suffix) with self.subTest(url=url): match = resolve(url) self.assertEqual(match.url_name, url_name) self.assertEqual(match.kwargs, {url_name: converter(url_suffix)}) # reverse() works with string parameters. string_kwargs = {url_name: url_suffix} self.assertEqual(reverse(url_name, kwargs=string_kwargs), url) # reverse() also works with native types (int, UUID, etc.). if converter is not no_converter: # The converted value might be different for int (a # leading zero is lost in the conversion). converted_value = match.kwargs[url_name] converted_url = '/%s/%s/' % (url_name, converted_value) self.assertEqual(reverse(url_name, kwargs={url_name: converted_value}), converted_url) def test_nonmatching_urls(self): test_data = ( ('int', {'-1', 'letters'}), ('str', {'', '/'}), ('path', {''}), ('slug', {'', 'stars*notallowed'}), ('uuid', { '', '9da9369-838e-4750-91a5-f7805cd82839', '39da9369-838-4750-91a5-f7805cd82839', '39da9369-838e-475-91a5-f7805cd82839', '39da9369-838e-4750-91a-f7805cd82839', '39da9369-838e-4750-91a5-f7805cd8283', }), ) for url_name, url_suffixes in test_data: for url_suffix in url_suffixes: url = '/%s/%s/' % (url_name, url_suffix) with self.subTest(url=url), self.assertRaises(Resolver404): resolve(url) @override_settings(ROOT_URLCONF='urlpatterns.path_same_name_urls') class SameNameTests(SimpleTestCase): def test_matching_urls_same_name(self): @DynamicConverter.register_to_url def requires_tiny_int(value): if value > 5: raise ValueError return value tests = [ ('number_of_args', [ ([], {}, '0/'), ([1], {}, '1/1/'), ]), ('kwargs_names', [ ([], {'a': 1}, 'a/1/'), ([], {'b': 1}, 'b/1/'), ]), ('converter', [ (['a/b'], {}, 'path/a/b/'), (['a b'], {}, 'str/a%20b/'), (['a-b'], {}, 'slug/a-b/'), (['2'], {}, 'int/2/'), ( ['39da9369-838e-4750-91a5-f7805cd82839'], {}, 'uuid/39da9369-838e-4750-91a5-f7805cd82839/' ), ]), ('regex', [ (['ABC'], {}, 'uppercase/ABC/'), (['abc'], {}, 'lowercase/abc/'), ]), ('converter_to_url', [ ([6], {}, 'int/6/'), ([1], {}, 'tiny_int/1/'), ]), ] for url_name, cases in tests: for args, kwargs, url_suffix in cases: expected_url = '/%s/%s' % (url_name, url_suffix) with self.subTest(url=expected_url): self.assertEqual( reverse(url_name, args=args, kwargs=kwargs), expected_url, ) class ParameterRestrictionTests(SimpleTestCase): def test_integer_parameter_name_causes_exception(self): msg = ( "URL route 'hello/<int:1>/' uses parameter name '1' which isn't " "a valid Python identifier." 
) with self.assertRaisesMessage(ImproperlyConfigured, msg): path(r'hello/<int:1>/', lambda r: None) def test_non_identifier_parameter_name_causes_exception(self): msg = ( "URL route 'b/<int:book.id>/' uses parameter name 'book.id' which " "isn't a valid Python identifier." ) with self.assertRaisesMessage(ImproperlyConfigured, msg): path(r'b/<int:book.id>/', lambda r: None) def test_allows_non_ascii_but_valid_identifiers(self): # \u0394 is "GREEK CAPITAL LETTER DELTA", a valid identifier. p = path('hello/<str:\u0394>/', lambda r: None) match = p.resolve('hello/1/') self.assertEqual(match.kwargs, {'\u0394': '1'}) @override_settings(ROOT_URLCONF='urlpatterns.path_dynamic_urls') class ConversionExceptionTests(SimpleTestCase): """How are errors in Converter.to_python() and to_url() handled?""" def test_resolve_value_error_means_no_match(self): @DynamicConverter.register_to_python def raises_value_error(value): raise ValueError() with self.assertRaises(Resolver404): resolve('/dynamic/abc/') def test_resolve_type_error_propagates(self): @DynamicConverter.register_to_python def raises_type_error(value): raise TypeError('This type error propagates.') with self.assertRaisesMessage(TypeError, 'This type error propagates.'): resolve('/dynamic/abc/') def test_reverse_value_error_means_no_match(self): @DynamicConverter.register_to_url def raises_value_error(value): raise ValueError with self.assertRaises(NoReverseMatch): reverse('dynamic', kwargs={'value': object()}) def test_reverse_type_error_propagates(self): @DynamicConverter.register_to_url def raises_type_error(value): raise TypeError('This type error propagates.') with self.assertRaisesMessage(TypeError, 'This type error propagates.'): reverse('dynamic', kwargs={'value': object()})
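

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the test module above): the minimal shape
# of a custom path converter such as the base64 one exercised by
# converter_test_data. The class name, regex, and registration snippet below
# are our own example, not the converter actually defined for these tests.
import base64


class ExampleBase64Converter:
    # The resolver matches the URL segment against this regex before calling
    # to_python().
    regex = r'[a-zA-Z0-9+/]*={0,2}'

    def to_python(self, value):
        # Called on resolve(); the return value ends up in match.kwargs.
        return base64.b64decode(value)

    def to_url(self, value):
        # Called on reverse(); must return a string usable in the URL.
        return base64.b64encode(value).decode('ascii')


# Registration would normally live alongside the URLconf that uses it:
#   from django.urls import path, register_converter
#   register_converter(ExampleBase64Converter, 'base64')
#   path('base64/<base64:value>/', empty_view, name='base64')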
0f610c0da1e98d704037ab1ec66c4de4ca5f8695acb9b2867f456975f83c3024
import threading import time from unittest import mock from multiple_database.routers import TestRouter from django.core.exceptions import FieldError from django.db import ( DatabaseError, NotSupportedError, connection, connections, router, transaction, ) from django.test import ( TransactionTestCase, override_settings, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.utils import CaptureQueriesContext from .models import ( City, CityCountryProxy, Country, EUCity, EUCountry, Person, PersonProfile, ) class SelectForUpdateTests(TransactionTestCase): available_apps = ['select_for_update'] def setUp(self): # This is executed in autocommit mode so that code in # run_select_for_update can see this data. self.country1 = Country.objects.create(name='Belgium') self.country2 = Country.objects.create(name='France') self.city1 = City.objects.create(name='Liberchies', country=self.country1) self.city2 = City.objects.create(name='Samois-sur-Seine', country=self.country2) self.person = Person.objects.create(name='Reinhardt', born=self.city1, died=self.city2) self.person_profile = PersonProfile.objects.create(person=self.person) # We need another database connection in transaction to test that one # connection issuing a SELECT ... FOR UPDATE will block. self.new_connection = connection.copy() def tearDown(self): try: self.end_blocking_transaction() except (DatabaseError, AttributeError): pass self.new_connection.close() def start_blocking_transaction(self): self.new_connection.set_autocommit(False) # Start a blocking transaction. At some point, # end_blocking_transaction() should be called. self.cursor = self.new_connection.cursor() sql = 'SELECT * FROM %(db_table)s %(for_update)s;' % { 'db_table': Person._meta.db_table, 'for_update': self.new_connection.ops.for_update_sql(), } self.cursor.execute(sql, ()) self.cursor.fetchone() def end_blocking_transaction(self): # Roll back the blocking transaction. self.cursor.close() self.new_connection.rollback() self.new_connection.set_autocommit(True) def has_for_update_sql(self, queries, **kwargs): # Examine the SQL that was executed to determine whether it # contains the 'SELECT..FOR UPDATE' stanza. for_update_sql = connection.ops.for_update_sql(**kwargs) return any(for_update_sql in query['sql'] for query in queries) @skipUnlessDBFeature('has_select_for_update') def test_for_update_sql_generated(self): """ The backend's FOR UPDATE variant appears in generated SQL when select_for_update is invoked. """ with transaction.atomic(), CaptureQueriesContext(connection) as ctx: list(Person.objects.all().select_for_update()) self.assertTrue(self.has_for_update_sql(ctx.captured_queries)) @skipUnlessDBFeature('has_select_for_update_nowait') def test_for_update_sql_generated_nowait(self): """ The backend's FOR UPDATE NOWAIT variant appears in generated SQL when select_for_update is invoked. """ with transaction.atomic(), CaptureQueriesContext(connection) as ctx: list(Person.objects.all().select_for_update(nowait=True)) self.assertTrue(self.has_for_update_sql(ctx.captured_queries, nowait=True)) @skipUnlessDBFeature('has_select_for_update_skip_locked') def test_for_update_sql_generated_skip_locked(self): """ The backend's FOR UPDATE SKIP LOCKED variant appears in generated SQL when select_for_update is invoked. 
""" with transaction.atomic(), CaptureQueriesContext(connection) as ctx: list(Person.objects.all().select_for_update(skip_locked=True)) self.assertTrue(self.has_for_update_sql(ctx.captured_queries, skip_locked=True)) @skipUnlessDBFeature('has_select_for_no_key_update') def test_update_sql_generated_no_key(self): """ The backend's FOR NO KEY UPDATE variant appears in generated SQL when select_for_update() is invoked. """ with transaction.atomic(), CaptureQueriesContext(connection) as ctx: list(Person.objects.all().select_for_update(no_key=True)) self.assertIs(self.has_for_update_sql(ctx.captured_queries, no_key=True), True) @skipUnlessDBFeature('has_select_for_update_of') def test_for_update_sql_generated_of(self): """ The backend's FOR UPDATE OF variant appears in the generated SQL when select_for_update() is invoked. """ with transaction.atomic(), CaptureQueriesContext(connection) as ctx: list(Person.objects.select_related( 'born__country', ).select_for_update( of=('born__country',), ).select_for_update( of=('self', 'born__country') )) features = connections['default'].features if features.select_for_update_of_column: expected = [ 'select_for_update_person"."id', 'select_for_update_country"."entity_ptr_id', ] else: expected = ['select_for_update_person', 'select_for_update_country'] expected = [connection.ops.quote_name(value) for value in expected] self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) @skipUnlessDBFeature('has_select_for_update_of') def test_for_update_sql_model_inheritance_generated_of(self): with transaction.atomic(), CaptureQueriesContext(connection) as ctx: list(EUCountry.objects.select_for_update(of=('self',))) if connection.features.select_for_update_of_column: expected = ['select_for_update_eucountry"."country_ptr_id'] else: expected = ['select_for_update_eucountry'] expected = [connection.ops.quote_name(value) for value in expected] self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) @skipUnlessDBFeature('has_select_for_update_of') def test_for_update_sql_model_inheritance_ptr_generated_of(self): with transaction.atomic(), CaptureQueriesContext(connection) as ctx: list(EUCountry.objects.select_for_update(of=('self', 'country_ptr',))) if connection.features.select_for_update_of_column: expected = [ 'select_for_update_eucountry"."country_ptr_id', 'select_for_update_country"."entity_ptr_id', ] else: expected = ['select_for_update_eucountry', 'select_for_update_country'] expected = [connection.ops.quote_name(value) for value in expected] self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) @skipUnlessDBFeature('has_select_for_update_of') def test_for_update_sql_related_model_inheritance_generated_of(self): with transaction.atomic(), CaptureQueriesContext(connection) as ctx: list(EUCity.objects.select_related('country').select_for_update( of=('self', 'country'), )) if connection.features.select_for_update_of_column: expected = [ 'select_for_update_eucity"."id', 'select_for_update_eucountry"."country_ptr_id', ] else: expected = ['select_for_update_eucity', 'select_for_update_eucountry'] expected = [connection.ops.quote_name(value) for value in expected] self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) @skipUnlessDBFeature('has_select_for_update_of') def test_for_update_sql_model_inheritance_nested_ptr_generated_of(self): with transaction.atomic(), CaptureQueriesContext(connection) as ctx: list(EUCity.objects.select_related('country').select_for_update( of=('self', 
'country__country_ptr',), )) if connection.features.select_for_update_of_column: expected = [ 'select_for_update_eucity"."id', 'select_for_update_country"."entity_ptr_id', ] else: expected = ['select_for_update_eucity', 'select_for_update_country'] expected = [connection.ops.quote_name(value) for value in expected] self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) @skipUnlessDBFeature('has_select_for_update_of') def test_for_update_sql_multilevel_model_inheritance_ptr_generated_of(self): with transaction.atomic(), CaptureQueriesContext(connection) as ctx: list(EUCountry.objects.select_for_update( of=('country_ptr', 'country_ptr__entity_ptr'), )) if connection.features.select_for_update_of_column: expected = [ 'select_for_update_country"."entity_ptr_id', 'select_for_update_entity"."id', ] else: expected = ['select_for_update_country', 'select_for_update_entity'] expected = [connection.ops.quote_name(value) for value in expected] self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) @skipUnlessDBFeature('has_select_for_update_of') def test_for_update_sql_model_proxy_generated_of(self): with transaction.atomic(), CaptureQueriesContext(connection) as ctx: list(CityCountryProxy.objects.select_related( 'country', ).select_for_update( of=('country',), )) if connection.features.select_for_update_of_column: expected = ['select_for_update_country"."entity_ptr_id'] else: expected = ['select_for_update_country'] expected = [connection.ops.quote_name(value) for value in expected] self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected)) @skipUnlessDBFeature('has_select_for_update_of') def test_for_update_of_followed_by_values(self): with transaction.atomic(): values = list(Person.objects.select_for_update(of=('self',)).values('pk')) self.assertEqual(values, [{'pk': self.person.pk}]) @skipUnlessDBFeature('has_select_for_update_of') def test_for_update_of_followed_by_values_list(self): with transaction.atomic(): values = list(Person.objects.select_for_update(of=('self',)).values_list('pk')) self.assertEqual(values, [(self.person.pk,)]) @skipUnlessDBFeature('has_select_for_update_of') def test_for_update_of_self_when_self_is_not_selected(self): """ select_for_update(of=['self']) when the only columns selected are from related tables. """ with transaction.atomic(): values = list(Person.objects.select_related('born').select_for_update(of=('self',)).values('born__name')) self.assertEqual(values, [{'born__name': self.city1.name}]) @skipUnlessDBFeature( 'has_select_for_update_of', 'supports_select_for_update_with_limit', ) def test_for_update_of_with_exists(self): with transaction.atomic(): qs = Person.objects.select_for_update(of=('self', 'born')) self.assertIs(qs.exists(), True) @skipUnlessDBFeature('has_select_for_update_nowait') def test_nowait_raises_error_on_block(self): """ If nowait is specified, we expect an error to be raised rather than blocking. """ self.start_blocking_transaction() status = [] thread = threading.Thread( target=self.run_select_for_update, args=(status,), kwargs={'nowait': True}, ) thread.start() time.sleep(1) thread.join() self.end_blocking_transaction() self.assertIsInstance(status[-1], DatabaseError) @skipUnlessDBFeature('has_select_for_update_skip_locked') def test_skip_locked_skips_locked_rows(self): """ If skip_locked is specified, the locked row is skipped resulting in Person.DoesNotExist. 
""" self.start_blocking_transaction() status = [] thread = threading.Thread( target=self.run_select_for_update, args=(status,), kwargs={'skip_locked': True}, ) thread.start() time.sleep(1) thread.join() self.end_blocking_transaction() self.assertIsInstance(status[-1], Person.DoesNotExist) @skipIfDBFeature('has_select_for_update_nowait') @skipUnlessDBFeature('has_select_for_update') def test_unsupported_nowait_raises_error(self): """ NotSupportedError is raised if a SELECT...FOR UPDATE NOWAIT is run on a database backend that supports FOR UPDATE but not NOWAIT. """ with self.assertRaisesMessage(NotSupportedError, 'NOWAIT is not supported on this database backend.'): with transaction.atomic(): Person.objects.select_for_update(nowait=True).get() @skipIfDBFeature('has_select_for_update_skip_locked') @skipUnlessDBFeature('has_select_for_update') def test_unsupported_skip_locked_raises_error(self): """ NotSupportedError is raised if a SELECT...FOR UPDATE SKIP LOCKED is run on a database backend that supports FOR UPDATE but not SKIP LOCKED. """ with self.assertRaisesMessage(NotSupportedError, 'SKIP LOCKED is not supported on this database backend.'): with transaction.atomic(): Person.objects.select_for_update(skip_locked=True).get() @skipIfDBFeature('has_select_for_update_of') @skipUnlessDBFeature('has_select_for_update') def test_unsupported_of_raises_error(self): """ NotSupportedError is raised if a SELECT...FOR UPDATE OF... is run on a database backend that supports FOR UPDATE but not OF. """ msg = 'FOR UPDATE OF is not supported on this database backend.' with self.assertRaisesMessage(NotSupportedError, msg): with transaction.atomic(): Person.objects.select_for_update(of=('self',)).get() @skipIfDBFeature('has_select_for_no_key_update') @skipUnlessDBFeature('has_select_for_update') def test_unsuported_no_key_raises_error(self): """ NotSupportedError is raised if a SELECT...FOR NO KEY UPDATE... is run on a database backend that supports FOR UPDATE but not NO KEY. """ msg = 'FOR NO KEY UPDATE is not supported on this database backend.' with self.assertRaisesMessage(NotSupportedError, msg): with transaction.atomic(): Person.objects.select_for_update(no_key=True).get() @skipUnlessDBFeature('has_select_for_update', 'has_select_for_update_of') def test_unrelated_of_argument_raises_error(self): """ FieldError is raised if a non-relation field is specified in of=(...). """ msg = ( 'Invalid field name(s) given in select_for_update(of=(...)): %s. ' 'Only relational fields followed in the query are allowed. ' 'Choices are: self, born, born__country, ' 'born__country__entity_ptr.' ) invalid_of = [ ('nonexistent',), ('name',), ('born__nonexistent',), ('born__name',), ('born__nonexistent', 'born__name'), ] for of in invalid_of: with self.subTest(of=of): with self.assertRaisesMessage(FieldError, msg % ', '.join(of)): with transaction.atomic(): Person.objects.select_related('born__country').select_for_update(of=of).get() @skipUnlessDBFeature('has_select_for_update', 'has_select_for_update_of') def test_related_but_unselected_of_argument_raises_error(self): """ FieldError is raised if a relation field that is not followed in the query is specified in of=(...). """ msg = ( 'Invalid field name(s) given in select_for_update(of=(...)): %s. ' 'Only relational fields followed in the query are allowed. ' 'Choices are: self, born, profile.' 
) for name in ['born__country', 'died', 'died__country']: with self.subTest(name=name): with self.assertRaisesMessage(FieldError, msg % name): with transaction.atomic(): Person.objects.select_related( 'born', 'profile', ).exclude(profile=None).select_for_update(of=(name,)).get() @skipUnlessDBFeature('has_select_for_update', 'has_select_for_update_of') def test_model_inheritance_of_argument_raises_error_ptr_in_choices(self): msg = ( 'Invalid field name(s) given in select_for_update(of=(...)): ' 'name. Only relational fields followed in the query are allowed. ' 'Choices are: self, %s.' ) with self.assertRaisesMessage( FieldError, msg % 'country, country__country_ptr, country__country_ptr__entity_ptr', ): with transaction.atomic(): EUCity.objects.select_related( 'country', ).select_for_update(of=('name',)).get() with self.assertRaisesMessage(FieldError, msg % 'country_ptr, country_ptr__entity_ptr'): with transaction.atomic(): EUCountry.objects.select_for_update(of=('name',)).get() @skipUnlessDBFeature('has_select_for_update', 'has_select_for_update_of') def test_model_proxy_of_argument_raises_error_proxy_field_in_choices(self): msg = ( 'Invalid field name(s) given in select_for_update(of=(...)): ' 'name. Only relational fields followed in the query are allowed. ' 'Choices are: self, country, country__entity_ptr.' ) with self.assertRaisesMessage(FieldError, msg): with transaction.atomic(): CityCountryProxy.objects.select_related( 'country', ).select_for_update(of=('name',)).get() @skipUnlessDBFeature('has_select_for_update', 'has_select_for_update_of') def test_reverse_one_to_one_of_arguments(self): """ Reverse OneToOneFields may be included in of=(...) as long as NULLs are excluded because LEFT JOIN isn't allowed in SELECT FOR UPDATE. """ with transaction.atomic(): person = Person.objects.select_related( 'profile', ).exclude(profile=None).select_for_update(of=('profile',)).get() self.assertEqual(person.profile, self.person_profile) @skipUnlessDBFeature('has_select_for_update') def test_for_update_after_from(self): features_class = connections['default'].features.__class__ attribute_to_patch = "%s.%s.for_update_after_from" % (features_class.__module__, features_class.__name__) with mock.patch(attribute_to_patch, return_value=True): with transaction.atomic(): self.assertIn('FOR UPDATE WHERE', str(Person.objects.filter(name='foo').select_for_update().query)) @skipUnlessDBFeature('has_select_for_update') def test_for_update_requires_transaction(self): """ A TransactionManagementError is raised when a select_for_update query is executed outside of a transaction. """ msg = 'select_for_update cannot be used outside of a transaction.' with self.assertRaisesMessage(transaction.TransactionManagementError, msg): list(Person.objects.all().select_for_update()) @skipUnlessDBFeature('has_select_for_update') def test_for_update_requires_transaction_only_in_execution(self): """ No TransactionManagementError is raised when select_for_update is invoked outside of a transaction - only when the query is executed. """ people = Person.objects.all().select_for_update() msg = 'select_for_update cannot be used outside of a transaction.' 
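        # Constructing the queryset above runs no SQL, so nothing is raised
        # yet; the TransactionManagementError only surfaces once list(people)
        # forces evaluation outside of an atomic block.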
with self.assertRaisesMessage(transaction.TransactionManagementError, msg): list(people) @skipUnlessDBFeature('supports_select_for_update_with_limit') def test_select_for_update_with_limit(self): other = Person.objects.create(name='Grappeli', born=self.city1, died=self.city2) with transaction.atomic(): qs = list(Person.objects.all().order_by('pk').select_for_update()[1:2]) self.assertEqual(qs[0], other) @skipIfDBFeature('supports_select_for_update_with_limit') def test_unsupported_select_for_update_with_limit(self): msg = 'LIMIT/OFFSET is not supported with select_for_update on this database backend.' with self.assertRaisesMessage(NotSupportedError, msg): with transaction.atomic(): list(Person.objects.all().order_by('pk').select_for_update()[1:2]) def run_select_for_update(self, status, **kwargs): """ Utility method that runs a SELECT FOR UPDATE against all Person instances. After the select_for_update, it attempts to update the name of the only record, save, and commit. This function expects to run in a separate thread. """ status.append('started') try: # We need to enter transaction management again, as this is done on # per-thread basis with transaction.atomic(): person = Person.objects.select_for_update(**kwargs).get() person.name = 'Fred' person.save() except (DatabaseError, Person.DoesNotExist) as e: status.append(e) finally: # This method is run in a separate thread. It uses its own # database connection. Close it without waiting for the GC. connection.close() @skipUnlessDBFeature('has_select_for_update') @skipUnlessDBFeature('supports_transactions') def test_block(self): """ A thread running a select_for_update that accesses rows being touched by a similar operation on another connection blocks correctly. """ # First, let's start the transaction in our thread. self.start_blocking_transaction() # Now, try it again using the ORM's select_for_update # facility. Do this in a separate thread. status = [] thread = threading.Thread( target=self.run_select_for_update, args=(status,) ) # The thread should immediately block, but we'll sleep # for a bit to make sure. thread.start() sanity_count = 0 while len(status) != 1 and sanity_count < 10: sanity_count += 1 time.sleep(1) if sanity_count >= 10: raise ValueError('Thread did not run and block') # Check the person hasn't been updated. Since this isn't # using FOR UPDATE, it won't block. p = Person.objects.get(pk=self.person.pk) self.assertEqual('Reinhardt', p.name) # When we end our blocking transaction, our thread should # be able to continue. self.end_blocking_transaction() thread.join(5.0) # Check the thread has finished. Assuming it has, we should # find that it has updated the person's name. self.assertFalse(thread.is_alive()) # We must commit the transaction to ensure that MySQL gets a fresh read, # since by default it runs in REPEATABLE READ mode transaction.commit() p = Person.objects.get(pk=self.person.pk) self.assertEqual('Fred', p.name) @skipUnlessDBFeature('has_select_for_update') def test_raw_lock_not_available(self): """ Running a raw query which can't obtain a FOR UPDATE lock raises the correct exception """ self.start_blocking_transaction() def raw(status): try: list( Person.objects.raw( 'SELECT * FROM %s %s' % ( Person._meta.db_table, connection.ops.for_update_sql(nowait=True) ) ) ) except DatabaseError as e: status.append(e) finally: # This method is run in a separate thread. It uses its own # database connection. Close it without waiting for the GC. # Connection cannot be closed on Oracle because cursor is still # open. 
if connection.vendor != 'oracle': connection.close() status = [] thread = threading.Thread(target=raw, kwargs={'status': status}) thread.start() time.sleep(1) thread.join() self.end_blocking_transaction() self.assertIsInstance(status[-1], DatabaseError) @skipUnlessDBFeature('has_select_for_update') @override_settings(DATABASE_ROUTERS=[TestRouter()]) def test_select_for_update_on_multidb(self): query = Person.objects.select_for_update() self.assertEqual(router.db_for_write(Person), query.db) @skipUnlessDBFeature('has_select_for_update') def test_select_for_update_with_get(self): with transaction.atomic(): person = Person.objects.select_for_update().get(name='Reinhardt') self.assertEqual(person.name, 'Reinhardt') def test_nowait_and_skip_locked(self): with self.assertRaisesMessage(ValueError, 'The nowait option cannot be used with skip_locked.'): Person.objects.select_for_update(nowait=True, skip_locked=True) def test_ordered_select_for_update(self): """ Subqueries should respect ordering as an ORDER BY clause may be useful to specify a row locking order to prevent deadlocks (#27193). """ with transaction.atomic(): qs = Person.objects.filter(id__in=Person.objects.order_by('-id').select_for_update()) self.assertIn('ORDER BY', str(qs.query))
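

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the test suite above): the locking pattern
# these tests exercise. It relies on the transaction and Person imports
# already present at the top of this module. select_for_update() only takes
# the row lock when the queryset is evaluated inside an open transaction; on
# backends with NOWAIT support, nowait=True raises DatabaseError instead of
# blocking. The function below is our own example, not part of this module.
def _rename_person_with_lock(pk, new_name, nowait=False):
    with transaction.atomic():
        # SELECT ... FOR UPDATE: blocks until a competing transaction holding
        # the row lock commits, or raises immediately when nowait=True.
        person = Person.objects.select_for_update(nowait=nowait).get(pk=pk)
        person.name = new_name
        person.save()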
5564eb4e800138f7157df9f8c26a12c9bd835013dd9b35c7585465e30724ec0c
import re from django.conf import settings from django.contrib.sessions.backends.cache import SessionStore from django.core.exceptions import ImproperlyConfigured from django.http import HttpRequest, HttpResponse, UnreadablePostError from django.middleware.csrf import ( CSRF_ALLOWED_CHARS, CSRF_SECRET_LENGTH, CSRF_SESSION_KEY, CSRF_TOKEN_LENGTH, REASON_BAD_ORIGIN, REASON_CSRF_TOKEN_MISSING, REASON_NO_CSRF_COOKIE, CsrfViewMiddleware, InvalidTokenFormat, RejectRequest, _check_token_format, _does_token_match, _mask_cipher_secret, _unmask_cipher_token, get_token, rotate_token, ) from django.test import SimpleTestCase, override_settings from django.test.utils import ignore_warnings from django.utils.deprecation import RemovedInDjango50Warning from django.views.decorators.csrf import csrf_exempt, requires_csrf_token from .views import ( ensure_csrf_cookie_view, ensured_and_protected_view, non_token_view_using_request_processor, post_form_view, protected_view, sandwiched_rotate_token_view, token_view, ) # This is a test (unmasked) CSRF cookie / secret. TEST_SECRET = 'lcccccccX2kcccccccY2jcccccccssIC' # Two masked versions of TEST_SECRET for testing purposes. MASKED_TEST_SECRET1 = '1bcdefghij2bcdefghij3bcdefghij4bcdefghij5bcdefghij6bcdefghijABCD' MASKED_TEST_SECRET2 = '2JgchWvM1tpxT2lfz9aydoXW9yT1DN3NdLiejYxOOlzzV4nhBbYqmqZYbAV3V5Bf' class CsrfFunctionTestMixin: # This method depends on _unmask_cipher_token() being correct. def assertMaskedSecretCorrect(self, masked_secret, secret): """Test that a string is a valid masked version of a secret.""" self.assertEqual(len(masked_secret), CSRF_TOKEN_LENGTH) self.assertEqual(len(secret), CSRF_SECRET_LENGTH) self.assertTrue( set(masked_secret).issubset(set(CSRF_ALLOWED_CHARS)), msg=f'invalid characters in {masked_secret!r}', ) actual = _unmask_cipher_token(masked_secret) self.assertEqual(actual, secret) class CsrfFunctionTests(CsrfFunctionTestMixin, SimpleTestCase): def test_unmask_cipher_token(self): cases = [ (TEST_SECRET, MASKED_TEST_SECRET1), (TEST_SECRET, MASKED_TEST_SECRET2), ( 32 * 'a', 'vFioG3XOLyGyGsPRFyB9iYUs341ufzIEvFioG3XOLyGyGsPRFyB9iYUs341ufzIE', ), (32 * 'a', 64 * 'a'), (32 * 'a', 64 * 'b'), (32 * 'b', 32 * 'a' + 32 * 'b'), (32 * 'b', 32 * 'b' + 32 * 'c'), (32 * 'c', 32 * 'a' + 32 * 'c'), ] for secret, masked_secret in cases: with self.subTest(masked_secret=masked_secret): actual = _unmask_cipher_token(masked_secret) self.assertEqual(actual, secret) def test_mask_cipher_secret(self): cases = [ 32 * 'a', TEST_SECRET, 'da4SrUiHJYoJ0HYQ0vcgisoIuFOxx4ER', ] for secret in cases: with self.subTest(secret=secret): masked = _mask_cipher_secret(secret) self.assertMaskedSecretCorrect(masked, secret) def test_get_token_csrf_cookie_set(self): request = HttpRequest() request.META['CSRF_COOKIE'] = TEST_SECRET self.assertNotIn('CSRF_COOKIE_NEEDS_UPDATE', request.META) token = get_token(request) self.assertMaskedSecretCorrect(token, TEST_SECRET) # The existing cookie is preserved. 
self.assertEqual(request.META['CSRF_COOKIE'], TEST_SECRET) self.assertIs(request.META['CSRF_COOKIE_NEEDS_UPDATE'], True) def test_get_token_csrf_cookie_not_set(self): request = HttpRequest() self.assertNotIn('CSRF_COOKIE', request.META) self.assertNotIn('CSRF_COOKIE_NEEDS_UPDATE', request.META) token = get_token(request) cookie = request.META['CSRF_COOKIE'] self.assertMaskedSecretCorrect(token, cookie) self.assertIs(request.META['CSRF_COOKIE_NEEDS_UPDATE'], True) def test_rotate_token(self): request = HttpRequest() request.META['CSRF_COOKIE'] = TEST_SECRET self.assertNotIn('CSRF_COOKIE_NEEDS_UPDATE', request.META) rotate_token(request) # The underlying secret was changed. cookie = request.META['CSRF_COOKIE'] self.assertEqual(len(cookie), CSRF_SECRET_LENGTH) self.assertNotEqual(cookie, TEST_SECRET) self.assertIs(request.META['CSRF_COOKIE_NEEDS_UPDATE'], True) def test_check_token_format_valid(self): cases = [ # A token of length CSRF_SECRET_LENGTH. TEST_SECRET, # A token of length CSRF_TOKEN_LENGTH. MASKED_TEST_SECRET1, 64 * 'a', ] for token in cases: with self.subTest(token=token): actual = _check_token_format(token) self.assertIsNone(actual) def test_check_token_format_invalid(self): cases = [ (64 * '*', 'has invalid characters'), (16 * 'a', 'has incorrect length'), ] for token, expected_message in cases: with self.subTest(token=token): with self.assertRaisesMessage(InvalidTokenFormat, expected_message): _check_token_format(token) def test_does_token_match(self): cases = [ # Masked tokens match. ((MASKED_TEST_SECRET1, TEST_SECRET), True), ((MASKED_TEST_SECRET2, TEST_SECRET), True), ((64 * 'a', _unmask_cipher_token(64 * 'a')), True), # Unmasked tokens match. ((TEST_SECRET, TEST_SECRET), True), ((32 * 'a', 32 * 'a'), True), # Incorrect tokens don't match. ((32 * 'a', TEST_SECRET), False), ((64 * 'a', TEST_SECRET), False), ] for (token, secret), expected in cases: with self.subTest(token=token, secret=secret): actual = _does_token_match(token, secret) self.assertIs(actual, expected) def test_does_token_match_wrong_token_length(self): with self.assertRaises(AssertionError): _does_token_match(16 * 'a', TEST_SECRET) class TestingSessionStore(SessionStore): """ A version of SessionStore that stores what cookie values are passed to set_cookie() when CSRF_USE_SESSIONS=True. """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # This is a list of the cookie values passed to set_cookie() over # the course of the request-response. self._cookies_set = [] def __setitem__(self, key, value): super().__setitem__(key, value) self._cookies_set.append(value) class TestingHttpRequest(HttpRequest): """ A version of HttpRequest that lets one track and change some things more easily. """ def __init__(self): super().__init__() self.session = TestingSessionStore() def is_secure(self): return getattr(self, '_is_secure_override', False) class PostErrorRequest(TestingHttpRequest): """ TestingHttpRequest that can raise errors when accessing POST data. """ post_error = None def _get_post(self): if self.post_error is not None: raise self.post_error return self._post def _set_post(self, post): self._post = post POST = property(_get_post, _set_post) class CsrfViewMiddlewareTestMixin(CsrfFunctionTestMixin): """ Shared methods and tests for session-based and cookie-based tokens. 
""" _csrf_id_cookie = MASKED_TEST_SECRET1 _csrf_id_token = MASKED_TEST_SECRET2 def _set_csrf_cookie(self, req, cookie): raise NotImplementedError('This method must be implemented by a subclass.') def _read_csrf_cookie(self, req, resp): """ Return the CSRF cookie as a string, or False if no cookie is present. """ raise NotImplementedError('This method must be implemented by a subclass.') def _get_cookies_set(self, req, resp): """ Return a list of the cookie values passed to set_cookie() over the course of the request-response. """ raise NotImplementedError('This method must be implemented by a subclass.') def _get_request(self, method=None, cookie=None, request_class=None): if method is None: method = 'GET' if request_class is None: request_class = TestingHttpRequest req = request_class() req.method = method if cookie is not None: self._set_csrf_cookie(req, cookie) return req def _get_csrf_cookie_request( self, method=None, cookie=None, post_token=None, meta_token=None, token_header=None, request_class=None, ): """ The method argument defaults to "GET". The cookie argument defaults to this class's default test cookie. The post_token and meta_token arguments are included in the request's req.POST and req.META headers, respectively, when that argument is provided and non-None. The token_header argument is the header key to use for req.META, defaults to "HTTP_X_CSRFTOKEN". """ if cookie is None: cookie = self._csrf_id_cookie if token_header is None: token_header = 'HTTP_X_CSRFTOKEN' req = self._get_request( method=method, cookie=cookie, request_class=request_class, ) if post_token is not None: req.POST['csrfmiddlewaretoken'] = post_token if meta_token is not None: req.META[token_header] = meta_token return req def _get_POST_csrf_cookie_request( self, cookie=None, post_token=None, meta_token=None, token_header=None, request_class=None, ): return self._get_csrf_cookie_request( method='POST', cookie=cookie, post_token=post_token, meta_token=meta_token, token_header=token_header, request_class=request_class, ) def _get_POST_request_with_token(self, cookie=None, request_class=None): """The cookie argument defaults to this class's default test cookie.""" return self._get_POST_csrf_cookie_request( cookie=cookie, post_token=self._csrf_id_token, request_class=request_class, ) # This method depends on _unmask_cipher_token() being correct. def _check_token_present(self, response, csrf_secret=None): if csrf_secret is None: csrf_secret = TEST_SECRET text = str(response.content, response.charset) match = re.search('name="csrfmiddlewaretoken" value="(.*?)"', text) self.assertTrue( match, f'Could not find a csrfmiddlewaretoken value in: {text}', ) csrf_token = match[1] self.assertMaskedSecretCorrect(csrf_token, csrf_secret) def test_process_response_get_token_not_used(self): """ If get_token() is not called, the view middleware does not add a cookie. """ # This is important to make pages cacheable. Pages which do call # get_token(), assuming they use the token, are not cacheable because # the token is specific to the user req = self._get_request() # non_token_view_using_request_processor does not call get_token(), but # does use the csrf request processor. By using this, we are testing # that the view processor is properly lazy and doesn't call get_token() # until needed. 
mw = CsrfViewMiddleware(non_token_view_using_request_processor) mw.process_request(req) mw.process_view(req, non_token_view_using_request_processor, (), {}) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertIs(csrf_cookie, False) def _check_bad_or_missing_cookie(self, cookie, expected): """Passing None for cookie includes no cookie.""" req = self._get_request(method='POST', cookie=cookie) mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) with self.assertLogs('django.security.csrf', 'WARNING') as cm: resp = mw.process_view(req, post_form_view, (), {}) self.assertEqual(403, resp.status_code) self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % expected) def test_no_csrf_cookie(self): """ If no CSRF cookies is present, the middleware rejects the incoming request. This will stop login CSRF. """ self._check_bad_or_missing_cookie(None, REASON_NO_CSRF_COOKIE) def _check_bad_or_missing_token( self, expected, post_token=None, meta_token=None, token_header=None, ): req = self._get_POST_csrf_cookie_request( post_token=post_token, meta_token=meta_token, token_header=token_header, ) mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) with self.assertLogs('django.security.csrf', 'WARNING') as cm: resp = mw.process_view(req, post_form_view, (), {}) self.assertEqual(403, resp.status_code) self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % expected) def test_csrf_cookie_bad_or_missing_token(self): """ If a CSRF cookie is present but the token is missing or invalid, the middleware rejects the incoming request. """ cases = [ (None, None, REASON_CSRF_TOKEN_MISSING), (16 * 'a', None, 'CSRF token from POST has incorrect length.'), (64 * '*', None, 'CSRF token from POST has invalid characters.'), (64 * 'a', None, 'CSRF token from POST incorrect.'), ( None, 16 * 'a', "CSRF token from the 'X-Csrftoken' HTTP header has incorrect length.", ), ( None, 64 * '*', "CSRF token from the 'X-Csrftoken' HTTP header has invalid characters.", ), ( None, 64 * 'a', "CSRF token from the 'X-Csrftoken' HTTP header incorrect.", ), ] for post_token, meta_token, expected in cases: with self.subTest(post_token=post_token, meta_token=meta_token): self._check_bad_or_missing_token( expected, post_token=post_token, meta_token=meta_token, ) @override_settings(CSRF_HEADER_NAME='HTTP_X_CSRFTOKEN_CUSTOMIZED') def test_csrf_cookie_bad_token_custom_header(self): """ If a CSRF cookie is present and an invalid token is passed via a custom CSRF_HEADER_NAME, the middleware rejects the incoming request. """ expected = ( "CSRF token from the 'X-Csrftoken-Customized' HTTP header has " "incorrect length." ) self._check_bad_or_missing_token( expected, meta_token=16 * 'a', token_header='HTTP_X_CSRFTOKEN_CUSTOMIZED', ) def test_process_request_csrf_cookie_and_token(self): """ If both a cookie and a token is present, the middleware lets it through. 
""" req = self._get_POST_request_with_token() mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) def test_process_request_csrf_cookie_no_token_exempt_view(self): """ If a CSRF cookie is present and no token, but the csrf_exempt decorator has been applied to the view, the middleware lets it through """ req = self._get_POST_csrf_cookie_request() mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, csrf_exempt(post_form_view), (), {}) self.assertIsNone(resp) def test_csrf_token_in_header(self): """ The token may be passed in a header instead of in the form. """ req = self._get_POST_csrf_cookie_request(meta_token=self._csrf_id_token) mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(CSRF_HEADER_NAME='HTTP_X_CSRFTOKEN_CUSTOMIZED') def test_csrf_token_in_header_with_customized_name(self): """ settings.CSRF_HEADER_NAME can be used to customize the CSRF header name """ req = self._get_POST_csrf_cookie_request( meta_token=self._csrf_id_token, token_header='HTTP_X_CSRFTOKEN_CUSTOMIZED', ) mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) def test_put_and_delete_rejected(self): """ HTTP PUT and DELETE methods have protection """ req = self._get_request(method='PUT') mw = CsrfViewMiddleware(post_form_view) with self.assertLogs('django.security.csrf', 'WARNING') as cm: resp = mw.process_view(req, post_form_view, (), {}) self.assertEqual(403, resp.status_code) self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_NO_CSRF_COOKIE) req = self._get_request(method='DELETE') with self.assertLogs('django.security.csrf', 'WARNING') as cm: resp = mw.process_view(req, post_form_view, (), {}) self.assertEqual(403, resp.status_code) self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_NO_CSRF_COOKIE) def test_put_and_delete_allowed(self): """ HTTP PUT and DELETE can get through with X-CSRFToken and a cookie. """ req = self._get_csrf_cookie_request(method='PUT', meta_token=self._csrf_id_token) mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) req = self._get_csrf_cookie_request(method='DELETE', meta_token=self._csrf_id_token) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) def test_rotate_token_triggers_second_reset(self): """ If rotate_token() is called after the token is reset in CsrfViewMiddleware's process_response() and before another call to the same process_response(), the cookie is reset a second time. """ req = self._get_POST_request_with_token() resp = sandwiched_rotate_token_view(req) self.assertContains(resp, 'OK') actual_secret = self._read_csrf_cookie(req, resp) # set_cookie() was called a second time with a different secret. cookies_set = self._get_cookies_set(req, resp) # Only compare the last two to exclude a spurious entry that's present # when CsrfViewMiddlewareUseSessionsTests is running. self.assertEqual(cookies_set[-2:], [TEST_SECRET, actual_secret]) self.assertNotEqual(actual_secret, TEST_SECRET) # Tests for the template tag method def test_token_node_no_csrf_cookie(self): """ CsrfTokenNode works when no CSRF cookie is set. 
""" req = self._get_request() resp = token_view(req) token = get_token(req) self.assertIsNotNone(token) csrf_secret = _unmask_cipher_token(token) self._check_token_present(resp, csrf_secret) def test_token_node_empty_csrf_cookie(self): """ A new token is sent if the csrf_cookie is the empty string. """ req = self._get_request(cookie='') mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = token_view(req) token = get_token(req) self.assertIsNotNone(token) csrf_secret = _unmask_cipher_token(token) self._check_token_present(resp, csrf_secret) def test_token_node_with_csrf_cookie(self): """ CsrfTokenNode works when a CSRF cookie is set. """ req = self._get_csrf_cookie_request() mw = CsrfViewMiddleware(token_view) mw.process_request(req) mw.process_view(req, token_view, (), {}) resp = token_view(req) self._check_token_present(resp) def test_get_token_for_exempt_view(self): """ get_token still works for a view decorated with 'csrf_exempt'. """ req = self._get_csrf_cookie_request() mw = CsrfViewMiddleware(token_view) mw.process_request(req) mw.process_view(req, csrf_exempt(token_view), (), {}) resp = token_view(req) self._check_token_present(resp) def test_get_token_for_requires_csrf_token_view(self): """ get_token() works for a view decorated solely with requires_csrf_token. """ req = self._get_csrf_cookie_request() resp = requires_csrf_token(token_view)(req) self._check_token_present(resp) def test_token_node_with_new_csrf_cookie(self): """ CsrfTokenNode works when a CSRF cookie is created by the middleware (when one was not already present) """ req = self._get_request() mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self._check_token_present(resp, csrf_cookie) def test_cookie_not_reset_on_accepted_request(self): """ The csrf token used in posts is changed on every request (although stays equivalent). The csrf cookie should not change on accepted requests. If it appears in the response, it should keep its value. 
""" req = self._get_POST_request_with_token() mw = CsrfViewMiddleware(token_view) mw.process_request(req) mw.process_view(req, token_view, (), {}) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertEqual( csrf_cookie, TEST_SECRET, 'CSRF cookie was changed on an accepted request', ) @override_settings(DEBUG=True, ALLOWED_HOSTS=['www.example.com']) def test_https_bad_referer(self): """ A POST HTTPS request with a bad referer is rejected """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://www.evil.org/somepage' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(post_form_view) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - https://www.evil.org/somepage does not ' 'match any trusted origins.', status_code=403, ) def _check_referer_rejects(self, mw, req): with self.assertRaises(RejectRequest): mw._check_referer(req) @override_settings(DEBUG=True) def test_https_no_referer(self): """A POST HTTPS request with a missing referer is rejected.""" req = self._get_POST_request_with_token() req._is_secure_override = True mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - no Referer.', status_code=403, ) def test_https_malformed_host(self): """ CsrfViewMiddleware generates a 403 response if it receives an HTTPS request with a bad host. """ req = self._get_request(method='POST') req._is_secure_override = True req.META['HTTP_HOST'] = '@malformed' req.META['HTTP_REFERER'] = 'https://www.evil.org/somepage' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(token_view) expected = ( 'Referer checking failed - https://www.evil.org/somepage does not ' 'match any trusted origins.' ) with self.assertRaisesMessage(RejectRequest, expected): mw._check_referer(req) response = mw.process_view(req, token_view, (), {}) self.assertEqual(response.status_code, 403) def test_origin_malformed_host(self): req = self._get_request(method='POST') req._is_secure_override = True req.META['HTTP_HOST'] = '@malformed' req.META['HTTP_ORIGIN'] = 'https://www.evil.org' mw = CsrfViewMiddleware(token_view) self._check_referer_rejects(mw, req) response = mw.process_view(req, token_view, (), {}) self.assertEqual(response.status_code, 403) @override_settings(DEBUG=True) def test_https_malformed_referer(self): """ A POST HTTPS request with a bad referer is rejected. """ malformed_referer_msg = 'Referer checking failed - Referer is malformed.' 
req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_REFERER'] = 'http://http://www.example.com/' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - Referer is insecure while host is secure.', status_code=403, ) # Empty req.META['HTTP_REFERER'] = '' self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) # Non-ASCII req.META['HTTP_REFERER'] = 'ØBöIß' self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) # missing scheme # >>> urlparse('//example.com/') # ParseResult(scheme='', netloc='example.com', path='/', params='', query='', fragment='') req.META['HTTP_REFERER'] = '//example.com/' self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) # missing netloc # >>> urlparse('https://') # ParseResult(scheme='https', netloc='', path='', params='', query='', fragment='') req.META['HTTP_REFERER'] = 'https://' self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) # Invalid URL # >>> urlparse('https://[') # ValueError: Invalid IPv6 URL req.META['HTTP_REFERER'] = 'https://[' self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_https_good_referer(self): """ A POST HTTPS request with a good referer is accepted. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://www.example.com/somepage' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_https_good_referer_2(self): """ A POST HTTPS request with a good referer is accepted where the referer contains no trailing slash. """ # See ticket #15617 req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://www.example.com' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) def _test_https_good_referer_behind_proxy(self): req = self._get_POST_request_with_token() req._is_secure_override = True req.META.update({ 'HTTP_HOST': '10.0.0.2', 'HTTP_REFERER': 'https://www.example.com/somepage', 'SERVER_PORT': '8080', 'HTTP_X_FORWARDED_HOST': 'www.example.com', 'HTTP_X_FORWARDED_PORT': '443', }) mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(CSRF_TRUSTED_ORIGINS=['https://dashboard.example.com']) def test_https_good_referer_malformed_host(self): """ A POST HTTPS request is accepted if it receives a good referer with a bad host. 
""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = '@malformed' req.META['HTTP_REFERER'] = 'https://dashboard.example.com/somepage' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['https://dashboard.example.com']) def test_https_csrf_trusted_origin_allowed(self): """ A POST HTTPS request with a referer added to the CSRF_TRUSTED_ORIGINS setting is accepted. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://dashboard.example.com' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['https://*.example.com']) def test_https_csrf_wildcard_trusted_origin_allowed(self): """ A POST HTTPS request with a referer that matches a CSRF_TRUSTED_ORIGINS wildcard is accepted. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://dashboard.example.com' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) def _test_https_good_referer_matches_cookie_domain(self): req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_REFERER'] = 'https://foo.example.com/' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) def _test_https_good_referer_matches_cookie_domain_with_different_port(self): req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://foo.example.com:4443/' req.META['SERVER_PORT'] = '4443' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) def test_ensures_csrf_cookie_no_logging(self): """ ensure_csrf_cookie() doesn't log warnings (#19436). """ with self.assertNoLogs('django.request', 'WARNING'): req = self._get_request() ensure_csrf_cookie_view(req) def test_reading_post_data_raises_unreadable_post_error(self): """ An UnreadablePostError raised while reading the POST data should be handled by the middleware. """ req = self._get_POST_request_with_token() mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) req = self._get_POST_request_with_token(request_class=PostErrorRequest) req.post_error = UnreadablePostError('Error reading input data.') mw.process_request(req) with self.assertLogs('django.security.csrf', 'WARNING') as cm: resp = mw.process_view(req, post_form_view, (), {}) self.assertEqual(resp.status_code, 403) self.assertEqual( cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_CSRF_TOKEN_MISSING, ) def test_reading_post_data_raises_os_error(self): """ An OSError raised while reading the POST data should not be handled by the middleware. 
""" mw = CsrfViewMiddleware(post_form_view) req = self._get_POST_request_with_token(request_class=PostErrorRequest) req.post_error = OSError('Deleted directories/Missing permissions.') mw.process_request(req) with self.assertRaises(OSError): mw.process_view(req, post_form_view, (), {}) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_bad_origin_bad_domain(self): """A request with a bad origin is rejected.""" req = self._get_POST_request_with_token() req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://www.evil.org' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_bad_origin_null_origin(self): """A request with a null origin is rejected.""" req = self._get_POST_request_with_token() req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'null' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_bad_origin_bad_protocol(self): """A request with an origin with wrong protocol is rejected.""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'http://example.com' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) @override_settings( ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=[ 'http://no-match.com', 'https://*.example.com', 'http://*.no-match.com', 'http://*.no-match-2.com', ], ) def test_bad_origin_csrf_trusted_origin_bad_protocol(self): """ A request with an origin with the wrong protocol compared to CSRF_TRUSTED_ORIGINS is rejected. 
""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'http://foo.example.com' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) self.assertEqual(mw.allowed_origins_exact, {'http://no-match.com'}) self.assertEqual(mw.allowed_origin_subdomains, { 'https': ['.example.com'], 'http': ['.no-match.com', '.no-match-2.com'], }) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_bad_origin_cannot_be_parsed(self): """ A POST request with an origin that can't be parsed by urlparse() is rejected. """ req = self._get_POST_request_with_token() req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://[' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_good_origin_insecure(self): """A POST HTTP request with a good origin is accepted.""" req = self._get_POST_request_with_token() req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'http://www.example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), True) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_good_origin_secure(self): """A POST HTTPS request with a good origin is accepted.""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://www.example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), True) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['https://dashboard.example.com']) def test_good_origin_csrf_trusted_origin_allowed(self): """ A POST request with an origin added to the CSRF_TRUSTED_ORIGINS setting is accepted. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://dashboard.example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), True) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) self.assertEqual(mw.allowed_origins_exact, {'https://dashboard.example.com'}) self.assertEqual(mw.allowed_origin_subdomains, {}) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['https://*.example.com']) def test_good_origin_wildcard_csrf_trusted_origin_allowed(self): """ A POST request with an origin that matches a CSRF_TRUSTED_ORIGINS wildcard is accepted. 
""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://foo.example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), True) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) self.assertEqual(mw.allowed_origins_exact, set()) self.assertEqual(mw.allowed_origin_subdomains, {'https': ['.example.com']}) class CsrfViewMiddlewareTests(CsrfViewMiddlewareTestMixin, SimpleTestCase): def _set_csrf_cookie(self, req, cookie): req.COOKIES[settings.CSRF_COOKIE_NAME] = cookie def _read_csrf_cookie(self, req, resp): """ Return the CSRF cookie as a string, or False if no cookie is present. """ if settings.CSRF_COOKIE_NAME not in resp.cookies: return False csrf_cookie = resp.cookies[settings.CSRF_COOKIE_NAME] return csrf_cookie.value def _get_cookies_set(self, req, resp): return resp._cookies_set def test_ensures_csrf_cookie_no_middleware(self): """ The ensure_csrf_cookie() decorator works without middleware. """ req = self._get_request() resp = ensure_csrf_cookie_view(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertTrue(csrf_cookie) self.assertIn('Cookie', resp.get('Vary', '')) def test_ensures_csrf_cookie_with_middleware(self): """ The ensure_csrf_cookie() decorator works with the CsrfViewMiddleware enabled. """ req = self._get_request() mw = CsrfViewMiddleware(ensure_csrf_cookie_view) mw.process_view(req, ensure_csrf_cookie_view, (), {}) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertTrue(csrf_cookie) self.assertIn('Cookie', resp.get('Vary', '')) def test_csrf_cookie_age(self): """ CSRF cookie age can be set using settings.CSRF_COOKIE_AGE. """ req = self._get_request() MAX_AGE = 123 with self.settings(CSRF_COOKIE_NAME='csrfcookie', CSRF_COOKIE_DOMAIN='.example.com', CSRF_COOKIE_AGE=MAX_AGE, CSRF_COOKIE_PATH='/test/', CSRF_COOKIE_SECURE=True, CSRF_COOKIE_HTTPONLY=True): # token_view calls get_token() indirectly mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) max_age = resp.cookies.get('csrfcookie').get('max-age') self.assertEqual(max_age, MAX_AGE) def test_csrf_cookie_age_none(self): """ CSRF cookie age does not have max age set and therefore uses session-based cookies. """ req = self._get_request() MAX_AGE = None with self.settings(CSRF_COOKIE_NAME='csrfcookie', CSRF_COOKIE_DOMAIN='.example.com', CSRF_COOKIE_AGE=MAX_AGE, CSRF_COOKIE_PATH='/test/', CSRF_COOKIE_SECURE=True, CSRF_COOKIE_HTTPONLY=True): # token_view calls get_token() indirectly mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) max_age = resp.cookies.get('csrfcookie').get('max-age') self.assertEqual(max_age, '') def test_csrf_cookie_samesite(self): req = self._get_request() with self.settings(CSRF_COOKIE_NAME='csrfcookie', CSRF_COOKIE_SAMESITE='Strict'): mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) self.assertEqual(resp.cookies['csrfcookie']['samesite'], 'Strict') def test_bad_csrf_cookie_characters(self): """ If the CSRF cookie has invalid characters in a POST request, the middleware rejects the incoming request. """ self._check_bad_or_missing_cookie(64 * '*', 'CSRF cookie has invalid characters.') def test_bad_csrf_cookie_length(self): """ If the CSRF cookie has an incorrect length in a POST request, the middleware rejects the incoming request. 
""" self._check_bad_or_missing_cookie(16 * 'a', 'CSRF cookie has incorrect length.') def test_process_view_token_too_long(self): """ If the token is longer than expected, it is ignored and a new token is created. """ req = self._get_request(cookie='x' * 100000) mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertEqual(len(csrf_cookie), CSRF_SECRET_LENGTH) def test_process_view_token_invalid_chars(self): """ If the token contains non-alphanumeric characters, it is ignored and a new token is created. """ token = ('!@#' + self._csrf_id_token)[:CSRF_TOKEN_LENGTH] req = self._get_request(cookie=token) mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertEqual(len(csrf_cookie), CSRF_SECRET_LENGTH) self.assertNotEqual(csrf_cookie, token) def test_masked_unmasked_combinations(self): """ All combinations are allowed of (1) masked and unmasked cookies, (2) masked and unmasked tokens, and (3) tokens provided via POST and the X-CSRFToken header. """ cases = [ (TEST_SECRET, TEST_SECRET, None), (TEST_SECRET, MASKED_TEST_SECRET2, None), (TEST_SECRET, None, TEST_SECRET), (TEST_SECRET, None, MASKED_TEST_SECRET2), (MASKED_TEST_SECRET1, TEST_SECRET, None), (MASKED_TEST_SECRET1, MASKED_TEST_SECRET2, None), (MASKED_TEST_SECRET1, None, TEST_SECRET), (MASKED_TEST_SECRET1, None, MASKED_TEST_SECRET2), ] for args in cases: with self.subTest(args=args): cookie, post_token, meta_token = args req = self._get_POST_csrf_cookie_request( cookie=cookie, post_token=post_token, meta_token=meta_token, ) mw = CsrfViewMiddleware(token_view) mw.process_request(req) resp = mw.process_view(req, token_view, (), {}) self.assertIsNone(resp) def test_set_cookie_called_only_once(self): """ set_cookie() is called only once when the view is decorated with both ensure_csrf_cookie and csrf_protect. """ req = self._get_POST_request_with_token() resp = ensured_and_protected_view(req) self.assertContains(resp, 'OK') csrf_cookie = self._read_csrf_cookie(req, resp) self.assertEqual(csrf_cookie, TEST_SECRET) # set_cookie() was called only once and with the expected secret. cookies_set = self._get_cookies_set(req, resp) self.assertEqual(cookies_set, [TEST_SECRET]) def test_invalid_cookie_replaced_on_GET(self): """ A CSRF cookie with the wrong format is replaced during a GET request. """ req = self._get_request(cookie='badvalue') resp = protected_view(req) self.assertContains(resp, 'OK') csrf_cookie = self._read_csrf_cookie(req, resp) self.assertTrue(csrf_cookie, msg='No CSRF cookie was sent.') self.assertEqual(len(csrf_cookie), CSRF_SECRET_LENGTH) def test_valid_secret_not_replaced_on_GET(self): """ Masked and unmasked CSRF cookies are not replaced during a GET request. """ cases = [ TEST_SECRET, MASKED_TEST_SECRET1, ] for cookie in cases: with self.subTest(cookie=cookie): req = self._get_request(cookie=cookie) resp = protected_view(req) self.assertContains(resp, 'OK') csrf_cookie = self._read_csrf_cookie(req, resp) self.assertFalse(csrf_cookie, msg='A CSRF cookie was sent.') def test_masked_secret_accepted_and_replaced(self): """ For a view that uses the csrf_token, the csrf cookie is replaced with the unmasked version if originally masked. 
""" req = self._get_POST_request_with_token(cookie=MASKED_TEST_SECRET1) mw = CsrfViewMiddleware(token_view) mw.process_request(req) resp = mw.process_view(req, token_view, (), {}) self.assertIsNone(resp) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertEqual(csrf_cookie, TEST_SECRET) self._check_token_present(resp, csrf_cookie) def test_bare_secret_accepted_and_not_replaced(self): """ The csrf cookie is left unchanged if originally not masked. """ req = self._get_POST_request_with_token(cookie=TEST_SECRET) mw = CsrfViewMiddleware(token_view) mw.process_request(req) resp = mw.process_view(req, token_view, (), {}) self.assertIsNone(resp) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertEqual(csrf_cookie, TEST_SECRET) self._check_token_present(resp, csrf_cookie) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_COOKIE_DOMAIN='.example.com', USE_X_FORWARDED_PORT=True) def test_https_good_referer_behind_proxy(self): """ A POST HTTPS request is accepted when USE_X_FORWARDED_PORT=True. """ self._test_https_good_referer_behind_proxy() @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_COOKIE_DOMAIN='.example.com') def test_https_good_referer_matches_cookie_domain(self): """ A POST HTTPS request with a good referer should be accepted from a subdomain that's allowed by CSRF_COOKIE_DOMAIN. """ self._test_https_good_referer_matches_cookie_domain() @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_COOKIE_DOMAIN='.example.com') def test_https_good_referer_matches_cookie_domain_with_different_port(self): """ A POST HTTPS request with a good referer should be accepted from a subdomain that's allowed by CSRF_COOKIE_DOMAIN and a non-443 port. """ self._test_https_good_referer_matches_cookie_domain_with_different_port() @override_settings(CSRF_COOKIE_DOMAIN='.example.com', DEBUG=True) def test_https_reject_insecure_referer(self): """ A POST HTTPS request from an insecure referer should be rejected. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_REFERER'] = 'http://example.com/' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - Referer is insecure while host is secure.', status_code=403, ) @override_settings(CSRF_USE_SESSIONS=True, CSRF_COOKIE_DOMAIN=None) class CsrfViewMiddlewareUseSessionsTests(CsrfViewMiddlewareTestMixin, SimpleTestCase): """ CSRF tests with CSRF_USE_SESSIONS=True. """ def _set_csrf_cookie(self, req, cookie): req.session[CSRF_SESSION_KEY] = cookie def _read_csrf_cookie(self, req, resp=None): """ Return the CSRF cookie as a string, or False if no cookie is present. """ if CSRF_SESSION_KEY not in req.session: return False return req.session[CSRF_SESSION_KEY] def _get_cookies_set(self, req, resp): return req.session._cookies_set def test_no_session_on_request(self): msg = ( 'CSRF_USE_SESSIONS is enabled, but request.session is not set. ' 'SessionMiddleware must appear before CsrfViewMiddleware in MIDDLEWARE.' ) with self.assertRaisesMessage(ImproperlyConfigured, msg): mw = CsrfViewMiddleware(lambda req: HttpResponse()) mw.process_request(HttpRequest()) def test_masked_unmasked_combinations(self): """ Masked and unmasked tokens are allowed both as POST and as the X-CSRFToken header. """ cases = [ # Bare secrets are not allowed when CSRF_USE_SESSIONS=True. 
(MASKED_TEST_SECRET1, TEST_SECRET, None), (MASKED_TEST_SECRET1, MASKED_TEST_SECRET2, None), (MASKED_TEST_SECRET1, None, TEST_SECRET), (MASKED_TEST_SECRET1, None, MASKED_TEST_SECRET2), ] for args in cases: with self.subTest(args=args): cookie, post_token, meta_token = args req = self._get_POST_csrf_cookie_request( cookie=cookie, post_token=post_token, meta_token=meta_token, ) mw = CsrfViewMiddleware(token_view) mw.process_request(req) resp = mw.process_view(req, token_view, (), {}) self.assertIsNone(resp) def test_process_response_get_token_used(self): """The ensure_csrf_cookie() decorator works without middleware.""" req = self._get_request() ensure_csrf_cookie_view(req) csrf_cookie = self._read_csrf_cookie(req) self.assertTrue(csrf_cookie) def test_session_modify(self): """The session isn't saved if the CSRF cookie is unchanged.""" req = self._get_request() mw = CsrfViewMiddleware(ensure_csrf_cookie_view) mw.process_view(req, ensure_csrf_cookie_view, (), {}) mw(req) csrf_cookie = self._read_csrf_cookie(req) self.assertTrue(csrf_cookie) req.session.modified = False mw.process_view(req, ensure_csrf_cookie_view, (), {}) mw(req) self.assertFalse(req.session.modified) def test_ensures_csrf_cookie_with_middleware(self): """ The ensure_csrf_cookie() decorator works with the CsrfViewMiddleware enabled. """ req = self._get_request() mw = CsrfViewMiddleware(ensure_csrf_cookie_view) mw.process_view(req, ensure_csrf_cookie_view, (), {}) mw(req) csrf_cookie = self._read_csrf_cookie(req) self.assertTrue(csrf_cookie) @override_settings( ALLOWED_HOSTS=['www.example.com'], SESSION_COOKIE_DOMAIN='.example.com', USE_X_FORWARDED_PORT=True, DEBUG=True, ) def test_https_good_referer_behind_proxy(self): """ A POST HTTPS request is accepted when USE_X_FORWARDED_PORT=True. """ self._test_https_good_referer_behind_proxy() @override_settings(ALLOWED_HOSTS=['www.example.com'], SESSION_COOKIE_DOMAIN='.example.com') def test_https_good_referer_matches_cookie_domain(self): """ A POST HTTPS request with a good referer should be accepted from a subdomain that's allowed by SESSION_COOKIE_DOMAIN. """ self._test_https_good_referer_matches_cookie_domain() @override_settings(ALLOWED_HOSTS=['www.example.com'], SESSION_COOKIE_DOMAIN='.example.com') def test_https_good_referer_matches_cookie_domain_with_different_port(self): """ A POST HTTPS request with a good referer should be accepted from a subdomain that's allowed by SESSION_COOKIE_DOMAIN and a non-443 port. """ self._test_https_good_referer_matches_cookie_domain_with_different_port() @override_settings(SESSION_COOKIE_DOMAIN='.example.com', DEBUG=True) def test_https_reject_insecure_referer(self): """ A POST HTTPS request from an insecure referer should be rejected. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_REFERER'] = 'http://example.com/' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(post_form_view) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - Referer is insecure while host is secure.', status_code=403, ) @override_settings(ROOT_URLCONF='csrf_tests.csrf_token_error_handler_urls', DEBUG=False) class CsrfInErrorHandlingViewsTests(CsrfFunctionTestMixin, SimpleTestCase): def test_csrf_token_on_404_stays_constant(self): response = self.client.get('/does not exist/') # The error handler returns status code 599. 
        self.assertEqual(response.status_code, 599)
        token1 = response.content.decode('ascii')
        response = self.client.get('/does not exist/')
        self.assertEqual(response.status_code, 599)
        token2 = response.content.decode('ascii')
        secret2 = _unmask_cipher_token(token2)
        self.assertMaskedSecretCorrect(token1, secret2)


@ignore_warnings(category=RemovedInDjango50Warning)
class CsrfCookieMaskedTests(CsrfFunctionTestMixin, SimpleTestCase):
    @override_settings(CSRF_COOKIE_MASKED=True)
    def test_get_token_csrf_cookie_not_set(self):
        request = HttpRequest()
        self.assertNotIn('CSRF_COOKIE', request.META)
        self.assertNotIn('CSRF_COOKIE_NEEDS_UPDATE', request.META)
        token = get_token(request)
        cookie = request.META['CSRF_COOKIE']
        self.assertEqual(len(cookie), CSRF_TOKEN_LENGTH)
        unmasked_cookie = _unmask_cipher_token(cookie)
        self.assertMaskedSecretCorrect(token, unmasked_cookie)
        self.assertIs(request.META['CSRF_COOKIE_NEEDS_UPDATE'], True)

    @override_settings(CSRF_COOKIE_MASKED=True)
    def test_rotate_token(self):
        request = HttpRequest()
        request.META['CSRF_COOKIE'] = MASKED_TEST_SECRET1
        self.assertNotIn('CSRF_COOKIE_NEEDS_UPDATE', request.META)
        rotate_token(request)
        # The underlying secret was changed.
        cookie = request.META['CSRF_COOKIE']
        self.assertEqual(len(cookie), CSRF_TOKEN_LENGTH)
        unmasked_cookie = _unmask_cipher_token(cookie)
        self.assertNotEqual(unmasked_cookie, TEST_SECRET)
        self.assertIs(request.META['CSRF_COOKIE_NEEDS_UPDATE'], True)
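# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the test suite above): a minimal,
# self-contained version of the mask/unmask scheme these tests assume, i.e.
# a 32-character alphanumeric secret combined with a random 32-character mask
# yields a 64-character token, and unmasking recovers the secret. The real
# helpers live in django.middleware.csrf; the names and constants below are
# local stand-ins for illustration only.
# ---------------------------------------------------------------------------
import secrets
import string

SKETCH_ALLOWED_CHARS = string.ascii_letters + string.digits
SKETCH_SECRET_LENGTH = 32


def sketch_mask_secret(secret):
    """Return mask + cipher, where cipher[i] combines secret[i] with mask[i]."""
    chars = SKETCH_ALLOWED_CHARS
    mask = ''.join(secrets.choice(chars) for _ in range(SKETCH_SECRET_LENGTH))
    cipher = ''.join(
        chars[(chars.index(s) + chars.index(m)) % len(chars)]
        for s, m in zip(secret, mask)
    )
    return mask + cipher


def sketch_unmask_token(token):
    """Invert sketch_mask_secret(): subtract the leading mask from the cipher."""
    chars = SKETCH_ALLOWED_CHARS
    mask, cipher = token[:SKETCH_SECRET_LENGTH], token[SKETCH_SECRET_LENGTH:]
    return ''.join(
        chars[(chars.index(c) - chars.index(m)) % len(chars)]
        for c, m in zip(cipher, mask)
    )


if __name__ == '__main__':
    # Round-trip demo: masking then unmasking returns the original secret.
    _secret = 32 * 'a'
    _token = sketch_mask_secret(_secret)
    assert len(_token) == 64
    assert sketch_unmask_token(_token) == _secret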
dc287eabd6ff9b44206c387f3b8d57b145633b28358b234f7328771970975510
from django.http import HttpRequest
from django.template.context_processors import csrf
from django.test import SimpleTestCase

from .tests import CsrfFunctionTestMixin


class TestContextProcessor(CsrfFunctionTestMixin, SimpleTestCase):
    def test_force_token_to_string(self):
        request = HttpRequest()
        test_secret = 32 * 'a'
        request.META['CSRF_COOKIE'] = test_secret
        token = csrf(request).get('csrf_token')
        self.assertMaskedSecretCorrect(token, test_secret)
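# ---------------------------------------------------------------------------
# Rough standalone sketch (not part of the test above) of what the assertion
# relies on: csrf(request) exposes a lazy 'csrf_token' entry, and forcing it
# to a string yields a 64-character masked token derived from the 32-character
# secret stored in request.META['CSRF_COOKIE']. Running this outside the test
# runner assumes a minimally configured settings module, which is what the
# settings.configure() call below is standing in for.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    from django.conf import settings

    if not settings.configured:
        settings.configure()

    sketch_request = HttpRequest()
    sketch_request.META['CSRF_COOKIE'] = 32 * 'a'
    # str() forces the SimpleLazyObject returned by the context processor.
    sketch_token = str(csrf(sketch_request)['csrf_token'])
    assert len(sketch_token) == 64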
0aaa5e6e89d6afad7fd0a909ab8e103434f222139be3c1f93f96915996dbe877
from django.contrib.contenttypes.fields import (
    GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models


class Relation(models.Model):
    pass


class InstanceOnlyDescriptor:
    def __get__(self, instance, cls=None):
        if instance is None:
            raise AttributeError('Instance only')
        return 1


class AbstractPerson(models.Model):
    # DATA fields
    data_abstract = models.CharField(max_length=10)
    fk_abstract = models.ForeignKey(Relation, models.CASCADE, related_name='fk_abstract_rel')

    # M2M fields
    m2m_abstract = models.ManyToManyField(Relation, related_name='m2m_abstract_rel')
    friends_abstract = models.ManyToManyField('self', symmetrical=True)
    following_abstract = models.ManyToManyField('self', related_name='followers_abstract', symmetrical=False)

    # VIRTUAL fields
    data_not_concrete_abstract = models.ForeignObject(
        Relation,
        on_delete=models.CASCADE,
        from_fields=['abstract_non_concrete_id'],
        to_fields=['id'],
        related_name='fo_abstract_rel',
    )

    # GFK fields
    content_type_abstract = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
    object_id_abstract = models.PositiveIntegerField()
    content_object_abstract = GenericForeignKey('content_type_abstract', 'object_id_abstract')

    # GR fields
    generic_relation_abstract = GenericRelation(Relation)

    class Meta:
        abstract = True

    @property
    def test_property(self):
        return 1

    test_instance_only_descriptor = InstanceOnlyDescriptor()


class BasePerson(AbstractPerson):
    # DATA fields
    data_base = models.CharField(max_length=10)
    fk_base = models.ForeignKey(Relation, models.CASCADE, related_name='fk_base_rel')

    # M2M fields
    m2m_base = models.ManyToManyField(Relation, related_name='m2m_base_rel')
    friends_base = models.ManyToManyField('self', symmetrical=True)
    following_base = models.ManyToManyField('self', related_name='followers_base', symmetrical=False)

    # VIRTUAL fields
    data_not_concrete_base = models.ForeignObject(
        Relation,
        on_delete=models.CASCADE,
        from_fields=['base_non_concrete_id'],
        to_fields=['id'],
        related_name='fo_base_rel',
    )

    # GFK fields
    content_type_base = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
    object_id_base = models.PositiveIntegerField()
    content_object_base = GenericForeignKey('content_type_base', 'object_id_base')

    # GR fields
    generic_relation_base = GenericRelation(Relation)


class Person(BasePerson):
    # DATA fields
    data_inherited = models.CharField(max_length=10)
    fk_inherited = models.ForeignKey(Relation, models.CASCADE, related_name='fk_concrete_rel')

    # M2M Fields
    m2m_inherited = models.ManyToManyField(Relation, related_name='m2m_concrete_rel')
    friends_inherited = models.ManyToManyField('self', symmetrical=True)
    following_inherited = models.ManyToManyField('self', related_name='followers_concrete', symmetrical=False)

    # VIRTUAL fields
    data_not_concrete_inherited = models.ForeignObject(
        Relation,
        on_delete=models.CASCADE,
        from_fields=['model_non_concrete_id'],
        to_fields=['id'],
        related_name='fo_concrete_rel',
    )

    # GFK fields
    content_type_concrete = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
    object_id_concrete = models.PositiveIntegerField()
    content_object_concrete = GenericForeignKey('content_type_concrete', 'object_id_concrete')

    # GR fields
    generic_relation_concrete = GenericRelation(Relation)


class ProxyPerson(Person):
    class Meta:
        proxy = True


class PersonThroughProxySubclass(ProxyPerson):
    pass


class Relating(models.Model):
    # ForeignKey to BasePerson
    baseperson = models.ForeignKey(BasePerson, models.CASCADE, related_name='relating_baseperson')
    baseperson_hidden = models.ForeignKey(BasePerson, models.CASCADE, related_name='+')

    # ForeignKey to Person
    person = models.ForeignKey(Person, models.CASCADE, related_name='relating_person')
    person_hidden = models.ForeignKey(Person, models.CASCADE, related_name='+')

    # ForeignKey to ProxyPerson
    proxyperson = models.ForeignKey(ProxyPerson, models.CASCADE, related_name='relating_proxyperson')
    proxyperson_hidden = models.ForeignKey(ProxyPerson, models.CASCADE, related_name='relating_proxyperson_hidden+')

    # ManyToManyField to BasePerson
    basepeople = models.ManyToManyField(BasePerson, related_name='relating_basepeople')
    basepeople_hidden = models.ManyToManyField(BasePerson, related_name='+')

    # ManyToManyField to Person
    people = models.ManyToManyField(Person, related_name='relating_people')
    people_hidden = models.ManyToManyField(Person, related_name='+')


# ParentListTests models
class CommonAncestor(models.Model):
    pass


class FirstParent(CommonAncestor):
    first_ancestor = models.OneToOneField(CommonAncestor, models.CASCADE, primary_key=True, parent_link=True)


class SecondParent(CommonAncestor):
    second_ancestor = models.OneToOneField(CommonAncestor, models.CASCADE, primary_key=True, parent_link=True)


class Child(FirstParent, SecondParent):
    pass
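# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the fixtures above): these models exist to
# exercise the Model._meta field-introspection API, and a helper along these
# lines is the usual way that API is consumed. The function and its labels are
# a local example, not something the test suite defines.
# ---------------------------------------------------------------------------
def local_field_summary(model):
    """
    Return {field name: kind} for every entry reported by
    model._meta.get_fields(), classifying each entry by the flags the field
    object exposes ('concrete', 'relation', or 'reverse'/virtual).
    """
    summary = {}
    for field in model._meta.get_fields():
        if not getattr(field, 'concrete', False):
            kind = 'reverse'
        elif field.is_relation:
            kind = 'relation'
        else:
            kind = 'concrete'
        summary[field.name] = kind
    return summary

# For example, local_field_summary(Person) would be expected to report
# inherited data fields such as 'data_abstract' and 'data_base' as 'concrete',
# foreign keys such as 'fk_base' as 'relation', and accessors contributed by
# Relating's related_name declarations as 'reverse'.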
c8293a8aa5e17afa3d8a20f47b8fbf73d1c1bbfd450e2e59af41a9163da97483
import datetime import math import re from decimal import Decimal from django.core.exceptions import FieldError from django.db import connection from django.db.models import ( Avg, Case, Count, DateField, DateTimeField, DecimalField, DurationField, Exists, F, FloatField, IntegerField, Max, Min, OuterRef, Q, StdDev, Subquery, Sum, TimeField, Value, Variance, When, ) from django.db.models.expressions import Func, RawSQL from django.db.models.functions import ( Cast, Coalesce, Greatest, Now, Pi, TruncDate, TruncHour, ) from django.test import TestCase from django.test.testcases import skipUnlessDBFeature from django.test.utils import Approximate, CaptureQueriesContext from django.utils import timezone from .models import Author, Book, Publisher, Store class NowUTC(Now): template = 'CURRENT_TIMESTAMP' output_field = DateTimeField() def as_sql(self, compiler, connection, **extra_context): if connection.features.test_now_utc_template: extra_context['template'] = connection.features.test_now_utc_template return super().as_sql(compiler, connection, **extra_context) class AggregateTestCase(TestCase): @classmethod def setUpTestData(cls): cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34) cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35) cls.a3 = Author.objects.create(name='Brad Dayley', age=45) cls.a4 = Author.objects.create(name='James Bennett', age=29) cls.a5 = Author.objects.create(name='Jeffrey Forcier', age=37) cls.a6 = Author.objects.create(name='Paul Bissex', age=29) cls.a7 = Author.objects.create(name='Wesley J. Chun', age=25) cls.a8 = Author.objects.create(name='Peter Norvig', age=57) cls.a9 = Author.objects.create(name='Stuart Russell', age=46) cls.a1.friends.add(cls.a2, cls.a4) cls.a2.friends.add(cls.a1, cls.a7) cls.a4.friends.add(cls.a1) cls.a5.friends.add(cls.a6, cls.a7) cls.a6.friends.add(cls.a5, cls.a7) cls.a7.friends.add(cls.a2, cls.a5, cls.a6) cls.a8.friends.add(cls.a9) cls.a9.friends.add(cls.a8) cls.p1 = Publisher.objects.create(name='Apress', num_awards=3, duration=datetime.timedelta(days=1)) cls.p2 = Publisher.objects.create(name='Sams', num_awards=1, duration=datetime.timedelta(days=2)) cls.p3 = Publisher.objects.create(name='Prentice Hall', num_awards=7) cls.p4 = Publisher.objects.create(name='Morgan Kaufmann', num_awards=9) cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0) cls.b1 = Book.objects.create( isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right', pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1, pubdate=datetime.date(2007, 12, 6) ) cls.b2 = Book.objects.create( isbn='067232959', name='Sams Teach Yourself Django in 24 Hours', pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a3, publisher=cls.p2, pubdate=datetime.date(2008, 3, 3) ) cls.b3 = Book.objects.create( isbn='159059996', name='Practical Django Projects', pages=300, rating=4.0, price=Decimal('29.69'), contact=cls.a4, publisher=cls.p1, pubdate=datetime.date(2008, 6, 23) ) cls.b4 = Book.objects.create( isbn='013235613', name='Python Web Development with Django', pages=350, rating=4.0, price=Decimal('29.69'), contact=cls.a5, publisher=cls.p3, pubdate=datetime.date(2008, 11, 3) ) cls.b5 = Book.objects.create( isbn='013790395', name='Artificial Intelligence: A Modern Approach', pages=1132, rating=4.0, price=Decimal('82.80'), contact=cls.a8, publisher=cls.p3, pubdate=datetime.date(1995, 1, 15) ) cls.b6 = Book.objects.create( isbn='155860191', name='Paradigms of Artificial Intelligence 
Programming: Case Studies in Common Lisp', pages=946, rating=5.0, price=Decimal('75.00'), contact=cls.a8, publisher=cls.p4, pubdate=datetime.date(1991, 10, 15) ) cls.b1.authors.add(cls.a1, cls.a2) cls.b2.authors.add(cls.a3) cls.b3.authors.add(cls.a4) cls.b4.authors.add(cls.a5, cls.a6, cls.a7) cls.b5.authors.add(cls.a8, cls.a9) cls.b6.authors.add(cls.a8) s1 = Store.objects.create( name='Amazon.com', original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42), friday_night_closing=datetime.time(23, 59, 59) ) s2 = Store.objects.create( name='Books.com', original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37), friday_night_closing=datetime.time(23, 59, 59) ) s3 = Store.objects.create( name="Mamma and Pappa's Books", original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14), friday_night_closing=datetime.time(21, 30) ) s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6) s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6) s3.books.add(cls.b3, cls.b4, cls.b6) def test_empty_aggregate(self): self.assertEqual(Author.objects.all().aggregate(), {}) def test_aggregate_in_order_by(self): msg = ( 'Using an aggregate in order_by() without also including it in ' 'annotate() is not allowed: Avg(F(book__rating)' ) with self.assertRaisesMessage(FieldError, msg): Author.objects.values('age').order_by(Avg('book__rating')) def test_single_aggregate(self): vals = Author.objects.aggregate(Avg("age")) self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)}) def test_multiple_aggregates(self): vals = Author.objects.aggregate(Sum("age"), Avg("age")) self.assertEqual(vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)}) def test_filter_aggregate(self): vals = Author.objects.filter(age__gt=29).aggregate(Sum("age")) self.assertEqual(vals, {'age__sum': 254}) def test_related_aggregate(self): vals = Author.objects.aggregate(Avg("friends__age")) self.assertEqual(vals, {'friends__age__avg': Approximate(34.07, places=2)}) vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age")) self.assertEqual(vals, {'authors__age__avg': Approximate(38.2857, places=2)}) vals = Author.objects.all().filter(name__contains="a").aggregate(Avg("book__rating")) self.assertEqual(vals, {'book__rating__avg': 4.0}) vals = Book.objects.aggregate(Sum("publisher__num_awards")) self.assertEqual(vals, {'publisher__num_awards__sum': 30}) vals = Publisher.objects.aggregate(Sum("book__price")) self.assertEqual(vals, {'book__price__sum': Decimal('270.27')}) def test_aggregate_multi_join(self): vals = Store.objects.aggregate(Max("books__authors__age")) self.assertEqual(vals, {'books__authors__age__max': 57}) vals = Author.objects.aggregate(Min("book__publisher__num_awards")) self.assertEqual(vals, {'book__publisher__num_awards__min': 1}) def test_aggregate_alias(self): vals = Store.objects.filter(name="Amazon.com").aggregate(amazon_mean=Avg("books__rating")) self.assertEqual(vals, {'amazon_mean': Approximate(4.08, places=2)}) def test_aggregate_transform(self): vals = Store.objects.aggregate(min_month=Min('original_opening__month')) self.assertEqual(vals, {'min_month': 3}) def test_aggregate_join_transform(self): vals = Publisher.objects.aggregate(min_year=Min('book__pubdate__year')) self.assertEqual(vals, {'min_year': 1991}) def test_annotate_basic(self): self.assertQuerysetEqual( Book.objects.annotate().order_by('pk'), [ "The Definitive Guide to Django: Web Development Done Right", "Sams Teach Yourself Django in 24 Hours", "Practical Django Projects", "Python Web Development with Django", "Artificial Intelligence: 
A Modern Approach", "Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp" ], lambda b: b.name ) books = Book.objects.annotate(mean_age=Avg("authors__age")) b = books.get(pk=self.b1.pk) self.assertEqual( b.name, 'The Definitive Guide to Django: Web Development Done Right' ) self.assertEqual(b.mean_age, 34.5) def test_annotate_defer(self): qs = Book.objects.annotate( page_sum=Sum("pages")).defer('name').filter(pk=self.b1.pk) rows = [ (self.b1.id, "159059725", 447, "The Definitive Guide to Django: Web Development Done Right") ] self.assertQuerysetEqual( qs.order_by('pk'), rows, lambda r: (r.id, r.isbn, r.page_sum, r.name) ) def test_annotate_defer_select_related(self): qs = Book.objects.select_related('contact').annotate( page_sum=Sum("pages")).defer('name').filter(pk=self.b1.pk) rows = [ (self.b1.id, "159059725", 447, "Adrian Holovaty", "The Definitive Guide to Django: Web Development Done Right") ] self.assertQuerysetEqual( qs.order_by('pk'), rows, lambda r: (r.id, r.isbn, r.page_sum, r.contact.name, r.name) ) def test_annotate_m2m(self): books = Book.objects.filter(rating__lt=4.5).annotate(Avg("authors__age")).order_by("name") self.assertQuerysetEqual( books, [ ('Artificial Intelligence: A Modern Approach', 51.5), ('Practical Django Projects', 29.0), ('Python Web Development with Django', Approximate(30.3, places=1)), ('Sams Teach Yourself Django in 24 Hours', 45.0) ], lambda b: (b.name, b.authors__age__avg), ) books = Book.objects.annotate(num_authors=Count("authors")).order_by("name") self.assertQuerysetEqual( books, [ ('Artificial Intelligence: A Modern Approach', 2), ('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1), ('Practical Django Projects', 1), ('Python Web Development with Django', 3), ('Sams Teach Yourself Django in 24 Hours', 1), ('The Definitive Guide to Django: Web Development Done Right', 2) ], lambda b: (b.name, b.num_authors) ) def test_backwards_m2m_annotate(self): authors = Author.objects.filter(name__contains="a").annotate(Avg("book__rating")).order_by("name") self.assertQuerysetEqual( authors, [ ('Adrian Holovaty', 4.5), ('Brad Dayley', 3.0), ('Jacob Kaplan-Moss', 4.5), ('James Bennett', 4.0), ('Paul Bissex', 4.0), ('Stuart Russell', 4.0) ], lambda a: (a.name, a.book__rating__avg) ) authors = Author.objects.annotate(num_books=Count("book")).order_by("name") self.assertQuerysetEqual( authors, [ ('Adrian Holovaty', 1), ('Brad Dayley', 1), ('Jacob Kaplan-Moss', 1), ('James Bennett', 1), ('Jeffrey Forcier', 1), ('Paul Bissex', 1), ('Peter Norvig', 2), ('Stuart Russell', 1), ('Wesley J. 
Chun', 1) ], lambda a: (a.name, a.num_books) ) def test_reverse_fkey_annotate(self): books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name") self.assertQuerysetEqual( books, [ ('Artificial Intelligence: A Modern Approach', 7), ('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 9), ('Practical Django Projects', 3), ('Python Web Development with Django', 7), ('Sams Teach Yourself Django in 24 Hours', 1), ('The Definitive Guide to Django: Web Development Done Right', 3) ], lambda b: (b.name, b.publisher__num_awards__sum) ) publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name") self.assertQuerysetEqual( publishers, [ ('Apress', Decimal("59.69")), ("Jonno's House of Books", None), ('Morgan Kaufmann', Decimal("75.00")), ('Prentice Hall', Decimal("112.49")), ('Sams', Decimal("23.09")) ], lambda p: (p.name, p.book__price__sum) ) def test_annotate_values(self): books = list(Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values()) self.assertEqual( books, [ { "contact_id": self.a1.id, "id": self.b1.id, "isbn": "159059725", "mean_age": 34.5, "name": "The Definitive Guide to Django: Web Development Done Right", "pages": 447, "price": Approximate(Decimal("30")), "pubdate": datetime.date(2007, 12, 6), "publisher_id": self.p1.id, "rating": 4.5, } ] ) books = ( Book.objects .filter(pk=self.b1.pk) .annotate(mean_age=Avg('authors__age')) .values('pk', 'isbn', 'mean_age') ) self.assertEqual( list(books), [ { "pk": self.b1.pk, "isbn": "159059725", "mean_age": 34.5, } ] ) books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values("name") self.assertEqual( list(books), [{'name': 'The Definitive Guide to Django: Web Development Done Right'}], ) books = Book.objects.filter(pk=self.b1.pk).values().annotate(mean_age=Avg('authors__age')) self.assertEqual( list(books), [ { "contact_id": self.a1.id, "id": self.b1.id, "isbn": "159059725", "mean_age": 34.5, "name": "The Definitive Guide to Django: Web Development Done Right", "pages": 447, "price": Approximate(Decimal("30")), "pubdate": datetime.date(2007, 12, 6), "publisher_id": self.p1.id, "rating": 4.5, } ] ) books = ( Book.objects .values("rating") .annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age")) .order_by("rating") ) self.assertEqual( list(books), [ { "rating": 3.0, "n_authors": 1, "mean_age": 45.0, }, { "rating": 4.0, "n_authors": 6, "mean_age": Approximate(37.16, places=1) }, { "rating": 4.5, "n_authors": 2, "mean_age": 34.5, }, { "rating": 5.0, "n_authors": 1, "mean_age": 57.0, } ] ) authors = Author.objects.annotate(Avg("friends__age")).order_by("name") self.assertQuerysetEqual( authors, [ ('Adrian Holovaty', 32.0), ('Brad Dayley', None), ('Jacob Kaplan-Moss', 29.5), ('James Bennett', 34.0), ('Jeffrey Forcier', 27.0), ('Paul Bissex', 31.0), ('Peter Norvig', 46.0), ('Stuart Russell', 57.0), ('Wesley J. 
Chun', Approximate(33.66, places=1)) ], lambda a: (a.name, a.friends__age__avg) ) def test_count(self): vals = Book.objects.aggregate(Count("rating")) self.assertEqual(vals, {"rating__count": 6}) def test_count_star(self): with self.assertNumQueries(1) as ctx: Book.objects.aggregate(n=Count("*")) sql = ctx.captured_queries[0]['sql'] self.assertIn('SELECT COUNT(*) ', sql) def test_count_distinct_expression(self): aggs = Book.objects.aggregate( distinct_ratings=Count(Case(When(pages__gt=300, then='rating')), distinct=True), ) self.assertEqual(aggs['distinct_ratings'], 4) def test_distinct_on_aggregate(self): for aggregate, expected_result in ( (Avg, 4.125), (Count, 4), (Sum, 16.5), ): with self.subTest(aggregate=aggregate.__name__): books = Book.objects.aggregate(ratings=aggregate('rating', distinct=True)) self.assertEqual(books['ratings'], expected_result) def test_non_grouped_annotation_not_in_group_by(self): """ An annotation not included in values() before an aggregate should be excluded from the group by clause. """ qs = ( Book.objects.annotate(xprice=F('price')).filter(rating=4.0).values('rating') .annotate(count=Count('publisher_id', distinct=True)).values('count', 'rating').order_by('count') ) self.assertEqual(list(qs), [{'rating': 4.0, 'count': 2}]) def test_grouped_annotation_in_group_by(self): """ An annotation included in values() before an aggregate should be included in the group by clause. """ qs = ( Book.objects.annotate(xprice=F('price')).filter(rating=4.0).values('rating', 'xprice') .annotate(count=Count('publisher_id', distinct=True)).values('count', 'rating').order_by('count') ) self.assertEqual( list(qs), [ {'rating': 4.0, 'count': 1}, {'rating': 4.0, 'count': 2}, ] ) def test_fkey_aggregate(self): explicit = list(Author.objects.annotate(Count('book__id'))) implicit = list(Author.objects.annotate(Count('book'))) self.assertCountEqual(explicit, implicit) def test_annotate_ordering(self): books = Book.objects.values('rating').annotate(oldest=Max('authors__age')).order_by('oldest', 'rating') self.assertEqual( list(books), [ {'rating': 4.5, 'oldest': 35}, {'rating': 3.0, 'oldest': 45}, {'rating': 4.0, 'oldest': 57}, {'rating': 5.0, 'oldest': 57}, ] ) books = Book.objects.values("rating").annotate(oldest=Max("authors__age")).order_by("-oldest", "-rating") self.assertEqual( list(books), [ {'rating': 5.0, 'oldest': 57}, {'rating': 4.0, 'oldest': 57}, {'rating': 3.0, 'oldest': 45}, {'rating': 4.5, 'oldest': 35}, ] ) def test_aggregate_annotation(self): vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(Avg("num_authors")) self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)}) def test_avg_duration_field(self): # Explicit `output_field`. self.assertEqual( Publisher.objects.aggregate(Avg('duration', output_field=DurationField())), {'duration__avg': datetime.timedelta(days=1, hours=12)} ) # Implicit `output_field`. self.assertEqual( Publisher.objects.aggregate(Avg('duration')), {'duration__avg': datetime.timedelta(days=1, hours=12)} ) def test_sum_duration_field(self): self.assertEqual( Publisher.objects.aggregate(Sum('duration', output_field=DurationField())), {'duration__sum': datetime.timedelta(days=3)} ) def test_sum_distinct_aggregate(self): """ Sum on a distinct() QuerySet should aggregate only the distinct items. 
""" authors = Author.objects.filter(book__in=[self.b5, self.b6]) self.assertEqual(authors.count(), 3) distinct_authors = authors.distinct() self.assertEqual(distinct_authors.count(), 2) # Selected author ages are 57 and 46 age_sum = distinct_authors.aggregate(Sum('age')) self.assertEqual(age_sum['age__sum'], 103) def test_filtering(self): p = Publisher.objects.create(name='Expensive Publisher', num_awards=0) Book.objects.create( name='ExpensiveBook1', pages=1, isbn='111', rating=3.5, price=Decimal("1000"), publisher=p, contact_id=self.a1.id, pubdate=datetime.date(2008, 12, 1) ) Book.objects.create( name='ExpensiveBook2', pages=1, isbn='222', rating=4.0, price=Decimal("1000"), publisher=p, contact_id=self.a1.id, pubdate=datetime.date(2008, 12, 2) ) Book.objects.create( name='ExpensiveBook3', pages=1, isbn='333', rating=4.5, price=Decimal("35"), publisher=p, contact_id=self.a1.id, pubdate=datetime.date(2008, 12, 3) ) publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk") self.assertQuerysetEqual( publishers, ['Apress', 'Prentice Hall', 'Expensive Publisher'], lambda p: p.name, ) publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by("pk") self.assertQuerysetEqual( publishers, [ "Apress", "Apress", "Sams", "Prentice Hall", "Expensive Publisher", ], lambda p: p.name ) publishers = ( Publisher.objects .annotate(num_books=Count("book__id")) .filter(num_books__gt=1, book__price__lt=Decimal("40.0")) .order_by("pk") ) self.assertQuerysetEqual( publishers, ['Apress', 'Prentice Hall', 'Expensive Publisher'], lambda p: p.name, ) publishers = ( Publisher.objects .filter(book__price__lt=Decimal("40.0")) .annotate(num_books=Count("book__id")) .filter(num_books__gt=1) .order_by("pk") ) self.assertQuerysetEqual(publishers, ['Apress'], lambda p: p.name) publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 3]).order_by("pk") self.assertQuerysetEqual( publishers, [ "Apress", "Sams", "Prentice Hall", "Morgan Kaufmann", "Expensive Publisher", ], lambda p: p.name ) publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 2]).order_by("pk") self.assertQuerysetEqual( publishers, ['Apress', 'Sams', 'Prentice Hall', 'Morgan Kaufmann'], lambda p: p.name ) publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__in=[1, 3]).order_by("pk") self.assertQuerysetEqual( publishers, ['Sams', 'Morgan Kaufmann', 'Expensive Publisher'], lambda p: p.name, ) publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__isnull=True) self.assertEqual(len(publishers), 0) def test_annotation(self): vals = Author.objects.filter(pk=self.a1.pk).aggregate(Count("friends__id")) self.assertEqual(vals, {"friends__id__count": 2}) books = Book.objects.annotate(num_authors=Count("authors__name")).filter(num_authors__exact=2).order_by("pk") self.assertQuerysetEqual( books, [ "The Definitive Guide to Django: Web Development Done Right", "Artificial Intelligence: A Modern Approach", ], lambda b: b.name ) authors = ( Author.objects .annotate(num_friends=Count("friends__id", distinct=True)) .filter(num_friends=0) .order_by("pk") ) self.assertQuerysetEqual(authors, ['Brad Dayley'], lambda a: a.name) publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk") self.assertQuerysetEqual(publishers, ['Apress', 'Prentice Hall'], lambda p: p.name) publishers = ( Publisher.objects 
.filter(book__price__lt=Decimal("40.0")) .annotate(num_books=Count("book__id")) .filter(num_books__gt=1) ) self.assertQuerysetEqual(publishers, ['Apress'], lambda p: p.name) books = ( Book.objects .annotate(num_authors=Count("authors__id")) .filter(authors__name__contains="Norvig", num_authors__gt=1) ) self.assertQuerysetEqual( books, ['Artificial Intelligence: A Modern Approach'], lambda b: b.name ) def test_more_aggregation(self): a = Author.objects.get(name__contains='Norvig') b = Book.objects.get(name__contains='Done Right') b.authors.add(a) b.save() vals = ( Book.objects .annotate(num_authors=Count("authors__id")) .filter(authors__name__contains="Norvig", num_authors__gt=1) .aggregate(Avg("rating")) ) self.assertEqual(vals, {"rating__avg": 4.25}) def test_even_more_aggregate(self): publishers = Publisher.objects.annotate( earliest_book=Min("book__pubdate"), ).exclude(earliest_book=None).order_by("earliest_book").values( 'earliest_book', 'num_awards', 'id', 'name', ) self.assertEqual( list(publishers), [ { 'earliest_book': datetime.date(1991, 10, 15), 'num_awards': 9, 'id': self.p4.id, 'name': 'Morgan Kaufmann' }, { 'earliest_book': datetime.date(1995, 1, 15), 'num_awards': 7, 'id': self.p3.id, 'name': 'Prentice Hall' }, { 'earliest_book': datetime.date(2007, 12, 6), 'num_awards': 3, 'id': self.p1.id, 'name': 'Apress' }, { 'earliest_book': datetime.date(2008, 3, 3), 'num_awards': 1, 'id': self.p2.id, 'name': 'Sams' } ] ) vals = Store.objects.aggregate(Max("friday_night_closing"), Min("original_opening")) self.assertEqual( vals, { "friday_night_closing__max": datetime.time(23, 59, 59), "original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14), } ) def test_annotate_values_list(self): books = ( Book.objects .filter(pk=self.b1.pk) .annotate(mean_age=Avg("authors__age")) .values_list("pk", "isbn", "mean_age") ) self.assertEqual(list(books), [(self.b1.id, '159059725', 34.5)]) books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("isbn") self.assertEqual(list(books), [('159059725',)]) books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("mean_age") self.assertEqual(list(books), [(34.5,)]) books = ( Book.objects .filter(pk=self.b1.pk) .annotate(mean_age=Avg("authors__age")) .values_list("mean_age", flat=True) ) self.assertEqual(list(books), [34.5]) books = Book.objects.values_list("price").annotate(count=Count("price")).order_by("-count", "price") self.assertEqual( list(books), [ (Decimal("29.69"), 2), (Decimal('23.09'), 1), (Decimal('30'), 1), (Decimal('75'), 1), (Decimal('82.8'), 1), ] ) def test_dates_with_aggregation(self): """ .dates() returns a distinct set of dates when applied to a QuerySet with aggregation. Refs #18056. Previously, .dates() would return distinct (date_kind, aggregation) sets, in this case (year, num_authors), so 2008 would be returned twice because there are books from 2008 with a different number of authors. 
""" dates = Book.objects.annotate(num_authors=Count("authors")).dates('pubdate', 'year') self.assertSequenceEqual( dates, [ datetime.date(1991, 1, 1), datetime.date(1995, 1, 1), datetime.date(2007, 1, 1), datetime.date(2008, 1, 1), ], ) def test_values_aggregation(self): # Refs #20782 max_rating = Book.objects.values('rating').aggregate(max_rating=Max('rating')) self.assertEqual(max_rating['max_rating'], 5) max_books_per_rating = Book.objects.values('rating').annotate( books_per_rating=Count('id') ).aggregate(Max('books_per_rating')) self.assertEqual( max_books_per_rating, {'books_per_rating__max': 3}) def test_ticket17424(self): """ Doing exclude() on a foreign model after annotate() doesn't crash. """ all_books = list(Book.objects.values_list('pk', flat=True).order_by('pk')) annotated_books = Book.objects.order_by('pk').annotate(one=Count("id")) # The value doesn't matter, we just need any negative # constraint on a related model that's a noop. excluded_books = annotated_books.exclude(publisher__name="__UNLIKELY_VALUE__") # Try to generate query tree str(excluded_books.query) self.assertQuerysetEqual(excluded_books, all_books, lambda x: x.pk) # Check internal state self.assertIsNone(annotated_books.query.alias_map["aggregation_book"].join_type) self.assertIsNone(excluded_books.query.alias_map["aggregation_book"].join_type) def test_ticket12886(self): """ Aggregation over sliced queryset works correctly. """ qs = Book.objects.all().order_by('-rating')[0:3] vals = qs.aggregate(average_top3_rating=Avg('rating'))['average_top3_rating'] self.assertAlmostEqual(vals, 4.5, places=2) def test_ticket11881(self): """ Subqueries do not needlessly contain ORDER BY, SELECT FOR UPDATE or select_related() stuff. """ qs = Book.objects.all().select_for_update().order_by( 'pk').select_related('publisher').annotate(max_pk=Max('pk')) with CaptureQueriesContext(connection) as captured_queries: qs.aggregate(avg_pk=Avg('max_pk')) self.assertEqual(len(captured_queries), 1) qstr = captured_queries[0]['sql'].lower() self.assertNotIn('for update', qstr) forced_ordering = connection.ops.force_no_ordering() if forced_ordering: # If the backend needs to force an ordering we make sure it's # the only "ORDER BY" clause present in the query. 
self.assertEqual( re.findall(r'order by (\w+)', qstr), [', '.join(f[1][0] for f in forced_ordering).lower()] ) else: self.assertNotIn('order by', qstr) self.assertEqual(qstr.count(' join '), 0) def test_decimal_max_digits_has_no_effect(self): Book.objects.all().delete() a1 = Author.objects.first() p1 = Publisher.objects.first() thedate = timezone.now() for i in range(10): Book.objects.create( isbn="abcde{}".format(i), name="none", pages=10, rating=4.0, price=9999.98, contact=a1, publisher=p1, pubdate=thedate) book = Book.objects.aggregate(price_sum=Sum('price')) self.assertEqual(book['price_sum'], Decimal("99999.80")) def test_nonaggregate_aggregation_throws(self): with self.assertRaisesMessage(TypeError, 'fail is not an aggregate expression'): Book.objects.aggregate(fail=F('price')) def test_nonfield_annotation(self): book = Book.objects.annotate(val=Max(Value(2))).first() self.assertEqual(book.val, 2) book = Book.objects.annotate(val=Max(Value(2), output_field=IntegerField())).first() self.assertEqual(book.val, 2) book = Book.objects.annotate(val=Max(2, output_field=IntegerField())).first() self.assertEqual(book.val, 2) def test_annotation_expressions(self): authors = Author.objects.annotate(combined_ages=Sum(F('age') + F('friends__age'))).order_by('name') authors2 = Author.objects.annotate(combined_ages=Sum('age') + Sum('friends__age')).order_by('name') for qs in (authors, authors2): self.assertQuerysetEqual( qs, [ ('Adrian Holovaty', 132), ('Brad Dayley', None), ('Jacob Kaplan-Moss', 129), ('James Bennett', 63), ('Jeffrey Forcier', 128), ('Paul Bissex', 120), ('Peter Norvig', 103), ('Stuart Russell', 103), ('Wesley J. Chun', 176) ], lambda a: (a.name, a.combined_ages) ) def test_aggregation_expressions(self): a1 = Author.objects.aggregate(av_age=Sum('age') / Count('*')) a2 = Author.objects.aggregate(av_age=Sum('age') / Count('age')) a3 = Author.objects.aggregate(av_age=Avg('age')) self.assertEqual(a1, {'av_age': 37}) self.assertEqual(a2, {'av_age': 37}) self.assertEqual(a3, {'av_age': Approximate(37.4, places=1)}) def test_avg_decimal_field(self): v = Book.objects.filter(rating=4).aggregate(avg_price=(Avg('price')))['avg_price'] self.assertIsInstance(v, Decimal) self.assertEqual(v, Approximate(Decimal('47.39'), places=2)) def test_order_of_precedence(self): p1 = Book.objects.filter(rating=4).aggregate(avg_price=(Avg('price') + 2) * 3) self.assertEqual(p1, {'avg_price': Approximate(Decimal('148.18'), places=2)}) p2 = Book.objects.filter(rating=4).aggregate(avg_price=Avg('price') + 2 * 3) self.assertEqual(p2, {'avg_price': Approximate(Decimal('53.39'), places=2)}) def test_combine_different_types(self): msg = ( 'Expression contains mixed types: FloatField, DecimalField. ' 'You must set output_field.' 
) qs = Book.objects.annotate(sums=Sum('rating') + Sum('pages') + Sum('price')) with self.assertRaisesMessage(FieldError, msg): qs.first() with self.assertRaisesMessage(FieldError, msg): qs.first() b1 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'), output_field=IntegerField())).get(pk=self.b4.pk) self.assertEqual(b1.sums, 383) b2 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'), output_field=FloatField())).get(pk=self.b4.pk) self.assertEqual(b2.sums, 383.69) b3 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'), output_field=DecimalField())).get(pk=self.b4.pk) self.assertEqual(b3.sums, Approximate(Decimal("383.69"), places=2)) def test_complex_aggregations_require_kwarg(self): with self.assertRaisesMessage(TypeError, 'Complex annotations require an alias'): Author.objects.annotate(Sum(F('age') + F('friends__age'))) with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'): Author.objects.aggregate(Sum('age') / Count('age')) with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'): Author.objects.aggregate(Sum(1)) def test_aggregate_over_complex_annotation(self): qs = Author.objects.annotate( combined_ages=Sum(F('age') + F('friends__age'))) age = qs.aggregate(max_combined_age=Max('combined_ages')) self.assertEqual(age['max_combined_age'], 176) age = qs.aggregate(max_combined_age_doubled=Max('combined_ages') * 2) self.assertEqual(age['max_combined_age_doubled'], 176 * 2) age = qs.aggregate( max_combined_age_doubled=Max('combined_ages') + Max('combined_ages')) self.assertEqual(age['max_combined_age_doubled'], 176 * 2) age = qs.aggregate( max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'), sum_combined_age=Sum('combined_ages')) self.assertEqual(age['max_combined_age_doubled'], 176 * 2) self.assertEqual(age['sum_combined_age'], 954) age = qs.aggregate( max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'), sum_combined_age_doubled=Sum('combined_ages') + Sum('combined_ages')) self.assertEqual(age['max_combined_age_doubled'], 176 * 2) self.assertEqual(age['sum_combined_age_doubled'], 954 * 2) def test_values_annotation_with_expression(self): # ensure the F() is promoted to the group by clause qs = Author.objects.values('name').annotate(another_age=Sum('age') + F('age')) a = qs.get(name="Adrian Holovaty") self.assertEqual(a['another_age'], 68) qs = qs.annotate(friend_count=Count('friends')) a = qs.get(name="Adrian Holovaty") self.assertEqual(a['friend_count'], 2) qs = qs.annotate(combined_age=Sum('age') + F('friends__age')).filter( name="Adrian Holovaty").order_by('-combined_age') self.assertEqual( list(qs), [ { "name": 'Adrian Holovaty', "another_age": 68, "friend_count": 1, "combined_age": 69 }, { "name": 'Adrian Holovaty', "another_age": 68, "friend_count": 1, "combined_age": 63 } ] ) vals = qs.values('name', 'combined_age') self.assertEqual( list(vals), [ {'name': 'Adrian Holovaty', 'combined_age': 69}, {'name': 'Adrian Holovaty', 'combined_age': 63}, ] ) def test_annotate_values_aggregate(self): alias_age = Author.objects.annotate( age_alias=F('age') ).values( 'age_alias', ).aggregate(sum_age=Sum('age_alias')) age = Author.objects.values('age').aggregate(sum_age=Sum('age')) self.assertEqual(alias_age['sum_age'], age['sum_age']) def test_annotate_over_annotate(self): author = Author.objects.annotate( age_alias=F('age') ).annotate( sum_age=Sum('age_alias') ).get(name="Adrian Holovaty") other_author = Author.objects.annotate( sum_age=Sum('age') 
).get(name="Adrian Holovaty") self.assertEqual(author.sum_age, other_author.sum_age) def test_aggregate_over_aggregate(self): msg = "Cannot compute Avg('age'): 'age' is an aggregate" with self.assertRaisesMessage(FieldError, msg): Author.objects.annotate( age_alias=F('age'), ).aggregate( age=Sum(F('age')), avg_age=Avg(F('age')), ) def test_annotated_aggregate_over_annotated_aggregate(self): with self.assertRaisesMessage(FieldError, "Cannot compute Sum('id__max'): 'id__max' is an aggregate"): Book.objects.annotate(Max('id')).annotate(Sum('id__max')) class MyMax(Max): def as_sql(self, compiler, connection): self.set_source_expressions(self.get_source_expressions()[0:1]) return super().as_sql(compiler, connection) with self.assertRaisesMessage(FieldError, "Cannot compute Max('id__max'): 'id__max' is an aggregate"): Book.objects.annotate(Max('id')).annotate(my_max=MyMax('id__max', 'price')) def test_multi_arg_aggregate(self): class MyMax(Max): output_field = DecimalField() def as_sql(self, compiler, connection): copy = self.copy() copy.set_source_expressions(copy.get_source_expressions()[0:1]) return super(MyMax, copy).as_sql(compiler, connection) with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'): Book.objects.aggregate(MyMax('pages', 'price')) with self.assertRaisesMessage(TypeError, 'Complex annotations require an alias'): Book.objects.annotate(MyMax('pages', 'price')) Book.objects.aggregate(max_field=MyMax('pages', 'price')) def test_add_implementation(self): class MySum(Sum): pass # test completely changing how the output is rendered def lower_case_function_override(self, compiler, connection): sql, params = compiler.compile(self.source_expressions[0]) substitutions = {'function': self.function.lower(), 'expressions': sql, 'distinct': ''} substitutions.update(self.extra) return self.template % substitutions, params setattr(MySum, 'as_' + connection.vendor, lower_case_function_override) qs = Book.objects.annotate( sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField()) ) self.assertEqual(str(qs.query).count('sum('), 1) b1 = qs.get(pk=self.b4.pk) self.assertEqual(b1.sums, 383) # test changing the dict and delegating def lower_case_function_super(self, compiler, connection): self.extra['function'] = self.function.lower() return super(MySum, self).as_sql(compiler, connection) setattr(MySum, 'as_' + connection.vendor, lower_case_function_super) qs = Book.objects.annotate( sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField()) ) self.assertEqual(str(qs.query).count('sum('), 1) b1 = qs.get(pk=self.b4.pk) self.assertEqual(b1.sums, 383) # test overriding all parts of the template def be_evil(self, compiler, connection): substitutions = {'function': 'MAX', 'expressions': '2', 'distinct': ''} substitutions.update(self.extra) return self.template % substitutions, () setattr(MySum, 'as_' + connection.vendor, be_evil) qs = Book.objects.annotate( sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField()) ) self.assertEqual(str(qs.query).count('MAX('), 1) b1 = qs.get(pk=self.b4.pk) self.assertEqual(b1.sums, 2) def test_complex_values_aggregation(self): max_rating = Book.objects.values('rating').aggregate( double_max_rating=Max('rating') + Max('rating')) self.assertEqual(max_rating['double_max_rating'], 5 * 2) max_books_per_rating = Book.objects.values('rating').annotate( books_per_rating=Count('id') + 5 ).aggregate(Max('books_per_rating')) self.assertEqual( max_books_per_rating, {'books_per_rating__max': 3 + 
5}) def test_expression_on_aggregation(self): qs = Publisher.objects.annotate( price_or_median=Greatest(Avg('book__rating', output_field=DecimalField()), Avg('book__price')) ).filter(price_or_median__gte=F('num_awards')).order_by('num_awards') self.assertQuerysetEqual( qs, [1, 3, 7, 9], lambda v: v.num_awards) qs2 = Publisher.objects.annotate( rating_or_num_awards=Greatest(Avg('book__rating'), F('num_awards'), output_field=FloatField()) ).filter(rating_or_num_awards__gt=F('num_awards')).order_by('num_awards') self.assertQuerysetEqual( qs2, [1, 3], lambda v: v.num_awards) def test_arguments_must_be_expressions(self): msg = 'QuerySet.aggregate() received non-expression(s): %s.' with self.assertRaisesMessage(TypeError, msg % FloatField()): Book.objects.aggregate(FloatField()) with self.assertRaisesMessage(TypeError, msg % True): Book.objects.aggregate(is_book=True) with self.assertRaisesMessage(TypeError, msg % ', '.join([str(FloatField()), 'True'])): Book.objects.aggregate(FloatField(), Avg('price'), is_book=True) def test_aggregation_subquery_annotation(self): """Subquery annotations are excluded from the GROUP BY if they are not explicitly grouped against.""" latest_book_pubdate_qs = Book.objects.filter( publisher=OuterRef('pk') ).order_by('-pubdate').values('pubdate')[:1] publisher_qs = Publisher.objects.annotate( latest_book_pubdate=Subquery(latest_book_pubdate_qs), ).annotate(count=Count('book')) with self.assertNumQueries(1) as ctx: list(publisher_qs) self.assertEqual(ctx[0]['sql'].count('SELECT'), 2) # The GROUP BY should not be by alias either. self.assertEqual(ctx[0]['sql'].lower().count('latest_book_pubdate'), 1) def test_aggregation_subquery_annotation_exists(self): latest_book_pubdate_qs = Book.objects.filter( publisher=OuterRef('pk') ).order_by('-pubdate').values('pubdate')[:1] publisher_qs = Publisher.objects.annotate( latest_book_pubdate=Subquery(latest_book_pubdate_qs), count=Count('book'), ) self.assertTrue(publisher_qs.exists()) def test_aggregation_exists_annotation(self): published_books = Book.objects.filter(publisher=OuterRef('pk')) publisher_qs = Publisher.objects.annotate( published_book=Exists(published_books), count=Count('book'), ).values_list('name', flat=True) self.assertCountEqual(list(publisher_qs), [ 'Apress', 'Morgan Kaufmann', "Jonno's House of Books", 'Prentice Hall', 'Sams', ]) def test_aggregation_subquery_annotation_values(self): """ Subquery annotations and external aliases are excluded from the GROUP BY if they are not selected. 
""" books_qs = Book.objects.annotate( first_author_the_same_age=Subquery( Author.objects.filter( age=OuterRef('contact__friends__age'), ).order_by('age').values('id')[:1], ) ).filter( publisher=self.p1, first_author_the_same_age__isnull=False, ).annotate( min_age=Min('contact__friends__age'), ).values('name', 'min_age').order_by('name') self.assertEqual(list(books_qs), [ {'name': 'Practical Django Projects', 'min_age': 34}, { 'name': 'The Definitive Guide to Django: Web Development Done Right', 'min_age': 29, }, ]) def test_aggregation_subquery_annotation_values_collision(self): books_rating_qs = Book.objects.filter( publisher=OuterRef('pk'), price=Decimal('29.69'), ).values('rating') publisher_qs = Publisher.objects.filter( book__contact__age__gt=20, name=self.p1.name, ).annotate( rating=Subquery(books_rating_qs), contacts_count=Count('book__contact'), ).values('rating').annotate(total_count=Count('rating')) self.assertEqual(list(publisher_qs), [ {'rating': 4.0, 'total_count': 2}, ]) @skipUnlessDBFeature('supports_subqueries_in_group_by') def test_aggregation_subquery_annotation_multivalued(self): """ Subquery annotations must be included in the GROUP BY if they use potentially multivalued relations (contain the LOOKUP_SEP). """ subquery_qs = Author.objects.filter( pk=OuterRef('pk'), book__name=OuterRef('book__name'), ).values('pk') author_qs = Author.objects.annotate( subquery_id=Subquery(subquery_qs), ).annotate(count=Count('book')) self.assertEqual(author_qs.count(), Author.objects.count()) def test_aggregation_order_by_not_selected_annotation_values(self): result_asc = [ self.b4.pk, self.b3.pk, self.b1.pk, self.b2.pk, self.b5.pk, self.b6.pk, ] result_desc = result_asc[::-1] tests = [ ('min_related_age', result_asc), ('-min_related_age', result_desc), (F('min_related_age'), result_asc), (F('min_related_age').asc(), result_asc), (F('min_related_age').desc(), result_desc), ] for ordering, expected_result in tests: with self.subTest(ordering=ordering): books_qs = Book.objects.annotate( min_age=Min('authors__age'), ).annotate( min_related_age=Coalesce('min_age', 'contact__age'), ).order_by(ordering).values_list('pk', flat=True) self.assertEqual(list(books_qs), expected_result) @skipUnlessDBFeature('supports_subqueries_in_group_by') def test_group_by_subquery_annotation(self): """ Subquery annotations are included in the GROUP BY if they are grouped against. """ long_books_count_qs = Book.objects.filter( publisher=OuterRef('pk'), pages__gt=400, ).values( 'publisher' ).annotate(count=Count('pk')).values('count') groups = [ Subquery(long_books_count_qs), long_books_count_qs, long_books_count_qs.query, ] for group in groups: with self.subTest(group=group.__class__.__name__): long_books_count_breakdown = Publisher.objects.values_list( group, ).annotate(total=Count('*')) self.assertEqual(dict(long_books_count_breakdown), {None: 1, 1: 4}) @skipUnlessDBFeature('supports_subqueries_in_group_by') def test_group_by_exists_annotation(self): """ Exists annotations are included in the GROUP BY if they are grouped against. 
""" long_books_qs = Book.objects.filter( publisher=OuterRef('pk'), pages__gt=800, ) has_long_books_breakdown = Publisher.objects.values_list( Exists(long_books_qs), ).annotate(total=Count('*')) self.assertEqual(dict(has_long_books_breakdown), {True: 2, False: 3}) @skipUnlessDBFeature('supports_subqueries_in_group_by') def test_aggregation_subquery_annotation_related_field(self): publisher = Publisher.objects.create(name=self.a9.name, num_awards=2) book = Book.objects.create( isbn='159059999', name='Test book.', pages=819, rating=2.5, price=Decimal('14.44'), contact=self.a9, publisher=publisher, pubdate=datetime.date(2019, 12, 6), ) book.authors.add(self.a5, self.a6, self.a7) books_qs = Book.objects.annotate( contact_publisher=Subquery( Publisher.objects.filter( pk=OuterRef('publisher'), name=OuterRef('contact__name'), ).values('name')[:1], ) ).filter( contact_publisher__isnull=False, ).annotate(count=Count('authors')) self.assertSequenceEqual(books_qs, [book]) # FIXME: GROUP BY doesn't need to include a subquery with # non-multivalued JOINs, see Col.possibly_multivalued (refs #31150): # with self.assertNumQueries(1) as ctx: # self.assertSequenceEqual(books_qs, [book]) # self.assertEqual(ctx[0]['sql'].count('SELECT'), 2) @skipUnlessDBFeature('supports_subqueries_in_group_by') def test_aggregation_nested_subquery_outerref(self): publisher_with_same_name = Publisher.objects.filter( id__in=Subquery( Publisher.objects.filter( name=OuterRef(OuterRef('publisher__name')), ).values('id'), ), ).values(publisher_count=Count('id'))[:1] books_breakdown = Book.objects.annotate( publisher_count=Subquery(publisher_with_same_name), authors_count=Count('authors'), ).values_list('publisher_count', flat=True) self.assertSequenceEqual(books_breakdown, [1] * 6) def test_filter_in_subquery_or_aggregation(self): """ Filtering against an aggregate requires the usage of the HAVING clause. If such a filter is unionized to a non-aggregate one the latter will also need to be moved to the HAVING clause and have its grouping columns used in the GROUP BY. When this is done with a subquery the specialized logic in charge of using outer reference columns to group should be used instead of the subquery itself as the latter might return multiple rows. """ authors = Author.objects.annotate( Count('book'), ).filter( Q(book__count__gt=0) | Q(pk__in=Book.objects.values('authors')) ) self.assertQuerysetEqual(authors, Author.objects.all(), ordered=False) def test_aggregation_random_ordering(self): """Random() is not included in the GROUP BY when used for ordering.""" authors = Author.objects.annotate(contact_count=Count('book')).order_by('?') self.assertQuerysetEqual(authors, [ ('Adrian Holovaty', 1), ('Jacob Kaplan-Moss', 1), ('Brad Dayley', 1), ('James Bennett', 1), ('Jeffrey Forcier', 1), ('Paul Bissex', 1), ('Wesley J. Chun', 1), ('Stuart Russell', 1), ('Peter Norvig', 2), ], lambda a: (a.name, a.contact_count), ordered=False) def test_empty_result_optimization(self): with self.assertNumQueries(0): self.assertEqual( Publisher.objects.none().aggregate( sum_awards=Sum('num_awards'), books_count=Count('book'), ), { 'sum_awards': None, 'books_count': 0, } ) # Expression without empty_result_set_value forces queries to be # executed even if they would return an empty result set. 
raw_books_count = Func('book', function='COUNT') raw_books_count.contains_aggregate = True with self.assertNumQueries(1): self.assertEqual( Publisher.objects.none().aggregate( sum_awards=Sum('num_awards'), books_count=raw_books_count, ), { 'sum_awards': None, 'books_count': 0, } ) def test_coalesced_empty_result_set(self): with self.assertNumQueries(0): self.assertEqual( Publisher.objects.none().aggregate( sum_awards=Coalesce(Sum('num_awards'), 0), )['sum_awards'], 0, ) # Multiple expressions. with self.assertNumQueries(0): self.assertEqual( Publisher.objects.none().aggregate( sum_awards=Coalesce(Sum('num_awards'), None, 0), )['sum_awards'], 0, ) # Nested coalesce. with self.assertNumQueries(0): self.assertEqual( Publisher.objects.none().aggregate( sum_awards=Coalesce(Coalesce(Sum('num_awards'), None), 0), )['sum_awards'], 0, ) # Expression coalesce. with self.assertNumQueries(1): self.assertIsInstance( Store.objects.none().aggregate( latest_opening=Coalesce( Max('original_opening'), RawSQL('CURRENT_TIMESTAMP', []), ), )['latest_opening'], datetime.datetime, ) def test_aggregation_default_unsupported_by_count(self): msg = 'Count does not allow default.' with self.assertRaisesMessage(TypeError, msg): Count('age', default=0) def test_aggregation_default_unset(self): for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]: with self.subTest(Aggregate): result = Author.objects.filter(age__gt=100).aggregate( value=Aggregate('age'), ) self.assertIsNone(result['value']) def test_aggregation_default_zero(self): for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]: with self.subTest(Aggregate): result = Author.objects.filter(age__gt=100).aggregate( value=Aggregate('age', default=0), ) self.assertEqual(result['value'], 0) def test_aggregation_default_integer(self): for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]: with self.subTest(Aggregate): result = Author.objects.filter(age__gt=100).aggregate( value=Aggregate('age', default=21), ) self.assertEqual(result['value'], 21) def test_aggregation_default_expression(self): for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]: with self.subTest(Aggregate): result = Author.objects.filter(age__gt=100).aggregate( value=Aggregate('age', default=Value(5) * Value(7)), ) self.assertEqual(result['value'], 35) def test_aggregation_default_group_by(self): qs = Publisher.objects.values('name').annotate( books=Count('book'), pages=Sum('book__pages', default=0), ).filter(books=0) self.assertSequenceEqual( qs, [{'name': "Jonno's House of Books", 'books': 0, 'pages': 0}], ) def test_aggregation_default_compound_expression(self): # Scale rating to a percentage; default to 50% if no books published. formula = Avg('book__rating', default=2.5) * 20.0 queryset = Publisher.objects.annotate(rating=formula).order_by('name') self.assertSequenceEqual(queryset.values('name', 'rating'), [ {'name': 'Apress', 'rating': 85.0}, {'name': "Jonno's House of Books", 'rating': 50.0}, {'name': 'Morgan Kaufmann', 'rating': 100.0}, {'name': 'Prentice Hall', 'rating': 80.0}, {'name': 'Sams', 'rating': 60.0}, ]) def test_aggregation_default_using_time_from_python(self): expr = Min( 'store__friday_night_closing', filter=~Q(store__name='Amazon.com'), default=datetime.time(17), ) if connection.vendor == 'mysql': # Workaround for #30224 for MySQL 8.0+ & MariaDB. 
expr.default = Cast(expr.default, TimeField()) queryset = Book.objects.annotate(oldest_store_opening=expr).order_by('isbn') self.assertSequenceEqual(queryset.values('isbn', 'oldest_store_opening'), [ {'isbn': '013235613', 'oldest_store_opening': datetime.time(21, 30)}, {'isbn': '013790395', 'oldest_store_opening': datetime.time(23, 59, 59)}, {'isbn': '067232959', 'oldest_store_opening': datetime.time(17)}, {'isbn': '155860191', 'oldest_store_opening': datetime.time(21, 30)}, {'isbn': '159059725', 'oldest_store_opening': datetime.time(23, 59, 59)}, {'isbn': '159059996', 'oldest_store_opening': datetime.time(21, 30)}, ]) def test_aggregation_default_using_time_from_database(self): now = timezone.now().astimezone(timezone.utc) expr = Min( 'store__friday_night_closing', filter=~Q(store__name='Amazon.com'), default=TruncHour(NowUTC(), output_field=TimeField()), ) queryset = Book.objects.annotate(oldest_store_opening=expr).order_by('isbn') self.assertSequenceEqual(queryset.values('isbn', 'oldest_store_opening'), [ {'isbn': '013235613', 'oldest_store_opening': datetime.time(21, 30)}, {'isbn': '013790395', 'oldest_store_opening': datetime.time(23, 59, 59)}, {'isbn': '067232959', 'oldest_store_opening': datetime.time(now.hour)}, {'isbn': '155860191', 'oldest_store_opening': datetime.time(21, 30)}, {'isbn': '159059725', 'oldest_store_opening': datetime.time(23, 59, 59)}, {'isbn': '159059996', 'oldest_store_opening': datetime.time(21, 30)}, ]) def test_aggregation_default_using_date_from_python(self): expr = Min('book__pubdate', default=datetime.date(1970, 1, 1)) if connection.vendor == 'mysql': # Workaround for #30224 for MySQL 5.7+ & MariaDB. expr.default = Cast(expr.default, DateField()) queryset = Publisher.objects.annotate(earliest_pubdate=expr).order_by('name') self.assertSequenceEqual(queryset.values('name', 'earliest_pubdate'), [ {'name': 'Apress', 'earliest_pubdate': datetime.date(2007, 12, 6)}, {'name': "Jonno's House of Books", 'earliest_pubdate': datetime.date(1970, 1, 1)}, {'name': 'Morgan Kaufmann', 'earliest_pubdate': datetime.date(1991, 10, 15)}, {'name': 'Prentice Hall', 'earliest_pubdate': datetime.date(1995, 1, 15)}, {'name': 'Sams', 'earliest_pubdate': datetime.date(2008, 3, 3)}, ]) def test_aggregation_default_using_date_from_database(self): now = timezone.now().astimezone(timezone.utc) expr = Min('book__pubdate', default=TruncDate(NowUTC())) queryset = Publisher.objects.annotate(earliest_pubdate=expr).order_by('name') self.assertSequenceEqual(queryset.values('name', 'earliest_pubdate'), [ {'name': 'Apress', 'earliest_pubdate': datetime.date(2007, 12, 6)}, {'name': "Jonno's House of Books", 'earliest_pubdate': now.date()}, {'name': 'Morgan Kaufmann', 'earliest_pubdate': datetime.date(1991, 10, 15)}, {'name': 'Prentice Hall', 'earliest_pubdate': datetime.date(1995, 1, 15)}, {'name': 'Sams', 'earliest_pubdate': datetime.date(2008, 3, 3)}, ]) def test_aggregation_default_using_datetime_from_python(self): expr = Min( 'store__original_opening', filter=~Q(store__name='Amazon.com'), default=datetime.datetime(1970, 1, 1), ) if connection.vendor == 'mysql': # Workaround for #30224 for MySQL 8.0+ & MariaDB. 
expr.default = Cast(expr.default, DateTimeField()) queryset = Book.objects.annotate(oldest_store_opening=expr).order_by('isbn') self.assertSequenceEqual(queryset.values('isbn', 'oldest_store_opening'), [ {'isbn': '013235613', 'oldest_store_opening': datetime.datetime(1945, 4, 25, 16, 24, 14)}, {'isbn': '013790395', 'oldest_store_opening': datetime.datetime(2001, 3, 15, 11, 23, 37)}, {'isbn': '067232959', 'oldest_store_opening': datetime.datetime(1970, 1, 1)}, {'isbn': '155860191', 'oldest_store_opening': datetime.datetime(1945, 4, 25, 16, 24, 14)}, {'isbn': '159059725', 'oldest_store_opening': datetime.datetime(2001, 3, 15, 11, 23, 37)}, {'isbn': '159059996', 'oldest_store_opening': datetime.datetime(1945, 4, 25, 16, 24, 14)}, ]) def test_aggregation_default_using_datetime_from_database(self): now = timezone.now().astimezone(timezone.utc) expr = Min( 'store__original_opening', filter=~Q(store__name='Amazon.com'), default=TruncHour(NowUTC(), output_field=DateTimeField()), ) queryset = Book.objects.annotate(oldest_store_opening=expr).order_by('isbn') self.assertSequenceEqual(queryset.values('isbn', 'oldest_store_opening'), [ {'isbn': '013235613', 'oldest_store_opening': datetime.datetime(1945, 4, 25, 16, 24, 14)}, {'isbn': '013790395', 'oldest_store_opening': datetime.datetime(2001, 3, 15, 11, 23, 37)}, {'isbn': '067232959', 'oldest_store_opening': now.replace(minute=0, second=0, microsecond=0, tzinfo=None)}, {'isbn': '155860191', 'oldest_store_opening': datetime.datetime(1945, 4, 25, 16, 24, 14)}, {'isbn': '159059725', 'oldest_store_opening': datetime.datetime(2001, 3, 15, 11, 23, 37)}, {'isbn': '159059996', 'oldest_store_opening': datetime.datetime(1945, 4, 25, 16, 24, 14)}, ]) def test_aggregation_default_using_duration_from_python(self): result = Publisher.objects.filter(num_awards__gt=3).aggregate( value=Sum('duration', default=datetime.timedelta(0)), ) self.assertEqual(result['value'], datetime.timedelta(0)) def test_aggregation_default_using_duration_from_database(self): result = Publisher.objects.filter(num_awards__gt=3).aggregate( value=Sum('duration', default=Now() - Now()), ) self.assertEqual(result['value'], datetime.timedelta(0)) def test_aggregation_default_using_decimal_from_python(self): result = Book.objects.filter(rating__lt=3.0).aggregate( value=Sum('price', default=Decimal('0.00')), ) self.assertEqual(result['value'], Decimal('0.00')) def test_aggregation_default_using_decimal_from_database(self): result = Book.objects.filter(rating__lt=3.0).aggregate( value=Sum('price', default=Pi()), ) self.assertAlmostEqual(result['value'], Decimal.from_float(math.pi), places=6) def test_aggregation_default_passed_another_aggregate(self): result = Book.objects.aggregate( value=Sum('price', filter=Q(rating__lt=3.0), default=Avg('pages') / 10.0), ) self.assertAlmostEqual(result['value'], Decimal('61.72'), places=2) def test_exists_none_with_aggregate(self): qs = Book.objects.all().annotate( count=Count('id'), exists=Exists(Author.objects.none()), ) self.assertEqual(len(qs), 6) def test_exists_extra_where_with_aggregate(self): qs = Book.objects.all().annotate( count=Count('id'), exists=Exists(Author.objects.extra(where=['1=0'])), ) self.assertEqual(len(qs), 6)
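# The ``default=`` tests above pair with the explicit Coalesce tests earlier in
# this class: both spellings supply a fallback when the aggregated queryset is
# empty. A minimal illustrative sketch (not part of the original suite, reusing
# the Publisher model imported by this module) showing the two forms side by side:
def _sketch_aggregate_default():
    from django.db.models import Sum
    from django.db.models.functions import Coalesce

    # Both return {'total': 0} for an empty queryset; ``default=`` is the
    # shorthand for wrapping the aggregate in Coalesce.
    explicit = Publisher.objects.none().aggregate(total=Coalesce(Sum('num_awards'), 0))
    shorthand = Publisher.objects.none().aggregate(total=Sum('num_awards', default=0))
    return explicit, shorthand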
539ea38ee4ee350d01a1a24bc7c97e042edbc41b93ac7565ecd67bc69740deb7
import datetime
from decimal import Decimal

from django.db.models import (
    Avg, Case, Count, Exists, F, Max, OuterRef, Q, StdDev, Subquery, Sum,
    Variance, When,
)
from django.test import TestCase
from django.test.utils import Approximate

from .models import Author, Book, Publisher


class FilteredAggregateTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        cls.a1 = Author.objects.create(name='test', age=40)
        cls.a2 = Author.objects.create(name='test2', age=60)
        cls.a3 = Author.objects.create(name='test3', age=100)
        cls.p1 = Publisher.objects.create(name='Apress', num_awards=3, duration=datetime.timedelta(days=1))
        cls.b1 = Book.objects.create(
            isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right',
            pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1,
            pubdate=datetime.date(2007, 12, 6),
        )
        cls.b2 = Book.objects.create(
            isbn='067232959', name='Sams Teach Yourself Django in 24 Hours',
            pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a2, publisher=cls.p1,
            pubdate=datetime.date(2008, 3, 3),
        )
        cls.b3 = Book.objects.create(
            isbn='159059996', name='Practical Django Projects',
            pages=600, rating=4.5, price=Decimal('29.69'), contact=cls.a3, publisher=cls.p1,
            pubdate=datetime.date(2008, 6, 23),
        )
        cls.a1.friends.add(cls.a2)
        cls.a1.friends.add(cls.a3)
        cls.b1.authors.add(cls.a1)
        cls.b1.authors.add(cls.a3)
        cls.b2.authors.add(cls.a2)
        cls.b3.authors.add(cls.a3)

    def test_filtered_aggregates(self):
        agg = Sum('age', filter=Q(name__startswith='test'))
        self.assertEqual(Author.objects.aggregate(age=agg)['age'], 200)

    def test_filtered_numerical_aggregates(self):
        for aggregate, expected_result in (
            (Avg, Approximate(66.7, 1)),
            (StdDev, Approximate(24.9, 1)),
            (Variance, Approximate(622.2, 1)),
        ):
            with self.subTest(aggregate=aggregate.__name__):
                agg = aggregate('age', filter=Q(name__startswith='test'))
                self.assertEqual(Author.objects.aggregate(age=agg)['age'], expected_result)

    def test_double_filtered_aggregates(self):
        agg = Sum('age', filter=Q(Q(name='test2') & ~Q(name='test')))
        self.assertEqual(Author.objects.aggregate(age=agg)['age'], 60)

    def test_excluded_aggregates(self):
        agg = Sum('age', filter=~Q(name='test2'))
        self.assertEqual(Author.objects.aggregate(age=agg)['age'], 140)

    def test_related_aggregates_m2m(self):
        agg = Sum('friends__age', filter=~Q(friends__name='test'))
        self.assertEqual(Author.objects.filter(name='test').aggregate(age=agg)['age'], 160)

    def test_related_aggregates_m2m_and_fk(self):
        q = Q(friends__book__publisher__name='Apress') & ~Q(friends__name='test3')
        agg = Sum('friends__book__pages', filter=q)
        self.assertEqual(Author.objects.filter(name='test').aggregate(pages=agg)['pages'], 528)

    def test_plain_annotate(self):
        agg = Sum('book__pages', filter=Q(book__rating__gt=3))
        qs = Author.objects.annotate(pages=agg).order_by('pk')
        self.assertSequenceEqual([a.pages for a in qs], [447, None, 1047])

    def test_filtered_aggregate_on_annotate(self):
        pages_annotate = Sum('book__pages', filter=Q(book__rating__gt=3))
        age_agg = Sum('age', filter=Q(total_pages__gte=400))
        aggregated = Author.objects.annotate(total_pages=pages_annotate).aggregate(summed_age=age_agg)
        self.assertEqual(aggregated, {'summed_age': 140})

    def test_case_aggregate(self):
        agg = Sum(
            Case(When(friends__age=40, then=F('friends__age'))),
            filter=Q(friends__name__startswith='test'),
        )
        self.assertEqual(Author.objects.aggregate(age=agg)['age'], 80)

    def test_sum_star_exception(self):
        msg = 'Star cannot be used with filter. Please specify a field.'
        with self.assertRaisesMessage(ValueError, msg):
            Count('*', filter=Q(age=40))

    def test_filtered_reused_subquery(self):
        qs = Author.objects.annotate(
            older_friends_count=Count('friends', filter=Q(friends__age__gt=F('age'))),
        ).filter(
            older_friends_count__gte=2,
        )
        self.assertEqual(qs.get(pk__in=qs.values('pk')), self.a1)

    def test_filtered_aggregate_ref_annotation(self):
        aggs = Author.objects.annotate(
            double_age=F('age') * 2,
        ).aggregate(
            cnt=Count('pk', filter=Q(double_age__gt=100)),
        )
        self.assertEqual(aggs['cnt'], 2)

    def test_filtered_aggregate_ref_subquery_annotation(self):
        aggs = Author.objects.annotate(
            earliest_book_year=Subquery(
                Book.objects.filter(
                    contact__pk=OuterRef('pk'),
                ).order_by('pubdate').values('pubdate__year')[:1]
            ),
        ).aggregate(
            cnt=Count('pk', filter=Q(earliest_book_year=2008)),
        )
        self.assertEqual(aggs['cnt'], 2)

    def test_filtered_aggregate_ref_multiple_subquery_annotation(self):
        aggregate = Book.objects.values('publisher').annotate(
            has_authors=Exists(
                Book.authors.through.objects.filter(book=OuterRef('pk')),
            ),
            authors_have_other_books=Exists(
                Book.objects.filter(
                    authors__in=Author.objects.filter(
                        book_contact_set=OuterRef(OuterRef('pk')),
                    )
                ).exclude(pk=OuterRef('pk')),
            ),
        ).aggregate(
            max_rating=Max(
                'rating',
                filter=Q(has_authors=True, authors_have_other_books=False),
            )
        )
        self.assertEqual(aggregate, {'max_rating': 4.5})

    def test_filtered_aggregate_on_exists(self):
        aggregate = Book.objects.values('publisher').aggregate(
            max_rating=Max('rating', filter=Exists(
                Book.authors.through.objects.filter(book=OuterRef('pk')),
            )),
        )
        self.assertEqual(aggregate, {'max_rating': 4.5})
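# The class above exercises the ``filter=`` argument to aggregate functions.
# A minimal illustrative sketch (not part of the original suite, reusing the
# Author model imported above) that computes a filtered count and sum next to
# the unfiltered total in a single aggregate() call:
def _sketch_filtered_aggregate():
    from django.db.models import Count, Q, Sum

    return Author.objects.aggregate(
        total_authors=Count('pk'),
        test_authors=Count('pk', filter=Q(name__startswith='test')),
        test_age_sum=Sum('age', filter=Q(name__startswith='test')),
    )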
88ea82cd4a4c71e1f26e777ef608f6c94fe3b5e20f340a01ac01b11c8f757150
from functools import wraps

from django.db import IntegrityError, connections, transaction
from django.test import TestCase, skipUnlessDBFeature
from django.test.testcases import DatabaseOperationForbidden, TestData

from .models import Car, Person, PossessedCar


class TestTestCase(TestCase):
    @skipUnlessDBFeature('can_defer_constraint_checks')
    @skipUnlessDBFeature('supports_foreign_keys')
    def test_fixture_teardown_checks_constraints(self):
        rollback_atomics = self._rollback_atomics
        self._rollback_atomics = lambda connection: None  # noop
        try:
            car = PossessedCar.objects.create(car_id=1, belongs_to_id=1)
            with self.assertRaises(IntegrityError), transaction.atomic():
                self._fixture_teardown()
            car.delete()
        finally:
            self._rollback_atomics = rollback_atomics

    def test_disallowed_database_connection(self):
        message = (
            "Database connections to 'other' are not allowed in this test. "
            "Add 'other' to test_utils.test_testcase.TestTestCase.databases to "
            "ensure proper test isolation and silence this failure."
        )
        with self.assertRaisesMessage(DatabaseOperationForbidden, message):
            connections['other'].connect()
        with self.assertRaisesMessage(DatabaseOperationForbidden, message):
            connections['other'].temporary_connection()

    def test_disallowed_database_queries(self):
        message = (
            "Database queries to 'other' are not allowed in this test. "
            "Add 'other' to test_utils.test_testcase.TestTestCase.databases to "
            "ensure proper test isolation and silence this failure."
        )
        with self.assertRaisesMessage(DatabaseOperationForbidden, message):
            Car.objects.using('other').get()

    def test_reset_sequences(self):
        old_reset_sequences = self.reset_sequences
        self.reset_sequences = True
        msg = 'reset_sequences cannot be used on TestCase instances'
        try:
            with self.assertRaisesMessage(TypeError, msg):
                self._fixture_setup()
        finally:
            self.reset_sequences = old_reset_sequences


def assert_no_queries(test):
    @wraps(test)
    def inner(self):
        with self.assertNumQueries(0):
            test(self)
    return inner


class TestDataTests(TestCase):
    # setUpTestData re-assignments are also wrapped in TestData.
    jim_douglas = None

    @classmethod
    def setUpTestData(cls):
        cls.jim_douglas = Person.objects.create(name='Jim Douglas')
        cls.car = Car.objects.create(name='1963 Volkswagen Beetle')
        cls.herbie = cls.jim_douglas.possessed_cars.create(
            car=cls.car,
            belongs_to=cls.jim_douglas,
        )
        cls.person_binary = Person.objects.create(name='Person', data=b'binary data')
        cls.person_binary_get = Person.objects.get(pk=cls.person_binary.pk)

    @assert_no_queries
    def test_class_attribute_equality(self):
        """Class level test data is equal to instance level test data."""
        self.assertEqual(self.jim_douglas, self.__class__.jim_douglas)
        self.assertEqual(self.person_binary, self.__class__.person_binary)
        self.assertEqual(self.person_binary_get, self.__class__.person_binary_get)

    @assert_no_queries
    def test_class_attribute_identity(self):
        """
        Class level test data is not identical to instance level test data.
        """
        self.assertIsNot(self.jim_douglas, self.__class__.jim_douglas)
        self.assertIsNot(self.person_binary, self.__class__.person_binary)
        self.assertIsNot(self.person_binary_get, self.__class__.person_binary_get)

    @assert_no_queries
    def test_binaryfield_data_type(self):
        self.assertEqual(bytes(self.person_binary.data), b'binary data')
        self.assertEqual(bytes(self.person_binary_get.data), b'binary data')
        self.assertEqual(
            type(self.person_binary_get.data),
            type(self.__class__.person_binary_get.data),
        )
        self.assertEqual(
            type(self.person_binary.data),
            type(self.__class__.person_binary.data),
        )

    @assert_no_queries
    def test_identity_preservation(self):
        """Identity of test data is preserved between accesses."""
        self.assertIs(self.jim_douglas, self.jim_douglas)

    @assert_no_queries
    def test_known_related_objects_identity_preservation(self):
        """Known related objects identity is preserved."""
        self.assertIs(self.herbie.car, self.car)
        self.assertIs(self.herbie.belongs_to, self.jim_douglas)

    def test_repr(self):
        self.assertEqual(
            repr(TestData('attr', 'value')),
            "<TestData: name='attr', data='value'>",
        )


class SetupTestDataIsolationTests(TestCase):
    """
    In-memory data isolation is respected for model instances assigned to
    class attributes during setUpTestData.
    """
    @classmethod
    def setUpTestData(cls):
        cls.car = Car.objects.create(name='Volkswagen Beetle')

    def test_book_name_deutsh(self):
        self.assertEqual(self.car.name, 'Volkswagen Beetle')
        self.car.name = 'VW sKäfer'
        self.car.save()

    def test_book_name_french(self):
        self.assertEqual(self.car.name, 'Volkswagen Beetle')
        self.car.name = 'Volkswagen Coccinelle'
        self.car.save()
60a50caced08ab51c8a02d9288eb0db25b6c423e309411bd323f1d5812115b1f
import logging import os import unittest import warnings from io import StringIO from unittest import mock from django.conf import settings from django.contrib.staticfiles.finders import get_finder, get_finders from django.contrib.staticfiles.storage import staticfiles_storage from django.core.exceptions import ImproperlyConfigured from django.core.files.storage import default_storage from django.db import ( IntegrityError, connection, connections, models, router, transaction, ) from django.forms import ( CharField, EmailField, Form, IntegerField, ValidationError, formset_factory, ) from django.http import HttpResponse from django.template.loader import render_to_string from django.test import ( SimpleTestCase, TestCase, TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.html import HTMLParseError, parse_html from django.test.testcases import DatabaseOperationForbidden from django.test.utils import ( CaptureQueriesContext, TestContextDecorator, ignore_warnings, isolate_apps, override_settings, setup_test_environment, ) from django.urls import NoReverseMatch, path, reverse, reverse_lazy from django.utils.deprecation import RemovedInDjango50Warning from django.utils.log import DEFAULT_LOGGING from .models import Car, Person, PossessedCar from .views import empty_response class SkippingTestCase(SimpleTestCase): def _assert_skipping(self, func, expected_exc, msg=None): try: if msg is not None: with self.assertRaisesMessage(expected_exc, msg): func() else: with self.assertRaises(expected_exc): func() except unittest.SkipTest: self.fail('%s should not result in a skipped test.' % func.__name__) def test_skip_unless_db_feature(self): """ Testing the django.test.skipUnlessDBFeature decorator. """ # Total hack, but it works, just want an attribute that's always true. @skipUnlessDBFeature("__class__") def test_func(): raise ValueError @skipUnlessDBFeature("notprovided") def test_func2(): raise ValueError @skipUnlessDBFeature("__class__", "__class__") def test_func3(): raise ValueError @skipUnlessDBFeature("__class__", "notprovided") def test_func4(): raise ValueError self._assert_skipping(test_func, ValueError) self._assert_skipping(test_func2, unittest.SkipTest) self._assert_skipping(test_func3, ValueError) self._assert_skipping(test_func4, unittest.SkipTest) class SkipTestCase(SimpleTestCase): @skipUnlessDBFeature('missing') def test_foo(self): pass self._assert_skipping( SkipTestCase('test_foo').test_foo, ValueError, "skipUnlessDBFeature cannot be used on test_foo (test_utils.tests." "SkippingTestCase.test_skip_unless_db_feature.<locals>.SkipTestCase) " "as SkippingTestCase.test_skip_unless_db_feature.<locals>.SkipTestCase " "doesn't allow queries against the 'default' database." ) def test_skip_if_db_feature(self): """ Testing the django.test.skipIfDBFeature decorator. 
""" @skipIfDBFeature("__class__") def test_func(): raise ValueError @skipIfDBFeature("notprovided") def test_func2(): raise ValueError @skipIfDBFeature("__class__", "__class__") def test_func3(): raise ValueError @skipIfDBFeature("__class__", "notprovided") def test_func4(): raise ValueError @skipIfDBFeature("notprovided", "notprovided") def test_func5(): raise ValueError self._assert_skipping(test_func, unittest.SkipTest) self._assert_skipping(test_func2, ValueError) self._assert_skipping(test_func3, unittest.SkipTest) self._assert_skipping(test_func4, unittest.SkipTest) self._assert_skipping(test_func5, ValueError) class SkipTestCase(SimpleTestCase): @skipIfDBFeature('missing') def test_foo(self): pass self._assert_skipping( SkipTestCase('test_foo').test_foo, ValueError, "skipIfDBFeature cannot be used on test_foo (test_utils.tests." "SkippingTestCase.test_skip_if_db_feature.<locals>.SkipTestCase) " "as SkippingTestCase.test_skip_if_db_feature.<locals>.SkipTestCase " "doesn't allow queries against the 'default' database." ) class SkippingClassTestCase(TestCase): def test_skip_class_unless_db_feature(self): @skipUnlessDBFeature("__class__") class NotSkippedTests(TestCase): def test_dummy(self): return @skipUnlessDBFeature("missing") @skipIfDBFeature("__class__") class SkippedTests(TestCase): def test_will_be_skipped(self): self.fail("We should never arrive here.") @skipIfDBFeature("__dict__") class SkippedTestsSubclass(SkippedTests): pass test_suite = unittest.TestSuite() test_suite.addTest(NotSkippedTests('test_dummy')) try: test_suite.addTest(SkippedTests('test_will_be_skipped')) test_suite.addTest(SkippedTestsSubclass('test_will_be_skipped')) except unittest.SkipTest: self.fail('SkipTest should not be raised here.') result = unittest.TextTestRunner(stream=StringIO()).run(test_suite) self.assertEqual(result.testsRun, 3) self.assertEqual(len(result.skipped), 2) self.assertEqual(result.skipped[0][1], 'Database has feature(s) __class__') self.assertEqual(result.skipped[1][1], 'Database has feature(s) __class__') def test_missing_default_databases(self): @skipIfDBFeature('missing') class MissingDatabases(SimpleTestCase): def test_assertion_error(self): pass suite = unittest.TestSuite() try: suite.addTest(MissingDatabases('test_assertion_error')) except unittest.SkipTest: self.fail("SkipTest should not be raised at this stage") runner = unittest.TextTestRunner(stream=StringIO()) msg = ( "skipIfDBFeature cannot be used on <class 'test_utils.tests." "SkippingClassTestCase.test_missing_default_databases.<locals>." "MissingDatabases'> as it doesn't allow queries against the " "'default' database." ) with self.assertRaisesMessage(ValueError, msg): runner.run(suite) @override_settings(ROOT_URLCONF='test_utils.urls') class AssertNumQueriesTests(TestCase): def test_assert_num_queries(self): def test_func(): raise ValueError with self.assertRaises(ValueError): self.assertNumQueries(2, test_func) def test_assert_num_queries_with_client(self): person = Person.objects.create(name='test') self.assertNumQueries( 1, self.client.get, "/test_utils/get_person/%s/" % person.pk ) self.assertNumQueries( 1, self.client.get, "/test_utils/get_person/%s/" % person.pk ) def test_func(): self.client.get("/test_utils/get_person/%s/" % person.pk) self.client.get("/test_utils/get_person/%s/" % person.pk) self.assertNumQueries(2, test_func) @unittest.skipUnless( connection.vendor != 'sqlite' or not connection.is_in_memory_db(), 'For SQLite in-memory tests, closing the connection destroys the database.' 
) class AssertNumQueriesUponConnectionTests(TransactionTestCase): available_apps = [] def test_ignores_connection_configuration_queries(self): real_ensure_connection = connection.ensure_connection connection.close() def make_configuration_query(): is_opening_connection = connection.connection is None real_ensure_connection() if is_opening_connection: # Avoid infinite recursion. Creating a cursor calls # ensure_connection() which is currently mocked by this method. with connection.cursor() as cursor: cursor.execute('SELECT 1' + connection.features.bare_select_suffix) ensure_connection = 'django.db.backends.base.base.BaseDatabaseWrapper.ensure_connection' with mock.patch(ensure_connection, side_effect=make_configuration_query): with self.assertNumQueries(1): list(Car.objects.all()) class AssertQuerysetEqualTests(TestCase): @classmethod def setUpTestData(cls): cls.p1 = Person.objects.create(name='p1') cls.p2 = Person.objects.create(name='p2') def test_empty(self): self.assertQuerysetEqual(Person.objects.filter(name='p3'), []) def test_ordered(self): self.assertQuerysetEqual( Person.objects.all().order_by('name'), [self.p1, self.p2], ) def test_unordered(self): self.assertQuerysetEqual( Person.objects.all().order_by('name'), [self.p2, self.p1], ordered=False ) def test_queryset(self): self.assertQuerysetEqual( Person.objects.all().order_by('name'), Person.objects.all().order_by('name'), ) def test_flat_values_list(self): self.assertQuerysetEqual( Person.objects.all().order_by('name').values_list('name', flat=True), ['p1', 'p2'], ) def test_transform(self): self.assertQuerysetEqual( Person.objects.all().order_by('name'), [self.p1.pk, self.p2.pk], transform=lambda x: x.pk ) def test_repr_transform(self): self.assertQuerysetEqual( Person.objects.all().order_by('name'), [repr(self.p1), repr(self.p2)], transform=repr, ) def test_undefined_order(self): # Using an unordered queryset with more than one ordered value # is an error. msg = ( 'Trying to compare non-ordered queryset against more than one ' 'ordered value.' ) with self.assertRaisesMessage(ValueError, msg): self.assertQuerysetEqual( Person.objects.all(), [self.p1, self.p2], ) # No error for one value. self.assertQuerysetEqual(Person.objects.filter(name='p1'), [self.p1]) def test_repeated_values(self): """ assertQuerysetEqual checks the number of appearance of each item when used with option ordered=False. 
""" batmobile = Car.objects.create(name='Batmobile') k2000 = Car.objects.create(name='K 2000') PossessedCar.objects.bulk_create([ PossessedCar(car=batmobile, belongs_to=self.p1), PossessedCar(car=batmobile, belongs_to=self.p1), PossessedCar(car=k2000, belongs_to=self.p1), PossessedCar(car=k2000, belongs_to=self.p1), PossessedCar(car=k2000, belongs_to=self.p1), PossessedCar(car=k2000, belongs_to=self.p1), ]) with self.assertRaises(AssertionError): self.assertQuerysetEqual( self.p1.cars.all(), [batmobile, k2000], ordered=False ) self.assertQuerysetEqual( self.p1.cars.all(), [batmobile] * 2 + [k2000] * 4, ordered=False ) def test_maxdiff(self): names = ['Joe Smith %s' % i for i in range(20)] Person.objects.bulk_create([Person(name=name) for name in names]) names.append('Extra Person') with self.assertRaises(AssertionError) as ctx: self.assertQuerysetEqual( Person.objects.filter(name__startswith='Joe'), names, ordered=False, transform=lambda p: p.name, ) self.assertIn('Set self.maxDiff to None to see it.', str(ctx.exception)) original = self.maxDiff self.maxDiff = None try: with self.assertRaises(AssertionError) as ctx: self.assertQuerysetEqual( Person.objects.filter(name__startswith='Joe'), names, ordered=False, transform=lambda p: p.name, ) finally: self.maxDiff = original exception_msg = str(ctx.exception) self.assertNotIn('Set self.maxDiff to None to see it.', exception_msg) for name in names: self.assertIn(name, exception_msg) @override_settings(ROOT_URLCONF='test_utils.urls') class CaptureQueriesContextManagerTests(TestCase): @classmethod def setUpTestData(cls): cls.person_pk = str(Person.objects.create(name='test').pk) def test_simple(self): with CaptureQueriesContext(connection) as captured_queries: Person.objects.get(pk=self.person_pk) self.assertEqual(len(captured_queries), 1) self.assertIn(self.person_pk, captured_queries[0]['sql']) with CaptureQueriesContext(connection) as captured_queries: pass self.assertEqual(0, len(captured_queries)) def test_within(self): with CaptureQueriesContext(connection) as captured_queries: Person.objects.get(pk=self.person_pk) self.assertEqual(len(captured_queries), 1) self.assertIn(self.person_pk, captured_queries[0]['sql']) def test_nested(self): with CaptureQueriesContext(connection) as captured_queries: Person.objects.count() with CaptureQueriesContext(connection) as nested_captured_queries: Person.objects.count() self.assertEqual(1, len(nested_captured_queries)) self.assertEqual(2, len(captured_queries)) def test_failure(self): with self.assertRaises(TypeError): with CaptureQueriesContext(connection): raise TypeError def test_with_client(self): with CaptureQueriesContext(connection) as captured_queries: self.client.get("/test_utils/get_person/%s/" % self.person_pk) self.assertEqual(len(captured_queries), 1) self.assertIn(self.person_pk, captured_queries[0]['sql']) with CaptureQueriesContext(connection) as captured_queries: self.client.get("/test_utils/get_person/%s/" % self.person_pk) self.assertEqual(len(captured_queries), 1) self.assertIn(self.person_pk, captured_queries[0]['sql']) with CaptureQueriesContext(connection) as captured_queries: self.client.get("/test_utils/get_person/%s/" % self.person_pk) self.client.get("/test_utils/get_person/%s/" % self.person_pk) self.assertEqual(len(captured_queries), 2) self.assertIn(self.person_pk, captured_queries[0]['sql']) self.assertIn(self.person_pk, captured_queries[1]['sql']) @override_settings(ROOT_URLCONF='test_utils.urls') class AssertNumQueriesContextManagerTests(TestCase): def test_simple(self): 
with self.assertNumQueries(0): pass with self.assertNumQueries(1): Person.objects.count() with self.assertNumQueries(2): Person.objects.count() Person.objects.count() def test_failure(self): msg = ( '1 != 2 : 1 queries executed, 2 expected\nCaptured queries were:\n' '1.' ) with self.assertRaisesMessage(AssertionError, msg): with self.assertNumQueries(2): Person.objects.count() with self.assertRaises(TypeError): with self.assertNumQueries(4000): raise TypeError def test_with_client(self): person = Person.objects.create(name="test") with self.assertNumQueries(1): self.client.get("/test_utils/get_person/%s/" % person.pk) with self.assertNumQueries(1): self.client.get("/test_utils/get_person/%s/" % person.pk) with self.assertNumQueries(2): self.client.get("/test_utils/get_person/%s/" % person.pk) self.client.get("/test_utils/get_person/%s/" % person.pk) @override_settings(ROOT_URLCONF='test_utils.urls') class AssertTemplateUsedContextManagerTests(SimpleTestCase): def test_usage(self): with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/base.html') with self.assertTemplateUsed(template_name='template_used/base.html'): render_to_string('template_used/base.html') with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/include.html') with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/extends.html') with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/base.html') render_to_string('template_used/base.html') def test_nested_usage(self): with self.assertTemplateUsed('template_used/base.html'): with self.assertTemplateUsed('template_used/include.html'): render_to_string('template_used/include.html') with self.assertTemplateUsed('template_used/extends.html'): with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/extends.html') with self.assertTemplateUsed('template_used/base.html'): with self.assertTemplateUsed('template_used/alternative.html'): render_to_string('template_used/alternative.html') render_to_string('template_used/base.html') with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/extends.html') with self.assertTemplateNotUsed('template_used/base.html'): render_to_string('template_used/alternative.html') render_to_string('template_used/base.html') def test_not_used(self): with self.assertTemplateNotUsed('template_used/base.html'): pass with self.assertTemplateNotUsed('template_used/alternative.html'): pass def test_error_message(self): msg = 'No templates used to render the response' with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed('template_used/base.html'): pass with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed(template_name='template_used/base.html'): pass msg2 = ( "Template 'template_used/base.html' was not a template used to render " "the response. 
Actual template(s) used: template_used/alternative.html" ) with self.assertRaisesMessage(AssertionError, msg2): with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/alternative.html') with self.assertRaisesMessage(AssertionError, 'No templates used to render the response'): response = self.client.get('/test_utils/no_template_used/') self.assertTemplateUsed(response, 'template_used/base.html') def test_msg_prefix(self): msg_prefix = 'Prefix' msg = f'{msg_prefix}: No templates used to render the response' with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed('template_used/base.html', msg_prefix=msg_prefix): pass with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed( template_name='template_used/base.html', msg_prefix=msg_prefix, ): pass msg = ( f"{msg_prefix}: Template 'template_used/base.html' was not a " f"template used to render the response. Actual template(s) used: " f"template_used/alternative.html" ) with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed('template_used/base.html', msg_prefix=msg_prefix): render_to_string('template_used/alternative.html') def test_count(self): with self.assertTemplateUsed('template_used/base.html', count=2): render_to_string('template_used/base.html') render_to_string('template_used/base.html') msg = ( "Template 'template_used/base.html' was expected to be rendered " "3 time(s) but was actually rendered 2 time(s)." ) with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed('template_used/base.html', count=3): render_to_string('template_used/base.html') render_to_string('template_used/base.html') def test_failure(self): msg = 'response and/or template_name argument must be provided' with self.assertRaisesMessage(TypeError, msg): with self.assertTemplateUsed(): pass msg = 'No templates used to render the response' with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed(''): pass with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed(''): render_to_string('template_used/base.html') with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed(template_name=''): pass msg = ( "Template 'template_used/base.html' was not a template used to " "render the response. Actual template(s) used: " "template_used/alternative.html" ) with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed('template_used/base.html'): render_to_string('template_used/alternative.html') def test_assert_used_on_http_response(self): response = HttpResponse() msg = '%s() is only usable on responses fetched using the Django test Client.' 
with self.assertRaisesMessage(ValueError, msg % 'assertTemplateUsed'): self.assertTemplateUsed(response, 'template.html') with self.assertRaisesMessage(ValueError, msg % 'assertTemplateNotUsed'): self.assertTemplateNotUsed(response, 'template.html') class HTMLEqualTests(SimpleTestCase): def test_html_parser(self): element = parse_html('<div><p>Hello</p></div>') self.assertEqual(len(element.children), 1) self.assertEqual(element.children[0].name, 'p') self.assertEqual(element.children[0].children[0], 'Hello') parse_html('<p>') parse_html('<p attr>') dom = parse_html('<p>foo') self.assertEqual(len(dom.children), 1) self.assertEqual(dom.name, 'p') self.assertEqual(dom[0], 'foo') def test_parse_html_in_script(self): parse_html('<script>var a = "<p" + ">";</script>') parse_html(''' <script> var js_sha_link='<p>***</p>'; </script> ''') # script content will be parsed to text dom = parse_html(''' <script><p>foo</p> '</scr'+'ipt>' <span>bar</span></script> ''') self.assertEqual(len(dom.children), 1) self.assertEqual(dom.children[0], "<p>foo</p> '</scr'+'ipt>' <span>bar</span>") def test_self_closing_tags(self): self_closing_tags = [ 'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'link', 'meta', 'param', 'source', 'track', 'wbr', # Deprecated tags 'frame', 'spacer', ] for tag in self_closing_tags: with self.subTest(tag): dom = parse_html('<p>Hello <%s> world</p>' % tag) self.assertEqual(len(dom.children), 3) self.assertEqual(dom[0], 'Hello') self.assertEqual(dom[1].name, tag) self.assertEqual(dom[2], 'world') dom = parse_html('<p>Hello <%s /> world</p>' % tag) self.assertEqual(len(dom.children), 3) self.assertEqual(dom[0], 'Hello') self.assertEqual(dom[1].name, tag) self.assertEqual(dom[2], 'world') def test_simple_equal_html(self): self.assertHTMLEqual('', '') self.assertHTMLEqual('<p></p>', '<p></p>') self.assertHTMLEqual('<p></p>', ' <p> </p> ') self.assertHTMLEqual( '<div><p>Hello</p></div>', '<div><p>Hello</p></div>') self.assertHTMLEqual( '<div><p>Hello</p></div>', '<div> <p>Hello</p> </div>') self.assertHTMLEqual( '<div>\n<p>Hello</p></div>', '<div><p>Hello</p></div>\n') self.assertHTMLEqual( '<div><p>Hello\nWorld !</p></div>', '<div><p>Hello World\n!</p></div>') self.assertHTMLEqual( '<div><p>Hello\nWorld !</p></div>', '<div><p>Hello World\n!</p></div>') self.assertHTMLEqual( '<p>Hello World !</p>', '<p>Hello World\n\n!</p>') self.assertHTMLEqual('<p> </p>', '<p></p>') self.assertHTMLEqual('<p/>', '<p></p>') self.assertHTMLEqual('<p />', '<p></p>') self.assertHTMLEqual('<input checked>', '<input checked="checked">') self.assertHTMLEqual('<p>Hello', '<p> Hello') self.assertHTMLEqual('<p>Hello</p>World', '<p>Hello</p> World') def test_ignore_comments(self): self.assertHTMLEqual( '<div>Hello<!-- this is a comment --> World!</div>', '<div>Hello World!</div>') def test_unequal_html(self): self.assertHTMLNotEqual('<p>Hello</p>', '<p>Hello!</p>') self.assertHTMLNotEqual('<p>foo&#20;bar</p>', '<p>foo&nbsp;bar</p>') self.assertHTMLNotEqual('<p>foo bar</p>', '<p>foo &nbsp;bar</p>') self.assertHTMLNotEqual('<p>foo nbsp</p>', '<p>foo &nbsp;</p>') self.assertHTMLNotEqual('<p>foo #20</p>', '<p>foo &#20;</p>') self.assertHTMLNotEqual( '<p><span>Hello</span><span>World</span></p>', '<p><span>Hello</span>World</p>') self.assertHTMLNotEqual( '<p><span>Hello</span>World</p>', '<p><span>Hello</span><span>World</span></p>') def test_attributes(self): self.assertHTMLEqual( '<input type="text" id="id_name" />', '<input id="id_name" type="text" />') self.assertHTMLEqual( '''<input type='text' 
id="id_name" />''', '<input id="id_name" type="text" />') self.assertHTMLNotEqual( '<input type="text" id="id_name" />', '<input type="password" id="id_name" />') def test_class_attribute(self): pairs = [ ('<p class="foo bar"></p>', '<p class="bar foo"></p>'), ('<p class=" foo bar "></p>', '<p class="bar foo"></p>'), ('<p class=" foo bar "></p>', '<p class="bar foo"></p>'), ('<p class="foo\tbar"></p>', '<p class="bar foo"></p>'), ('<p class="\tfoo\tbar\t"></p>', '<p class="bar foo"></p>'), ('<p class="\t\t\tfoo\t\t\tbar\t\t\t"></p>', '<p class="bar foo"></p>'), ('<p class="\t \nfoo \t\nbar\n\t "></p>', '<p class="bar foo"></p>'), ] for html1, html2 in pairs: with self.subTest(html1): self.assertHTMLEqual(html1, html2) def test_boolean_attribute(self): html1 = '<input checked>' html2 = '<input checked="">' html3 = '<input checked="checked">' self.assertHTMLEqual(html1, html2) self.assertHTMLEqual(html1, html3) self.assertHTMLEqual(html2, html3) self.assertHTMLNotEqual(html1, '<input checked="invalid">') self.assertEqual(str(parse_html(html1)), '<input checked>') self.assertEqual(str(parse_html(html2)), '<input checked>') self.assertEqual(str(parse_html(html3)), '<input checked>') def test_non_boolean_attibutes(self): html1 = '<input value>' html2 = '<input value="">' html3 = '<input value="value">' self.assertHTMLEqual(html1, html2) self.assertHTMLNotEqual(html1, html3) self.assertEqual(str(parse_html(html1)), '<input value="">') self.assertEqual(str(parse_html(html2)), '<input value="">') def test_normalize_refs(self): pairs = [ ('&#39;', '&#x27;'), ('&#39;', "'"), ('&#x27;', '&#39;'), ('&#x27;', "'"), ("'", '&#39;'), ("'", '&#x27;'), ('&amp;', '&#38;'), ('&amp;', '&#x26;'), ('&amp;', '&'), ('&#38;', '&amp;'), ('&#38;', '&#x26;'), ('&#38;', '&'), ('&#x26;', '&amp;'), ('&#x26;', '&#38;'), ('&#x26;', '&'), ('&', '&amp;'), ('&', '&#38;'), ('&', '&#x26;'), ] for pair in pairs: with self.subTest(repr(pair)): self.assertHTMLEqual(*pair) def test_complex_examples(self): self.assertHTMLEqual( """<tr><th><label for="id_first_name">First name:</label></th> <td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr> <tr><th><label for="id_last_name">Last name:</label></th> <td><input type="text" id="id_last_name" name="last_name" value="Lennon" /></td></tr> <tr><th><label for="id_birthday">Birthday:</label></th> <td><input type="text" value="1940-10-9" name="birthday" id="id_birthday" /></td></tr>""", """ <tr><th> <label for="id_first_name">First name:</label></th><td> <input type="text" name="first_name" value="John" id="id_first_name" /> </td></tr> <tr><th> <label for="id_last_name">Last name:</label></th><td> <input type="text" name="last_name" value="Lennon" id="id_last_name" /> </td></tr> <tr><th> <label for="id_birthday">Birthday:</label></th><td> <input type="text" name="birthday" value="1940-10-9" id="id_birthday" /> </td></tr> """) self.assertHTMLEqual( """<!DOCTYPE html> <html> <head> <link rel="stylesheet"> <title>Document</title> <meta attribute="value"> </head> <body> <p> This is a valid paragraph <div> this is a div AFTER the p</div> </body> </html>""", """ <html> <head> <link rel="stylesheet"> <title>Document</title> <meta attribute="value"> </head> <body> <p> This is a valid paragraph <!-- browsers would close the p tag here --> <div> this is a div AFTER the p</div> </p> <!-- this is invalid HTML parsing, but it should make no difference in most cases --> </body> </html>""") def test_html_contain(self): # equal html contains each other dom1 = 
parse_html('<p>foo') dom2 = parse_html('<p>foo</p>') self.assertIn(dom1, dom2) self.assertIn(dom2, dom1) dom2 = parse_html('<div><p>foo</p></div>') self.assertIn(dom1, dom2) self.assertNotIn(dom2, dom1) self.assertNotIn('<p>foo</p>', dom2) self.assertIn('foo', dom2) # when a root element is used ... dom1 = parse_html('<p>foo</p><p>bar</p>') dom2 = parse_html('<p>foo</p><p>bar</p>') self.assertIn(dom1, dom2) dom1 = parse_html('<p>foo</p>') self.assertIn(dom1, dom2) dom1 = parse_html('<p>bar</p>') self.assertIn(dom1, dom2) dom1 = parse_html('<div><p>foo</p><p>bar</p></div>') self.assertIn(dom2, dom1) def test_count(self): # equal html contains each other one time dom1 = parse_html('<p>foo') dom2 = parse_html('<p>foo</p>') self.assertEqual(dom1.count(dom2), 1) self.assertEqual(dom2.count(dom1), 1) dom2 = parse_html('<p>foo</p><p>bar</p>') self.assertEqual(dom2.count(dom1), 1) dom2 = parse_html('<p>foo foo</p><p>foo</p>') self.assertEqual(dom2.count('foo'), 3) dom2 = parse_html('<p class="bar">foo</p>') self.assertEqual(dom2.count('bar'), 0) self.assertEqual(dom2.count('class'), 0) self.assertEqual(dom2.count('p'), 0) self.assertEqual(dom2.count('o'), 2) dom2 = parse_html('<p>foo</p><p>foo</p>') self.assertEqual(dom2.count(dom1), 2) dom2 = parse_html('<div><p>foo<input type=""></p><p>foo</p></div>') self.assertEqual(dom2.count(dom1), 1) dom2 = parse_html('<div><div><p>foo</p></div></div>') self.assertEqual(dom2.count(dom1), 1) dom2 = parse_html('<p>foo<p>foo</p></p>') self.assertEqual(dom2.count(dom1), 1) dom2 = parse_html('<p>foo<p>bar</p></p>') self.assertEqual(dom2.count(dom1), 0) # HTML with a root element contains the same HTML with no root element. dom1 = parse_html('<p>foo</p><p>bar</p>') dom2 = parse_html('<div><p>foo</p><p>bar</p></div>') self.assertEqual(dom2.count(dom1), 1) # Target of search is a sequence of child elements and appears more # than once. dom2 = parse_html('<div><p>foo</p><p>bar</p><p>foo</p><p>bar</p></div>') self.assertEqual(dom2.count(dom1), 2) # Searched HTML has additional children. dom1 = parse_html('<a/><b/>') dom2 = parse_html('<a/><b/><c/>') self.assertEqual(dom2.count(dom1), 1) # No match found in children. dom1 = parse_html('<b/><a/>') self.assertEqual(dom2.count(dom1), 0) # Target of search found among children and grandchildren. 
dom1 = parse_html('<b/><b/>') dom2 = parse_html('<a><b/><b/></a><b/><b/>') self.assertEqual(dom2.count(dom1), 2) def test_root_element_escaped_html(self): html = '&lt;br&gt;' parsed = parse_html(html) self.assertEqual(str(parsed), html) def test_parsing_errors(self): with self.assertRaises(AssertionError): self.assertHTMLEqual('<p>', '') with self.assertRaises(AssertionError): self.assertHTMLEqual('', '<p>') error_msg = ( "First argument is not valid HTML:\n" "('Unexpected end tag `div` (Line 1, Column 6)', (1, 6))" ) with self.assertRaisesMessage(AssertionError, error_msg): self.assertHTMLEqual('< div></ div>', '<div></div>') with self.assertRaises(HTMLParseError): parse_html('</p>') def test_escaped_html_errors(self): msg = ( '<p>\n<foo>\n</p>' ' != ' '<p>\n&lt;foo&gt;\n</p>\n' ) with self.assertRaisesMessage(AssertionError, msg): self.assertHTMLEqual('<p><foo></p>', '<p>&lt;foo&gt;</p>') with self.assertRaisesMessage(AssertionError, msg): self.assertHTMLEqual('<p><foo></p>', '<p>&#60;foo&#62;</p>') def test_contains_html(self): response = HttpResponse('''<body> This is a form: <form method="get"> <input type="text" name="Hello" /> </form></body>''') self.assertNotContains(response, "<input name='Hello' type='text'>") self.assertContains(response, '<form method="get">') self.assertContains(response, "<input name='Hello' type='text'>", html=True) self.assertNotContains(response, '<form method="get">', html=True) invalid_response = HttpResponse('''<body <bad>>''') with self.assertRaises(AssertionError): self.assertContains(invalid_response, '<p></p>') with self.assertRaises(AssertionError): self.assertContains(response, '<p "whats" that>') def test_unicode_handling(self): response = HttpResponse('<p class="help">Some help text for the title (with Unicode ŠĐĆŽćžšđ)</p>') self.assertContains( response, '<p class="help">Some help text for the title (with Unicode ŠĐĆŽćžšđ)</p>', html=True ) class JSONEqualTests(SimpleTestCase): def test_simple_equal(self): json1 = '{"attr1": "foo", "attr2":"baz"}' json2 = '{"attr1": "foo", "attr2":"baz"}' self.assertJSONEqual(json1, json2) def test_simple_equal_unordered(self): json1 = '{"attr1": "foo", "attr2":"baz"}' json2 = '{"attr2":"baz", "attr1": "foo"}' self.assertJSONEqual(json1, json2) def test_simple_equal_raise(self): json1 = '{"attr1": "foo", "attr2":"baz"}' json2 = '{"attr2":"baz"}' with self.assertRaises(AssertionError): self.assertJSONEqual(json1, json2) def test_equal_parsing_errors(self): invalid_json = '{"attr1": "foo, "attr2":"baz"}' valid_json = '{"attr1": "foo", "attr2":"baz"}' with self.assertRaises(AssertionError): self.assertJSONEqual(invalid_json, valid_json) with self.assertRaises(AssertionError): self.assertJSONEqual(valid_json, invalid_json) def test_simple_not_equal(self): json1 = '{"attr1": "foo", "attr2":"baz"}' json2 = '{"attr2":"baz"}' self.assertJSONNotEqual(json1, json2) def test_simple_not_equal_raise(self): json1 = '{"attr1": "foo", "attr2":"baz"}' json2 = '{"attr1": "foo", "attr2":"baz"}' with self.assertRaises(AssertionError): self.assertJSONNotEqual(json1, json2) def test_not_equal_parsing_errors(self): invalid_json = '{"attr1": "foo, "attr2":"baz"}' valid_json = '{"attr1": "foo", "attr2":"baz"}' with self.assertRaises(AssertionError): self.assertJSONNotEqual(invalid_json, valid_json) with self.assertRaises(AssertionError): self.assertJSONNotEqual(valid_json, invalid_json) class XMLEqualTests(SimpleTestCase): def test_simple_equal(self): xml1 = "<elem attr1='a' attr2='b' />" xml2 = "<elem attr1='a' attr2='b' />" 
self.assertXMLEqual(xml1, xml2) def test_simple_equal_unordered(self): xml1 = "<elem attr1='a' attr2='b' />" xml2 = "<elem attr2='b' attr1='a' />" self.assertXMLEqual(xml1, xml2) def test_simple_equal_raise(self): xml1 = "<elem attr1='a' />" xml2 = "<elem attr2='b' attr1='a' />" with self.assertRaises(AssertionError): self.assertXMLEqual(xml1, xml2) def test_simple_equal_raises_message(self): xml1 = "<elem attr1='a' />" xml2 = "<elem attr2='b' attr1='a' />" msg = '''{xml1} != {xml2} - <elem attr1='a' /> + <elem attr2='b' attr1='a' /> ? ++++++++++ '''.format(xml1=repr(xml1), xml2=repr(xml2)) with self.assertRaisesMessage(AssertionError, msg): self.assertXMLEqual(xml1, xml2) def test_simple_not_equal(self): xml1 = "<elem attr1='a' attr2='c' />" xml2 = "<elem attr1='a' attr2='b' />" self.assertXMLNotEqual(xml1, xml2) def test_simple_not_equal_raise(self): xml1 = "<elem attr1='a' attr2='b' />" xml2 = "<elem attr2='b' attr1='a' />" with self.assertRaises(AssertionError): self.assertXMLNotEqual(xml1, xml2) def test_parsing_errors(self): xml_unvalid = "<elem attr1='a attr2='b' />" xml2 = "<elem attr2='b' attr1='a' />" with self.assertRaises(AssertionError): self.assertXMLNotEqual(xml_unvalid, xml2) def test_comment_root(self): xml1 = "<?xml version='1.0'?><!-- comment1 --><elem attr1='a' attr2='b' />" xml2 = "<?xml version='1.0'?><!-- comment2 --><elem attr2='b' attr1='a' />" self.assertXMLEqual(xml1, xml2) def test_simple_equal_with_leading_or_trailing_whitespace(self): xml1 = "<elem>foo</elem> \t\n" xml2 = " \t\n<elem>foo</elem>" self.assertXMLEqual(xml1, xml2) def test_simple_not_equal_with_whitespace_in_the_middle(self): xml1 = "<elem>foo</elem><elem>bar</elem>" xml2 = "<elem>foo</elem> <elem>bar</elem>" self.assertXMLNotEqual(xml1, xml2) def test_doctype_root(self): xml1 = '<?xml version="1.0"?><!DOCTYPE root SYSTEM "example1.dtd"><root />' xml2 = '<?xml version="1.0"?><!DOCTYPE root SYSTEM "example2.dtd"><root />' self.assertXMLEqual(xml1, xml2) def test_processing_instruction(self): xml1 = ( '<?xml version="1.0"?>' '<?xml-model href="http://www.example1.com"?><root />' ) xml2 = ( '<?xml version="1.0"?>' '<?xml-model href="http://www.example2.com"?><root />' ) self.assertXMLEqual(xml1, xml2) self.assertXMLEqual( '<?xml-stylesheet href="style1.xslt" type="text/xsl"?><root />', '<?xml-stylesheet href="style2.xslt" type="text/xsl"?><root />', ) class SkippingExtraTests(TestCase): fixtures = ['should_not_be_loaded.json'] # HACK: This depends on internals of our TestCase subclasses def __call__(self, result=None): # Detect fixture loading by counting SQL queries, should be zero with self.assertNumQueries(0): super().__call__(result) @unittest.skip("Fixture loading should not be performed for skipped tests.") def test_fixtures_are_skipped(self): pass class AssertRaisesMsgTest(SimpleTestCase): def test_assert_raises_message(self): msg = "'Expected message' not found in 'Unexpected message'" # context manager form of assertRaisesMessage() with self.assertRaisesMessage(AssertionError, msg): with self.assertRaisesMessage(ValueError, "Expected message"): raise ValueError("Unexpected message") # callable form def func(): raise ValueError("Unexpected message") with self.assertRaisesMessage(AssertionError, msg): self.assertRaisesMessage(ValueError, "Expected message", func) def test_special_re_chars(self): """assertRaisesMessage shouldn't interpret RE special chars.""" def func1(): raise ValueError("[.*x+]y?") with self.assertRaisesMessage(ValueError, "[.*x+]y?"): func1() class 
AssertWarnsMessageTests(SimpleTestCase): def test_context_manager(self): with self.assertWarnsMessage(UserWarning, 'Expected message'): warnings.warn('Expected message', UserWarning) def test_context_manager_failure(self): msg = "Expected message' not found in 'Unexpected message'" with self.assertRaisesMessage(AssertionError, msg): with self.assertWarnsMessage(UserWarning, 'Expected message'): warnings.warn('Unexpected message', UserWarning) def test_callable(self): def func(): warnings.warn('Expected message', UserWarning) self.assertWarnsMessage(UserWarning, 'Expected message', func) def test_special_re_chars(self): def func1(): warnings.warn('[.*x+]y?', UserWarning) with self.assertWarnsMessage(UserWarning, '[.*x+]y?'): func1() # TODO: Remove when dropping support for PY39. class AssertNoLogsTest(SimpleTestCase): @classmethod def setUpClass(cls): super().setUpClass() logging.config.dictConfig(DEFAULT_LOGGING) cls.addClassCleanup(logging.config.dictConfig, settings.LOGGING) def setUp(self): self.logger = logging.getLogger('django') @override_settings(DEBUG=True) def test_fails_when_log_emitted(self): msg = "Unexpected logs found: ['INFO:django:FAIL!']" with self.assertRaisesMessage(AssertionError, msg): with self.assertNoLogs('django', 'INFO'): self.logger.info('FAIL!') @override_settings(DEBUG=True) def test_text_level(self): with self.assertNoLogs('django', 'INFO'): self.logger.debug('DEBUG logs are ignored.') @override_settings(DEBUG=True) def test_int_level(self): with self.assertNoLogs('django', logging.INFO): self.logger.debug('DEBUG logs are ignored.') @override_settings(DEBUG=True) def test_default_level(self): with self.assertNoLogs('django'): self.logger.debug('DEBUG logs are ignored.') @override_settings(DEBUG=True) def test_does_not_hide_other_failures(self): msg = '1 != 2' with self.assertRaisesMessage(AssertionError, msg): with self.assertNoLogs('django'): self.assertEqual(1, 2) class AssertFieldOutputTests(SimpleTestCase): def test_assert_field_output(self): error_invalid = ['Enter a valid email address.'] self.assertFieldOutput(EmailField, {'[email protected]': '[email protected]'}, {'aaa': error_invalid}) with self.assertRaises(AssertionError): self.assertFieldOutput(EmailField, {'[email protected]': '[email protected]'}, {'aaa': error_invalid + ['Another error']}) with self.assertRaises(AssertionError): self.assertFieldOutput(EmailField, {'[email protected]': 'Wrong output'}, {'aaa': error_invalid}) with self.assertRaises(AssertionError): self.assertFieldOutput( EmailField, {'[email protected]': '[email protected]'}, {'aaa': ['Come on, gimme some well formatted data, dude.']} ) def test_custom_required_message(self): class MyCustomField(IntegerField): default_error_messages = { 'required': 'This is really required.', } self.assertFieldOutput(MyCustomField, {}, {}, empty_value=None) @override_settings(ROOT_URLCONF='test_utils.urls') class AssertURLEqualTests(SimpleTestCase): def test_equal(self): valid_tests = ( ('http://example.com/?', 'http://example.com/'), ('http://example.com/?x=1&', 'http://example.com/?x=1'), ('http://example.com/?x=1&y=2', 'http://example.com/?y=2&x=1'), ('http://example.com/?x=1&y=2', 'http://example.com/?y=2&x=1'), ('http://example.com/?x=1&y=2&a=1&a=2', 'http://example.com/?a=1&a=2&y=2&x=1'), ('/path/to/?x=1&y=2&z=3', '/path/to/?z=3&y=2&x=1'), ('?x=1&y=2&z=3', '?z=3&y=2&x=1'), ('/test_utils/no_template_used/', reverse_lazy('no_template_used')), ) for url1, url2 in valid_tests: with self.subTest(url=url1): self.assertURLEqual(url1, url2) def 
test_not_equal(self): invalid_tests = ( # Protocol must be the same. ('http://example.com/', 'https://example.com/'), ('http://example.com/?x=1&x=2', 'https://example.com/?x=2&x=1'), ('http://example.com/?x=1&y=bar&x=2', 'https://example.com/?y=bar&x=2&x=1'), # Parameters of the same name must be in the same order. ('/path/to?a=1&a=2', '/path/to/?a=2&a=1') ) for url1, url2 in invalid_tests: with self.subTest(url=url1), self.assertRaises(AssertionError): self.assertURLEqual(url1, url2) def test_message(self): msg = ( "Expected 'http://example.com/?x=1&x=2' to equal " "'https://example.com/?x=2&x=1'" ) with self.assertRaisesMessage(AssertionError, msg): self.assertURLEqual('http://example.com/?x=1&x=2', 'https://example.com/?x=2&x=1') def test_msg_prefix(self): msg = ( "Prefix: Expected 'http://example.com/?x=1&x=2' to equal " "'https://example.com/?x=2&x=1'" ) with self.assertRaisesMessage(AssertionError, msg): self.assertURLEqual( 'http://example.com/?x=1&x=2', 'https://example.com/?x=2&x=1', msg_prefix='Prefix: ', ) class TestForm(Form): field = CharField() def clean_field(self): value = self.cleaned_data.get('field', '') if value == 'invalid': raise ValidationError('invalid value') return value def clean(self): if self.cleaned_data.get('field') == 'invalid_non_field': raise ValidationError('non-field error') return self.cleaned_data @classmethod def _get_cleaned_form(cls, field_value): form = cls({'field': field_value}) form.full_clean() return form @classmethod def valid(cls): return cls._get_cleaned_form('valid') @classmethod def invalid(cls, nonfield=False): return cls._get_cleaned_form('invalid_non_field' if nonfield else 'invalid') class TestFormset(formset_factory(TestForm)): @classmethod def _get_cleaned_formset(cls, field_value): formset = cls({ 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '0', 'form-0-field': field_value, }) formset.full_clean() return formset @classmethod def valid(cls): return cls._get_cleaned_formset('valid') @classmethod def invalid(cls, nonfield=False, nonform=False): if nonform: formset = cls({}, error_messages={'missing_management_form': 'error'}) formset.full_clean() return formset return cls._get_cleaned_formset('invalid_non_field' if nonfield else 'invalid') class AssertFormErrorTests(SimpleTestCase): def test_non_client_response(self): msg = ( 'assertFormError() is only usable on responses fetched using the ' 'Django test Client.' 
) response = HttpResponse() with self.assertRaisesMessage(ValueError, msg): self.assertFormError(response, 'formset', 0, 'field', 'invalid value') def test_response_with_no_context(self): msg = 'Response did not use any contexts to render the response' response = mock.Mock(context=[]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'form', 'field', 'invalid value') msg_prefix = 'Custom prefix' with self.assertRaisesMessage(AssertionError, f'{msg_prefix}: {msg}'): self.assertFormError( response, 'form', 'field', 'invalid value', msg_prefix=msg_prefix, ) def test_form_not_in_context(self): msg = "The form 'form' was not used to render the response" response = mock.Mock(context=[{}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'form', 'field', 'invalid value') def test_field_not_in_form(self): msg = "The form 'form' in context 0 does not contain the field 'other_field'" response = mock.Mock(context=[{'form': TestForm.invalid()}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'form', 'other_field', 'invalid value') def test_field_not_in_form_multicontext(self): msg = "The form 'form' in context 1 does not contain the field 'other_field'" response = mock.Mock(context=[{}, {'form': TestForm.invalid()}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'form', 'other_field', 'invalid value') def test_field_with_no_errors(self): msg = "The field 'field' on form 'form' in context 0 contains no errors" response = mock.Mock(context=[{'form': TestForm.valid()}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'form', 'field', 'invalid value') def test_field_with_no_errors_multicontext(self): msg = "The field 'field' on form 'form' in context 1 contains no errors" response = mock.Mock(context=[{}, {'form': TestForm.valid()}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'form', 'field', 'invalid value') def test_field_with_different_error(self): msg = ( "The field 'field' on form 'form' in context 0 does not contain " "the error 'other error' (actual errors: ['invalid value'])" ) response = mock.Mock(context=[{'form': TestForm.invalid()}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'form', 'field', 'other error') def test_field_with_different_error_multicontext(self): msg = ( "The field 'field' on form 'form' in context 1 does not contain " "the error 'other error' (actual errors: ['invalid value'])" ) response = mock.Mock(context=[{}, {'form': TestForm.invalid()}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'form', 'field', 'other error') def test_basic_positive_assertion(self): response = mock.Mock(context=[{'form': TestForm.invalid()}]) self.assertFormError(response, 'form', 'field', 'invalid value') def test_basic_positive_assertion_multicontext(self): response = mock.Mock(context=[{}, {'form': TestForm.invalid()}]) self.assertFormError(response, 'form', 'field', 'invalid value') def test_empty_errors_unbound_form(self): response = mock.Mock(context=[{'form': TestForm()}]) self.assertFormError(response, 'form', 'field', []) def test_empty_errors_valid_form(self): response = mock.Mock(context=[{'form': TestForm.valid()}]) self.assertFormError(response, 'form', 'field', []) def test_empty_errors_invalid_form(self): response = mock.Mock(context=[{'form': TestForm.invalid()}]) self.assertFormError(response, 
'form', 'field', []) def test_non_field_errors(self): response = mock.Mock(context=[{'form': TestForm.invalid(nonfield=True)}]) self.assertFormError(response, 'form', None, 'non-field error') @ignore_warnings(category=RemovedInDjango50Warning) def test_errors_none(self): response = mock.Mock(context=[{'form': TestForm.invalid()}]) self.assertFormError(response, 'form', 'field', None) def test_errors_none_warning(self): response = mock.Mock(context=[{'form': TestForm.invalid()}]) msg = ( 'Passing errors=None to assertFormError() is deprecated, use ' 'errors=[] instead.' ) with self.assertWarnsMessage(RemovedInDjango50Warning, msg): self.assertFormError(response, 'form', 'value', None) class AssertFormsetErrorTests(SimpleTestCase): def _get_formset_data(self, field_value): return { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '0', 'form-0-field': field_value, } def test_non_client_response(self): msg = ( 'assertFormsetError() is only usable on responses fetched using ' 'the Django test Client.' ) response = HttpResponse() with self.assertRaisesMessage(ValueError, msg): self.assertFormsetError(response, 'formset', 0, 'field', 'invalid value') def test_response_with_no_context(self): msg = 'Response did not use any contexts to render the response' response = mock.Mock(context=[]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(response, 'formset', 0, 'field', 'invalid value') def test_formset_not_in_context(self): msg = "The formset 'formset' was not used to render the response" response = mock.Mock(context=[{}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(response, 'formset', 0, 'field', 'invalid value') def test_field_not_in_form(self): msg = ( "The formset 'formset', form 0 in context 0 does not contain the " "field 'other_field'" ) response = mock.Mock(context=[{'formset': TestFormset.invalid()}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError( response, 'formset', 0, 'other_field', 'invalid value', ) def test_field_not_in_form_multicontext(self): msg = ( "The formset 'formset', form 0 in context 1 does not contain the " "field 'other_field'" ) response = mock.Mock(context=[{}, {'formset': TestFormset.invalid()}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError( response, 'formset', 0, 'other_field', 'invalid value', ) def test_field_with_no_errors(self): msg = ( "The field 'field' on formset 'formset', form 0 in context 0 " "contains no errors" ) response = mock.Mock(context=[{'formset': TestFormset.valid()}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(response, 'formset', 0, 'field', 'invalid value') def test_field_with_no_errors_multicontext(self): msg = ( "The field 'field' on formset 'formset', form 0 in context 1 " "contains no errors" ) response = mock.Mock(context=[{}, {'formset': TestFormset.valid()}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(response, 'formset', 0, 'field', 'invalid value') def test_field_with_different_error(self): msg = ( "The field 'field' on formset 'formset', form 0 in context 0 does" " not contain the error 'other error' (actual errors: ['invalid " "value'])" ) response = mock.Mock(context=[{'formset': TestFormset.invalid()}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(response, 'formset', 0, 'field', 'other error') def test_field_with_different_error_multicontext(self): msg = ( "The field 'field' on formset 'formset', form 0 in context 1 
does" " not contain the error 'other error' (actual errors: ['invalid " "value'])" ) response = mock.Mock(context=[{}, {'formset': TestFormset.invalid()}]) with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(response, 'formset', 0, 'field', 'other error') def test_basic_positive_assertion(self): response = mock.Mock(context=[{'formset': TestFormset.invalid()}]) self.assertFormsetError(response, 'formset', 0, 'field', 'invalid value') def test_basic_positive_assertion_multicontext(self): response = mock.Mock(context=[{}, {'formset': TestFormset.invalid()}]) self.assertFormsetError(response, 'formset', 0, 'field', 'invalid value') def test_empty_errors_unbound_formset(self): response = mock.Mock(context=[{'formset': TestFormset()}]) self.assertFormsetError(response, 'formset', 0, 'field', []) def test_empty_errors_valid_formset(self): response = mock.Mock(context=[{}, {'formset': TestFormset.valid()}]) self.assertFormsetError(response, 'formset', 0, 'field', []) def test_empty_errors_invalid_formset(self): response = mock.Mock(context=[{}, {'formset': TestFormset.invalid()}]) self.assertFormsetError(response, 'formset', 0, 'field', []) def test_non_field_errors(self): response = mock.Mock(context=[ {}, {'formset': TestFormset.invalid(nonfield=True)}, ]) self.assertFormsetError(response, 'formset', 0, None, 'non-field error') def test_non_form_errors(self): response = mock.Mock(context=[ {}, {'formset': TestFormset.invalid(nonform=True)}, ]) self.assertFormsetError(response, 'formset', None, None, 'error') def test_formset_named_form(self): formset = TestFormset.invalid() # The mocked context emulates the template-based rendering of the # formset. response = mock.Mock(context=[ {'form': formset}, {'form': formset.management_form}, ]) self.assertFormsetError(response, 'form', 0, 'field', 'invalid value') @ignore_warnings(category=RemovedInDjango50Warning) def test_errors_none(self): response = mock.Mock(context=[{'formset': TestFormset.invalid()}]) self.assertFormsetError(response, 'formset', 0, 'field', None) def test_errors_none_warning(self): response = mock.Mock(context=[{'formset': TestFormset.invalid()}]) msg = ( 'Passing errors=None to assertFormsetError() is deprecated, use ' 'errors=[] instead.' ) with self.assertWarnsMessage(RemovedInDjango50Warning, msg): self.assertFormsetError(response, 'formset', 0, 'field', None) class FirstUrls: urlpatterns = [path('first/', empty_response, name='first')] class SecondUrls: urlpatterns = [path('second/', empty_response, name='second')] class SetupTestEnvironmentTests(SimpleTestCase): def test_setup_test_environment_calling_more_than_once(self): with self.assertRaisesMessage(RuntimeError, "setup_test_environment() was already called"): setup_test_environment() def test_allowed_hosts(self): for type_ in (list, tuple): with self.subTest(type_=type_): allowed_hosts = type_('*') with mock.patch('django.test.utils._TestState') as x: del x.saved_data with self.settings(ALLOWED_HOSTS=allowed_hosts): setup_test_environment() self.assertEqual(settings.ALLOWED_HOSTS, ['*', 'testserver']) class OverrideSettingsTests(SimpleTestCase): # #21518 -- If neither override_settings nor a setting_changed receiver # clears the URL cache between tests, then one of test_first or # test_second will fail. 
@override_settings(ROOT_URLCONF=FirstUrls) def test_urlconf_first(self): reverse('first') @override_settings(ROOT_URLCONF=SecondUrls) def test_urlconf_second(self): reverse('second') def test_urlconf_cache(self): with self.assertRaises(NoReverseMatch): reverse('first') with self.assertRaises(NoReverseMatch): reverse('second') with override_settings(ROOT_URLCONF=FirstUrls): self.client.get(reverse('first')) with self.assertRaises(NoReverseMatch): reverse('second') with override_settings(ROOT_URLCONF=SecondUrls): with self.assertRaises(NoReverseMatch): reverse('first') self.client.get(reverse('second')) self.client.get(reverse('first')) with self.assertRaises(NoReverseMatch): reverse('second') with self.assertRaises(NoReverseMatch): reverse('first') with self.assertRaises(NoReverseMatch): reverse('second') def test_override_media_root(self): """ Overriding the MEDIA_ROOT setting should be reflected in the base_location attribute of django.core.files.storage.default_storage. """ self.assertEqual(default_storage.base_location, '') with self.settings(MEDIA_ROOT='test_value'): self.assertEqual(default_storage.base_location, 'test_value') def test_override_media_url(self): """ Overriding the MEDIA_URL setting should be reflected in the base_url attribute of django.core.files.storage.default_storage. """ self.assertEqual(default_storage.base_location, '') with self.settings(MEDIA_URL='/test_value/'): self.assertEqual(default_storage.base_url, '/test_value/') def test_override_file_upload_permissions(self): """ Overriding the FILE_UPLOAD_PERMISSIONS setting should be reflected in the file_permissions_mode attribute of django.core.files.storage.default_storage. """ self.assertEqual(default_storage.file_permissions_mode, 0o644) with self.settings(FILE_UPLOAD_PERMISSIONS=0o777): self.assertEqual(default_storage.file_permissions_mode, 0o777) def test_override_file_upload_directory_permissions(self): """ Overriding the FILE_UPLOAD_DIRECTORY_PERMISSIONS setting should be reflected in the directory_permissions_mode attribute of django.core.files.storage.default_storage. """ self.assertIsNone(default_storage.directory_permissions_mode) with self.settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o777): self.assertEqual(default_storage.directory_permissions_mode, 0o777) def test_override_database_routers(self): """ Overriding DATABASE_ROUTERS should update the master router. """ test_routers = [object()] with self.settings(DATABASE_ROUTERS=test_routers): self.assertEqual(router.routers, test_routers) def test_override_static_url(self): """ Overriding the STATIC_URL setting should be reflected in the base_url attribute of django.contrib.staticfiles.storage.staticfiles_storage. """ with self.settings(STATIC_URL='/test/'): self.assertEqual(staticfiles_storage.base_url, '/test/') def test_override_static_root(self): """ Overriding the STATIC_ROOT setting should be reflected in the location attribute of django.contrib.staticfiles.storage.staticfiles_storage. """ with self.settings(STATIC_ROOT='/tmp/test'): self.assertEqual(staticfiles_storage.location, os.path.abspath('/tmp/test')) def test_override_staticfiles_storage(self): """ Overriding the STATICFILES_STORAGE setting should be reflected in the value of django.contrib.staticfiles.storage.staticfiles_storage. """ new_class = 'ManifestStaticFilesStorage' new_storage = 'django.contrib.staticfiles.storage.' 
+ new_class
        with self.settings(STATICFILES_STORAGE=new_storage):
            self.assertEqual(staticfiles_storage.__class__.__name__, new_class)

    def test_override_staticfiles_finders(self):
        """
        Overriding the STATICFILES_FINDERS setting should be reflected in
        the return value of django.contrib.staticfiles.finders.get_finders.
        """
        current = get_finders()
        self.assertGreater(len(list(current)), 1)
        finders = ['django.contrib.staticfiles.finders.FileSystemFinder']
        with self.settings(STATICFILES_FINDERS=finders):
            self.assertEqual(len(list(get_finders())), len(finders))

    def test_override_staticfiles_dirs(self):
        """
        Overriding the STATICFILES_DIRS setting should be reflected in
        the locations attribute of the
        django.contrib.staticfiles.finders.FileSystemFinder instance.
        """
        finder = get_finder('django.contrib.staticfiles.finders.FileSystemFinder')
        test_path = '/tmp/test'
        expected_location = ('', test_path)
        self.assertNotIn(expected_location, finder.locations)
        with self.settings(STATICFILES_DIRS=[test_path]):
            finder = get_finder('django.contrib.staticfiles.finders.FileSystemFinder')
            self.assertIn(expected_location, finder.locations)


class TestBadSetUpTestData(TestCase):
    """
    An exception in setUpTestData() shouldn't leak a transaction which would
    cascade across the rest of the test suite.
    """
    class MyException(Exception):
        pass

    @classmethod
    def setUpClass(cls):
        try:
            super().setUpClass()
        except cls.MyException:
            cls._in_atomic_block = connection.in_atomic_block

    @classmethod
    def tearDownClass(cls):
        # override to avoid a second cls._rollback_atomics() which would fail.
        # Normal setUpClass() methods won't have exception handling so this
        # method wouldn't typically be run.
        pass

    @classmethod
    def setUpTestData(cls):
        # Simulate a broken setUpTestData() method.
        raise cls.MyException()

    def test_failure_in_setUpTestData_should_rollback_transaction(self):
        # setUpTestData() should call _rollback_atomics() so that the
        # transaction doesn't leak.
self.assertFalse(self._in_atomic_block) class CaptureOnCommitCallbacksTests(TestCase): databases = {'default', 'other'} callback_called = False def enqueue_callback(self, using='default'): def hook(): self.callback_called = True transaction.on_commit(hook, using=using) def test_no_arguments(self): with self.captureOnCommitCallbacks() as callbacks: self.enqueue_callback() self.assertEqual(len(callbacks), 1) self.assertIs(self.callback_called, False) callbacks[0]() self.assertIs(self.callback_called, True) def test_using(self): with self.captureOnCommitCallbacks(using='other') as callbacks: self.enqueue_callback(using='other') self.assertEqual(len(callbacks), 1) self.assertIs(self.callback_called, False) callbacks[0]() self.assertIs(self.callback_called, True) def test_different_using(self): with self.captureOnCommitCallbacks(using='default') as callbacks: self.enqueue_callback(using='other') self.assertEqual(callbacks, []) def test_execute(self): with self.captureOnCommitCallbacks(execute=True) as callbacks: self.enqueue_callback() self.assertEqual(len(callbacks), 1) self.assertIs(self.callback_called, True) def test_pre_callback(self): def pre_hook(): pass transaction.on_commit(pre_hook, using='default') with self.captureOnCommitCallbacks() as callbacks: self.enqueue_callback() self.assertEqual(len(callbacks), 1) self.assertNotEqual(callbacks[0], pre_hook) def test_with_rolled_back_savepoint(self): with self.captureOnCommitCallbacks() as callbacks: try: with transaction.atomic(): self.enqueue_callback() raise IntegrityError except IntegrityError: # Inner transaction.atomic() has been rolled back. pass self.assertEqual(callbacks, []) def test_execute_recursive(self): with self.captureOnCommitCallbacks(execute=True) as callbacks: transaction.on_commit(self.enqueue_callback) self.assertEqual(len(callbacks), 2) self.assertIs(self.callback_called, True) def test_execute_tree(self): """ A visualisation of the callback tree tested. Each node is expected to be visited only once: └─branch_1 ├─branch_2 │ ├─leaf_1 │ └─leaf_2 └─leaf_3 """ branch_1_call_counter = 0 branch_2_call_counter = 0 leaf_1_call_counter = 0 leaf_2_call_counter = 0 leaf_3_call_counter = 0 def leaf_1(): nonlocal leaf_1_call_counter leaf_1_call_counter += 1 def leaf_2(): nonlocal leaf_2_call_counter leaf_2_call_counter += 1 def leaf_3(): nonlocal leaf_3_call_counter leaf_3_call_counter += 1 def branch_1(): nonlocal branch_1_call_counter branch_1_call_counter += 1 transaction.on_commit(branch_2) transaction.on_commit(leaf_3) def branch_2(): nonlocal branch_2_call_counter branch_2_call_counter += 1 transaction.on_commit(leaf_1) transaction.on_commit(leaf_2) with self.captureOnCommitCallbacks(execute=True) as callbacks: transaction.on_commit(branch_1) self.assertEqual(branch_1_call_counter, 1) self.assertEqual(branch_2_call_counter, 1) self.assertEqual(leaf_1_call_counter, 1) self.assertEqual(leaf_2_call_counter, 1) self.assertEqual(leaf_3_call_counter, 1) self.assertEqual(callbacks, [branch_1, branch_2, leaf_3, leaf_1, leaf_2]) class DisallowedDatabaseQueriesTests(SimpleTestCase): def test_disallowed_database_connections(self): expected_message = ( "Database connections to 'default' are not allowed in SimpleTestCase " "subclasses. Either subclass TestCase or TransactionTestCase to " "ensure proper test isolation or add 'default' to " "test_utils.tests.DisallowedDatabaseQueriesTests.databases to " "silence this failure." 
) with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message): connection.connect() with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message): connection.temporary_connection() def test_disallowed_database_queries(self): expected_message = ( "Database queries to 'default' are not allowed in SimpleTestCase " "subclasses. Either subclass TestCase or TransactionTestCase to " "ensure proper test isolation or add 'default' to " "test_utils.tests.DisallowedDatabaseQueriesTests.databases to " "silence this failure." ) with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message): Car.objects.first() def test_disallowed_database_chunked_cursor_queries(self): expected_message = ( "Database queries to 'default' are not allowed in SimpleTestCase " "subclasses. Either subclass TestCase or TransactionTestCase to " "ensure proper test isolation or add 'default' to " "test_utils.tests.DisallowedDatabaseQueriesTests.databases to " "silence this failure." ) with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message): next(Car.objects.iterator()) class AllowedDatabaseQueriesTests(SimpleTestCase): databases = {'default'} def test_allowed_database_queries(self): Car.objects.first() def test_allowed_database_chunked_cursor_queries(self): next(Car.objects.iterator(), None) class DatabaseAliasTests(SimpleTestCase): def setUp(self): self.addCleanup(setattr, self.__class__, 'databases', self.databases) def test_no_close_match(self): self.__class__.databases = {'void'} message = ( "test_utils.tests.DatabaseAliasTests.databases refers to 'void' which is not defined " "in settings.DATABASES." ) with self.assertRaisesMessage(ImproperlyConfigured, message): self._validate_databases() def test_close_match(self): self.__class__.databases = {'defualt'} message = ( "test_utils.tests.DatabaseAliasTests.databases refers to 'defualt' which is not defined " "in settings.DATABASES. Did you mean 'default'?" 
)
        with self.assertRaisesMessage(ImproperlyConfigured, message):
            self._validate_databases()

    def test_match(self):
        self.__class__.databases = {'default', 'other'}
        self.assertEqual(self._validate_databases(), frozenset({'default', 'other'}))

    def test_all(self):
        self.__class__.databases = '__all__'
        self.assertEqual(self._validate_databases(), frozenset(connections))


@isolate_apps('test_utils', attr_name='class_apps')
class IsolatedAppsTests(SimpleTestCase):
    def test_installed_apps(self):
        self.assertEqual([app_config.label for app_config in self.class_apps.get_app_configs()], ['test_utils'])

    def test_class_decoration(self):
        class ClassDecoration(models.Model):
            pass
        self.assertEqual(ClassDecoration._meta.apps, self.class_apps)

    @isolate_apps('test_utils', kwarg_name='method_apps')
    def test_method_decoration(self, method_apps):
        class MethodDecoration(models.Model):
            pass
        self.assertEqual(MethodDecoration._meta.apps, method_apps)

    def test_context_manager(self):
        with isolate_apps('test_utils') as context_apps:
            class ContextManager(models.Model):
                pass
        self.assertEqual(ContextManager._meta.apps, context_apps)

    @isolate_apps('test_utils', kwarg_name='method_apps')
    def test_nested(self, method_apps):
        class MethodDecoration(models.Model):
            pass
        with isolate_apps('test_utils') as context_apps:
            class ContextManager(models.Model):
                pass
            with isolate_apps('test_utils') as nested_context_apps:
                class NestedContextManager(models.Model):
                    pass
        self.assertEqual(MethodDecoration._meta.apps, method_apps)
        self.assertEqual(ContextManager._meta.apps, context_apps)
        self.assertEqual(NestedContextManager._meta.apps, nested_context_apps)


class DoNothingDecorator(TestContextDecorator):
    def enable(self):
        pass

    def disable(self):
        pass


class TestContextDecoratorTests(SimpleTestCase):
    @mock.patch.object(DoNothingDecorator, 'disable')
    def test_exception_in_setup(self, mock_disable):
        """An exception in setUp() is reraised after disable() is called."""
        class ExceptionInSetUp(unittest.TestCase):
            def setUp(self):
                raise NotImplementedError('reraised')

        decorator = DoNothingDecorator()
        decorated_test_class = decorator.__call__(ExceptionInSetUp)()
        self.assertFalse(mock_disable.called)
        with self.assertRaisesMessage(NotImplementedError, 'reraised'):
            decorated_test_class.setUp()
        decorated_test_class.doCleanups()
        self.assertTrue(mock_disable.called)

    def test_cleanups_run_after_tearDown(self):
        calls = []

        class SaveCallsDecorator(TestContextDecorator):
            def enable(self):
                calls.append('enable')

            def disable(self):
                calls.append('disable')

        class AddCleanupInSetUp(unittest.TestCase):
            def setUp(self):
                calls.append('setUp')
                self.addCleanup(lambda: calls.append('cleanup'))

        decorator = SaveCallsDecorator()
        decorated_test_class = decorator.__call__(AddCleanupInSetUp)()
        decorated_test_class.setUp()
        decorated_test_class.tearDown()
        decorated_test_class.doCleanups()
        self.assertEqual(calls, ['enable', 'setUp', 'cleanup', 'disable'])
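

# Illustrative usage sketch (not part of the upstream suite): how the
# query-counting helpers exercised above are typically combined in a
# downstream test. Only names already used in this module are assumed
# (Car, TestCase, connection, CaptureQueriesContext); the class and test
# names below are hypothetical.
class QueryAssertionUsageSketch(TestCase):
    def test_count_and_capture_queries(self):
        Car.objects.create(name='Batmobile')
        # assertNumQueries() fails if the block issues a different number of
        # queries than declared.
        with self.assertNumQueries(1):
            list(Car.objects.all())
        # CaptureQueriesContext records the executed SQL for finer checks.
        with CaptureQueriesContext(connection) as captured:
            Car.objects.count()
        self.assertEqual(len(captured), 1)
        self.assertIn('SELECT', captured[0]['sql'].upper())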
a87bc98d7a176475270a68820be82efc3efb6b8f25dbce7c2d9ec05db57869cb
from django.db import models


class Car(models.Model):
    name = models.CharField(max_length=100)


class Person(models.Model):
    name = models.CharField(max_length=100)
    cars = models.ManyToManyField(Car, through='PossessedCar')
    data = models.BinaryField(null=True)


class PossessedCar(models.Model):
    car = models.ForeignKey(Car, models.CASCADE)
    belongs_to = models.ForeignKey(Person, models.CASCADE, related_name='possessed_cars')
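

# A minimal usage sketch (an assumption, kept as comments so the module has no
# import-time side effects): how the explicit through model above ties the two
# sides of the many-to-many together in the tests.
#
#   person = Person.objects.create(name='p1')
#   car = Car.objects.create(name='Batmobile')
#   PossessedCar.objects.create(car=car, belongs_to=person)
#   assert list(person.cars.all()) == [car]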
d56ef6bc1144acbb3ce95999d9700107df0d4f071d61e11466dddd9632ce3dab
from unittest import mock

from django.db import connections
from django.test import TestCase, TransactionTestCase, override_settings
from django.test.testcases import DatabaseOperationForbidden

from .models import Car


class TestSerializedRollbackInhibitsPostMigrate(TransactionTestCase):
    """
    TransactionTestCase._fixture_teardown() inhibits the post_migrate signal
    for test classes with serialized_rollback=True.
    """
    available_apps = ['test_utils']
    serialized_rollback = True

    def setUp(self):
        # self.available_apps must be None to test the serialized_rollback
        # condition.
        self.available_apps = None

    def tearDown(self):
        self.available_apps = ['test_utils']

    @mock.patch('django.test.testcases.call_command')
    def test(self, call_command):
        # with a mocked call_command(), this doesn't have any effect.
        self._fixture_teardown()
        call_command.assert_called_with(
            'flush', interactive=False, allow_cascade=False,
            reset_sequences=False, inhibit_post_migrate=True,
            database='default', verbosity=0,
        )


@override_settings(DEBUG=True)  # Enable query logging for test_queries_cleared
class TransactionTestCaseDatabasesTests(TestCase):
    available_apps = []
    databases = {'default', 'other'}

    def test_queries_cleared(self):
        """
        TransactionTestCase._pre_setup() clears the connections' queries_log
        so that it's less likely to overflow. An overflow causes
        assertNumQueries() to fail.
        """
        for alias in self.databases:
            self.assertEqual(len(connections[alias].queries_log), 0, 'Failed for alias %s' % alias)


class DisallowedDatabaseQueriesTests(TransactionTestCase):
    available_apps = ['test_utils']

    def test_disallowed_database_queries(self):
        message = (
            "Database queries to 'other' are not allowed in this test. "
            "Add 'other' to test_utils.test_transactiontestcase."
            "DisallowedDatabaseQueriesTests.databases to ensure proper test "
            "isolation and silence this failure."
        )
        with self.assertRaisesMessage(DatabaseOperationForbidden, message):
            Car.objects.using('other').get()
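

# Illustrative counterpart (a sketch, not part of the upstream file): declaring
# the extra alias in `databases` is what silences the failure asserted above.
# The class and test names are hypothetical.
class AllowedOtherDatabaseQueriesSketch(TransactionTestCase):
    available_apps = ['test_utils']
    databases = {'default', 'other'}

    def test_other_alias_is_allowed(self):
        # With 'other' declared, queries against that alias no longer raise
        # DatabaseOperationForbidden.
        self.assertIsNone(Car.objects.using('other').first())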
1c2a459e2b635d8a605970bfb364a803c184894334d41b9f2cfce82c0c54f137
import datetime import os import re import unittest from unittest import mock from urllib.parse import parse_qsl, urljoin, urlparse try: import zoneinfo except ImportError: from backports import zoneinfo try: import pytz except ImportError: pytz = None from django.contrib import admin from django.contrib.admin import AdminSite, ModelAdmin from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME from django.contrib.admin.models import ADDITION, DELETION, LogEntry from django.contrib.admin.options import TO_FIELD_VAR from django.contrib.admin.templatetags.admin_urls import add_preserved_filters from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.admin.utils import quote from django.contrib.admin.views.main import IS_POPUP_VAR from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename from django.contrib.auth.models import Group, Permission, User from django.contrib.contenttypes.models import ContentType from django.core import mail from django.core.checks import Error from django.core.files import temp as tempfile from django.forms.utils import ErrorList from django.template.response import TemplateResponse from django.test import ( TestCase, modify_settings, override_settings, skipUnlessDBFeature, ) from django.test.utils import override_script_prefix from django.urls import NoReverseMatch, resolve, reverse from django.utils import formats, translation from django.utils.cache import get_max_age from django.utils.encoding import iri_to_uri from django.utils.html import escape from django.utils.http import urlencode from . import customadmin from .admin import CityAdmin, site, site2 from .models import ( Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField, AdminOrderedModelMethod, Album, Answer, Answer2, Article, BarAccount, Book, Bookmark, Box, Category, Chapter, ChapterXtra1, ChapterXtra2, Character, Child, Choice, City, Collector, Color, ComplexSortedPerson, CoverLetter, CustomArticle, CyclicOne, CyclicTwo, DooHickey, Employee, EmptyModel, Fabric, FancyDoodad, FieldOverridePost, FilteredManager, FooAccount, FoodDelivery, FunkyTag, Gallery, Grommet, Inquisition, Language, Link, MainPrepopulated, Media, ModelWithStringPrimaryKey, OtherStory, Paper, Parent, ParentWithDependentChildren, ParentWithUUIDPK, Person, Persona, Picture, Pizza, Plot, PlotDetails, PluggableSearchPerson, Podcast, Post, PrePopulatedPost, Promo, Question, ReadablePizza, ReadOnlyPizza, ReadOnlyRelatedField, Recommendation, Recommender, RelatedPrepopulated, RelatedWithUUIDPKModel, Report, Restaurant, RowLevelChangePermissionModel, SecretHideout, Section, ShortMessage, Simple, Song, State, Story, SuperSecretHideout, SuperVillain, Telegram, TitleTranslation, Topping, UnchangeableObject, UndeletableObject, UnorderedObject, UserProxy, Villain, Vodcast, Whatsit, Widget, Worker, WorkHour, ) ERROR_MESSAGE = "Please enter the correct username and password \ for a staff account. Note that both fields may be case-sensitive." MULTIPART_ENCTYPE = 'enctype="multipart/form-data"' def make_aware_datetimes(dt, iana_key): """Makes one aware datetime for each supported time zone provider.""" yield dt.replace(tzinfo=zoneinfo.ZoneInfo(iana_key)) if pytz is not None: yield pytz.timezone(iana_key).localize(dt, is_dst=None) class AdminFieldExtractionMixin: """ Helper methods for extracting data from AdminForm. """ def get_admin_form_fields(self, response): """ Return a list of AdminFields for the AdminForm in the response. 
""" fields = [] for fieldset in response.context['adminform']: for field_line in fieldset: fields.extend(field_line) return fields def get_admin_readonly_fields(self, response): """ Return the readonly fields for the response's AdminForm. """ return [f for f in self.get_admin_form_fields(response) if f.is_readonly] def get_admin_readonly_field(self, response, field_name): """ Return the readonly field for the given field_name. """ admin_readonly_fields = self.get_admin_readonly_fields(response) for field in admin_readonly_fields: if field.field['name'] == field_name: return field @override_settings(ROOT_URLCONF='admin_views.urls', USE_I18N=True, LANGUAGE_CODE='en') class AdminViewBasicTestCase(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1, title='Article 1', ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1, title='Article 2', ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.color1 = Color.objects.create(value='Red', warm=True) cls.color2 = Color.objects.create(value='Orange', warm=True) cls.color3 = Color.objects.create(value='Blue', warm=False) cls.color4 = Color.objects.create(value='Green', warm=False) cls.fab1 = Fabric.objects.create(surface='x') cls.fab2 = Fabric.objects.create(surface='y') cls.fab3 = Fabric.objects.create(surface='plain') cls.b1 = Book.objects.create(name='Book 1') cls.b2 = Book.objects.create(name='Book 2') cls.pro1 = Promo.objects.create(name='Promo 1', book=cls.b1) cls.pro1 = Promo.objects.create(name='Promo 2', book=cls.b2) cls.chap1 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b1) cls.chap2 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b1) cls.chap3 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b2) cls.chap4 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b2) cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='ChapterXtra1 1') cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='ChapterXtra1 2') Actor.objects.create(name='Palin', age=27) # Post data for edit inline cls.inline_post_data = { "name": "Test section", # inline data "article_set-TOTAL_FORMS": "6", "article_set-INITIAL_FORMS": "3", "article_set-MAX_NUM_FORMS": "0", "article_set-0-id": cls.a1.pk, # there is no title in database, give one here or formset will fail. 
"article_set-0-title": "Norske bostaver æøå skaper problemer", "article_set-0-content": "&lt;p&gt;Middle content&lt;/p&gt;", "article_set-0-date_0": "2008-03-18", "article_set-0-date_1": "11:54:58", "article_set-0-section": cls.s1.pk, "article_set-1-id": cls.a2.pk, "article_set-1-title": "Need a title.", "article_set-1-content": "&lt;p&gt;Oldest content&lt;/p&gt;", "article_set-1-date_0": "2000-03-18", "article_set-1-date_1": "11:54:58", "article_set-2-id": cls.a3.pk, "article_set-2-title": "Need a title.", "article_set-2-content": "&lt;p&gt;Newest content&lt;/p&gt;", "article_set-2-date_0": "2009-03-18", "article_set-2-date_1": "11:54:58", "article_set-3-id": "", "article_set-3-title": "", "article_set-3-content": "", "article_set-3-date_0": "", "article_set-3-date_1": "", "article_set-4-id": "", "article_set-4-title": "", "article_set-4-content": "", "article_set-4-date_0": "", "article_set-4-date_1": "", "article_set-5-id": "", "article_set-5-title": "", "article_set-5-content": "", "article_set-5-date_0": "", "article_set-5-date_1": "", } def setUp(self): self.client.force_login(self.superuser) def assertContentBefore(self, response, text1, text2, failing_msg=None): """ Testing utility asserting that text1 appears before text2 in response content. """ self.assertEqual(response.status_code, 200) self.assertLess( response.content.index(text1.encode()), response.content.index(text2.encode()), (failing_msg or '') + '\nResponse:\n' + response.content.decode(response.charset) ) class AdminViewBasicTest(AdminViewBasicTestCase): def test_trailing_slash_required(self): """ If you leave off the trailing slash, app should redirect and add it. """ add_url = reverse('admin:admin_views_article_add') response = self.client.get(add_url[:-1]) self.assertRedirects(response, add_url, status_code=301) def test_basic_add_GET(self): """ A smoke test to ensure GET on the add_view works. """ response = self.client.get(reverse('admin:admin_views_section_add')) self.assertIsInstance(response, TemplateResponse) self.assertEqual(response.status_code, 200) def test_add_with_GET_args(self): response = self.client.get(reverse('admin:admin_views_section_add'), {'name': 'My Section'}) self.assertContains( response, 'value="My Section"', msg_prefix="Couldn't find an input with the right value in the response" ) def test_basic_edit_GET(self): """ A smoke test to ensure GET on the change_view works. """ response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertIsInstance(response, TemplateResponse) self.assertEqual(response.status_code, 200) def test_basic_edit_GET_string_PK(self): """ GET on the change_view (when passing a string as the PK argument for a model with an integer PK field) redirects to the index page with a message saying the object doesn't exist. """ response = self.client.get(reverse('admin:admin_views_section_change', args=(quote("abc/<b>"),)), follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['section with ID “abc/<b>” doesn’t exist. Perhaps it was deleted?'] ) def test_basic_edit_GET_old_url_redirect(self): """ The change URL changed in Django 1.9, but the old one still redirects. 
""" response = self.client.get( reverse('admin:admin_views_section_change', args=(self.s1.pk,)).replace('change/', '') ) self.assertRedirects(response, reverse('admin:admin_views_section_change', args=(self.s1.pk,))) def test_basic_inheritance_GET_string_PK(self): """ GET on the change_view (for inherited models) redirects to the index page with a message saying the object doesn't exist. """ response = self.client.get(reverse('admin:admin_views_supervillain_change', args=('abc',)), follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['super villain with ID “abc” doesn’t exist. Perhaps it was deleted?'] ) def test_basic_add_POST(self): """ A smoke test to ensure POST on add_view works. """ post_data = { "name": "Another Section", # inline data "article_set-TOTAL_FORMS": "3", "article_set-INITIAL_FORMS": "0", "article_set-MAX_NUM_FORMS": "0", } response = self.client.post(reverse('admin:admin_views_section_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_popup_add_POST(self): """HTTP response from a popup is properly escaped.""" post_data = { IS_POPUP_VAR: '1', 'title': 'title with a new\nline', 'content': 'some content', 'date_0': '2010-09-10', 'date_1': '14:55:39', } response = self.client.post(reverse('admin:admin_views_article_add'), post_data) self.assertContains(response, 'title with a new\\nline') def test_basic_edit_POST(self): """ A smoke test to ensure POST on edit_view works. """ url = reverse('admin:admin_views_section_change', args=(self.s1.pk,)) response = self.client.post(url, self.inline_post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_edit_save_as(self): """ Test "save as". """ post_data = self.inline_post_data.copy() post_data.update({ '_saveasnew': 'Save+as+new', "article_set-1-section": "1", "article_set-2-section": "1", "article_set-3-section": "1", "article_set-4-section": "1", "article_set-5-section": "1", }) response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_edit_save_as_delete_inline(self): """ Should be able to "Save as new" while also deleting an inline. """ post_data = self.inline_post_data.copy() post_data.update({ '_saveasnew': 'Save+as+new', "article_set-1-section": "1", "article_set-2-section": "1", "article_set-2-DELETE": "1", "article_set-3-section": "1", }) response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data) self.assertEqual(response.status_code, 302) # started with 3 articles, one was deleted. self.assertEqual(Section.objects.latest('id').article_set.count(), 2) def test_change_list_column_field_classes(self): response = self.client.get(reverse('admin:admin_views_article_changelist')) # callables display the callable name. self.assertContains(response, 'column-callable_year') self.assertContains(response, 'field-callable_year') # lambdas display as "lambda" + index that they appear in list_display. 
self.assertContains(response, 'column-lambda8') self.assertContains(response, 'field-lambda8') def test_change_list_sorting_callable(self): """ Ensure we can sort on a list_display field that is a callable (column 2 is callable_year in ArticleAdmin) """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 2}) self.assertContentBefore( response, 'Oldest content', 'Middle content', "Results of sorting on callable are out of order." ) self.assertContentBefore( response, 'Middle content', 'Newest content', "Results of sorting on callable are out of order." ) def test_change_list_sorting_property(self): """ Sort on a list_display field that is a property (column 10 is a property in Article model). """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 10}) self.assertContentBefore( response, 'Oldest content', 'Middle content', 'Results of sorting on property are out of order.', ) self.assertContentBefore( response, 'Middle content', 'Newest content', 'Results of sorting on property are out of order.', ) def test_change_list_sorting_callable_query_expression(self): """Query expressions may be used for admin_order_field.""" tests = [ ('order_by_expression', 9), ('order_by_f_expression', 12), ('order_by_orderby_expression', 13), ] for admin_order_field, index in tests: with self.subTest(admin_order_field): response = self.client.get( reverse('admin:admin_views_article_changelist'), {'o': index}, ) self.assertContentBefore( response, 'Oldest content', 'Middle content', 'Results of sorting on callable are out of order.' ) self.assertContentBefore( response, 'Middle content', 'Newest content', 'Results of sorting on callable are out of order.' ) def test_change_list_sorting_callable_query_expression_reverse(self): tests = [ ('order_by_expression', -9), ('order_by_f_expression', -12), ('order_by_orderby_expression', -13), ] for admin_order_field, index in tests: with self.subTest(admin_order_field): response = self.client.get( reverse('admin:admin_views_article_changelist'), {'o': index}, ) self.assertContentBefore( response, 'Middle content', 'Oldest content', 'Results of sorting on callable are out of order.' ) self.assertContentBefore( response, 'Newest content', 'Middle content', 'Results of sorting on callable are out of order.' ) def test_change_list_sorting_model(self): """ Ensure we can sort on a list_display field that is a Model method (column 3 is 'model_year' in ArticleAdmin) """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-3'}) self.assertContentBefore( response, 'Newest content', 'Middle content', "Results of sorting on Model method are out of order." ) self.assertContentBefore( response, 'Middle content', 'Oldest content', "Results of sorting on Model method are out of order." ) def test_change_list_sorting_model_admin(self): """ Ensure we can sort on a list_display field that is a ModelAdmin method (column 4 is 'modeladmin_year' in ArticleAdmin) """ response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '4'}) self.assertContentBefore( response, 'Oldest content', 'Middle content', "Results of sorting on ModelAdmin method are out of order." ) self.assertContentBefore( response, 'Middle content', 'Newest content', "Results of sorting on ModelAdmin method are out of order." ) def test_change_list_sorting_model_admin_reverse(self): """ Ensure we can sort on a list_display field that is a ModelAdmin method in reverse order (i.e. 
admin_order_field uses the '-' prefix) (column 6 is 'model_year_reverse' in ArticleAdmin) """ td = '<td class="field-model_property_year">%s</td>' td_2000, td_2008, td_2009 = td % 2000, td % 2008, td % 2009 response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '6'}) self.assertContentBefore( response, td_2009, td_2008, "Results of sorting on ModelAdmin method are out of order." ) self.assertContentBefore( response, td_2008, td_2000, "Results of sorting on ModelAdmin method are out of order." ) # Let's make sure the ordering is right and that we don't get a # FieldError when we change to descending order response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-6'}) self.assertContentBefore( response, td_2000, td_2008, "Results of sorting on ModelAdmin method are out of order." ) self.assertContentBefore( response, td_2008, td_2009, "Results of sorting on ModelAdmin method are out of order." ) def test_change_list_sorting_multiple(self): p1 = Person.objects.create(name="Chris", gender=1, alive=True) p2 = Person.objects.create(name="Chris", gender=2, alive=True) p3 = Person.objects.create(name="Bob", gender=1, alive=True) link1 = reverse('admin:admin_views_person_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_person_change', args=(p2.pk,)) link3 = reverse('admin:admin_views_person_change', args=(p3.pk,)) # Sort by name, gender response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '1.2'}) self.assertContentBefore(response, link3, link1) self.assertContentBefore(response, link1, link2) # Sort by gender descending, name response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '-2.1'}) self.assertContentBefore(response, link2, link3) self.assertContentBefore(response, link3, link1) def test_change_list_sorting_preserve_queryset_ordering(self): """ If no ordering is defined in `ModelAdmin.ordering` or in the query string, then the underlying order of the queryset should not be changed, even if it is defined in `Modeladmin.get_queryset()`. Refs #11868, #7309. 
""" p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80) p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70) p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60) link1 = reverse('admin:admin_views_person_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_person_change', args=(p2.pk,)) link3 = reverse('admin:admin_views_person_change', args=(p3.pk,)) response = self.client.get(reverse('admin:admin_views_person_changelist'), {}) self.assertContentBefore(response, link3, link2) self.assertContentBefore(response, link2, link1) def test_change_list_sorting_model_meta(self): # Test ordering on Model Meta is respected l1 = Language.objects.create(iso='ur', name='Urdu') l2 = Language.objects.create(iso='ar', name='Arabic') link1 = reverse('admin:admin_views_language_change', args=(quote(l1.pk),)) link2 = reverse('admin:admin_views_language_change', args=(quote(l2.pk),)) response = self.client.get(reverse('admin:admin_views_language_changelist'), {}) self.assertContentBefore(response, link2, link1) # Test we can override with query string response = self.client.get(reverse('admin:admin_views_language_changelist'), {'o': '-1'}) self.assertContentBefore(response, link1, link2) def test_change_list_sorting_override_model_admin(self): # Test ordering on Model Admin is respected, and overrides Model Meta dt = datetime.datetime.now() p1 = Podcast.objects.create(name="A", release_date=dt) p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10)) link1 = reverse('admin:admin_views_podcast_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_podcast_change', args=(p2.pk,)) response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {}) self.assertContentBefore(response, link1, link2) def test_multiple_sort_same_field(self): # The changelist displays the correct columns if two columns correspond # to the same ordering field. dt = datetime.datetime.now() p1 = Podcast.objects.create(name="A", release_date=dt) p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10)) link1 = reverse('admin:admin_views_podcast_change', args=(quote(p1.pk),)) link2 = reverse('admin:admin_views_podcast_change', args=(quote(p2.pk),)) response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {}) self.assertContentBefore(response, link1, link2) p1 = ComplexSortedPerson.objects.create(name="Bob", age=10) p2 = ComplexSortedPerson.objects.create(name="Amy", age=20) link1 = reverse('admin:admin_views_complexsortedperson_change', args=(p1.pk,)) link2 = reverse('admin:admin_views_complexsortedperson_change', args=(p2.pk,)) response = self.client.get(reverse('admin:admin_views_complexsortedperson_changelist'), {}) # Should have 5 columns (including action checkbox col) self.assertContains(response, '<th scope="col"', count=5) self.assertContains(response, 'Name') self.assertContains(response, 'Colored name') # Check order self.assertContentBefore(response, 'Name', 'Colored name') # Check sorting - should be by name self.assertContentBefore(response, link2, link1) def test_sort_indicators_admin_order(self): """ The admin shows default sort indicators for all kinds of 'ordering' fields: field names, method on the model admin and model itself, and other callables. See #17252. 
""" models = [(AdminOrderedField, 'adminorderedfield'), (AdminOrderedModelMethod, 'adminorderedmodelmethod'), (AdminOrderedAdminMethod, 'adminorderedadminmethod'), (AdminOrderedCallable, 'adminorderedcallable')] for model, url in models: model.objects.create(stuff='The Last Item', order=3) model.objects.create(stuff='The First Item', order=1) model.objects.create(stuff='The Middle Item', order=2) response = self.client.get(reverse('admin:admin_views_%s_changelist' % url), {}) # Should have 3 columns including action checkbox col. self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url) # Check if the correct column was selected. 2 is the index of the # 'order' column in the model admin's 'list_display' with 0 being # the implicit 'action_checkbox' and 1 being the column 'stuff'. self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'}) # Check order of records. self.assertContentBefore(response, 'The First Item', 'The Middle Item') self.assertContentBefore(response, 'The Middle Item', 'The Last Item') def test_has_related_field_in_list_display_fk(self): """Joins shouldn't be performed for <FK>_id fields in list display.""" state = State.objects.create(name='Karnataka') City.objects.create(state=state, name='Bangalore') response = self.client.get(reverse('admin:admin_views_city_changelist'), {}) response.context['cl'].list_display = ['id', 'name', 'state'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), True) response.context['cl'].list_display = ['id', 'name', 'state_id'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), False) def test_has_related_field_in_list_display_o2o(self): """Joins shouldn't be performed for <O2O>_id fields in list display.""" media = Media.objects.create(name='Foo') Vodcast.objects.create(media=media) response = self.client.get(reverse('admin:admin_views_vodcast_changelist'), {}) response.context['cl'].list_display = ['media'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), True) response.context['cl'].list_display = ['media_id'] self.assertIs(response.context['cl'].has_related_field_in_list_display(), False) def test_limited_filter(self): """Ensure admin changelist filters do not contain objects excluded via limit_choices_to. This also tests relation-spanning filters (e.g. 'color__value'). 
""" response = self.client.get(reverse('admin:admin_views_thing_changelist')) self.assertContains( response, '<div id="changelist-filter">', msg_prefix="Expected filter not found in changelist view" ) self.assertNotContains( response, '<a href="?color__id__exact=3">Blue</a>', msg_prefix="Changelist filter not correctly limited by limit_choices_to" ) def test_relation_spanning_filters(self): changelist_url = reverse('admin:admin_views_chapterxtra1_changelist') response = self.client.get(changelist_url) self.assertContains(response, '<div id="changelist-filter">') filters = { 'chap__id__exact': { 'values': [c.id for c in Chapter.objects.all()], 'test': lambda obj, value: obj.chap.id == value, }, 'chap__title': { 'values': [c.title for c in Chapter.objects.all()], 'test': lambda obj, value: obj.chap.title == value, }, 'chap__book__id__exact': { 'values': [b.id for b in Book.objects.all()], 'test': lambda obj, value: obj.chap.book.id == value, }, 'chap__book__name': { 'values': [b.name for b in Book.objects.all()], 'test': lambda obj, value: obj.chap.book.name == value, }, 'chap__book__promo__id__exact': { 'values': [p.id for p in Promo.objects.all()], 'test': lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists(), }, 'chap__book__promo__name': { 'values': [p.name for p in Promo.objects.all()], 'test': lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists(), }, # A forward relation (book) after a reverse relation (promo). 'guest_author__promo__book__id__exact': { 'values': [p.id for p in Book.objects.all()], 'test': lambda obj, value: obj.guest_author.promo_set.filter(book=value).exists(), }, } for filter_path, params in filters.items(): for value in params['values']: query_string = urlencode({filter_path: value}) # ensure filter link exists self.assertContains(response, '<a href="?%s"' % query_string) # ensure link works filtered_response = self.client.get('%s?%s' % (changelist_url, query_string)) self.assertEqual(filtered_response.status_code, 200) # ensure changelist contains only valid objects for obj in filtered_response.context['cl'].queryset.all(): self.assertTrue(params['test'](obj, value)) def test_incorrect_lookup_parameters(self): """Ensure incorrect lookup parameters are handled gracefully.""" changelist_url = reverse('admin:admin_views_thing_changelist') response = self.client.get(changelist_url, {'notarealfield': '5'}) self.assertRedirects(response, '%s?e=1' % changelist_url) # Spanning relationships through a nonexistent related object (Refs #16716) response = self.client.get(changelist_url, {'notarealfield__whatever': '5'}) self.assertRedirects(response, '%s?e=1' % changelist_url) response = self.client.get(changelist_url, {'color__id__exact': 'StringNotInteger!'}) self.assertRedirects(response, '%s?e=1' % changelist_url) # Regression test for #18530 response = self.client.get(changelist_url, {'pub_date__gte': 'foo'}) self.assertRedirects(response, '%s?e=1' % changelist_url) def test_isnull_lookups(self): """Ensure is_null is handled correctly.""" Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now()) changelist_url = reverse('admin:admin_views_article_changelist') response = self.client.get(changelist_url) self.assertContains(response, '4 articles') response = self.client.get(changelist_url, {'section__isnull': 'false'}) self.assertContains(response, '3 articles') response = self.client.get(changelist_url, {'section__isnull': '0'}) self.assertContains(response, '3 articles') response = 
self.client.get(changelist_url, {'section__isnull': 'true'}) self.assertContains(response, '1 article') response = self.client.get(changelist_url, {'section__isnull': '1'}) self.assertContains(response, '1 article') def test_logout_and_password_change_URLs(self): response = self.client.get(reverse('admin:admin_views_article_changelist')) self.assertContains(response, '<a href="%s">' % reverse('admin:logout')) self.assertContains(response, '<a href="%s">' % reverse('admin:password_change')) def test_named_group_field_choices_change_list(self): """ Ensures the admin changelist shows correct values in the relevant column for rows corresponding to instances of a model in which a named group has been used in the choices option of a field. """ link1 = reverse('admin:admin_views_fabric_change', args=(self.fab1.pk,)) link2 = reverse('admin:admin_views_fabric_change', args=(self.fab2.pk,)) response = self.client.get(reverse('admin:admin_views_fabric_changelist')) fail_msg = ( "Changelist table isn't showing the right human-readable values " "set by a model field 'choices' option named group." ) self.assertContains(response, '<a href="%s">Horizontal</a>' % link1, msg_prefix=fail_msg, html=True) self.assertContains(response, '<a href="%s">Vertical</a>' % link2, msg_prefix=fail_msg, html=True) def test_named_group_field_choices_filter(self): """ Ensures the filter UI shows correctly when at least one named group has been used in the choices option of a model field. """ response = self.client.get(reverse('admin:admin_views_fabric_changelist')) fail_msg = ( "Changelist filter isn't showing options contained inside a model " "field 'choices' option named group." ) self.assertContains(response, '<div id="changelist-filter">') self.assertContains( response, '<a href="?surface__exact=x">Horizontal</a>', msg_prefix=fail_msg, html=True ) self.assertContains( response, '<a href="?surface__exact=y">Vertical</a>', msg_prefix=fail_msg, html=True ) def test_change_list_null_boolean_display(self): Post.objects.create(public=None) response = self.client.get(reverse('admin:admin_views_post_changelist')) self.assertContains(response, 'icon-unknown.svg') def test_display_decorator_with_boolean_and_empty_value(self): msg = ( 'The boolean and empty_value arguments to the @display decorator ' 'are mutually exclusive.' ) with self.assertRaisesMessage(ValueError, msg): class BookAdmin(admin.ModelAdmin): @admin.display(boolean=True, empty_value='(Missing)') def is_published(self, obj): return obj.publish_date is not None def test_i18n_language_non_english_default(self): """ Check if the JavaScript i18n view returns an empty language catalog if the default language is non-English but the selected language is English. See #13388 and #3594 for more details. """ with self.settings(LANGUAGE_CODE='fr'), translation.override('en-us'): response = self.client.get(reverse('admin:jsi18n')) self.assertNotContains(response, 'Choisir une heure') def test_i18n_language_non_english_fallback(self): """ Makes sure that the fallback language is still working properly in cases where the selected language cannot be found. 
""" with self.settings(LANGUAGE_CODE='fr'), translation.override('none'): response = self.client.get(reverse('admin:jsi18n')) self.assertContains(response, 'Choisir une heure') def test_jsi18n_with_context(self): response = self.client.get(reverse('admin-extra-context:jsi18n')) self.assertEqual(response.status_code, 200) def test_jsi18n_format_fallback(self): """ The JavaScript i18n view doesn't return localized date/time formats when the selected language cannot be found. """ with self.settings(LANGUAGE_CODE='ru'), translation.override('none'): response = self.client.get(reverse('admin:jsi18n')) self.assertNotContains(response, '%d.%m.%Y %H:%M:%S') self.assertContains(response, '%Y-%m-%d %H:%M:%S') def test_disallowed_filtering(self): with self.assertLogs('django.security.DisallowedModelAdminLookup', 'ERROR'): response = self.client.get( "%s?owner__email__startswith=fuzzy" % reverse('admin:admin_views_album_changelist') ) self.assertEqual(response.status_code, 400) # Filters are allowed if explicitly included in list_filter response = self.client.get("%s?color__value__startswith=red" % reverse('admin:admin_views_thing_changelist')) self.assertEqual(response.status_code, 200) response = self.client.get("%s?color__value=red" % reverse('admin:admin_views_thing_changelist')) self.assertEqual(response.status_code, 200) # Filters should be allowed if they involve a local field without the # need to allow them in list_filter or date_hierarchy. response = self.client.get("%s?age__gt=30" % reverse('admin:admin_views_person_changelist')) self.assertEqual(response.status_code, 200) e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123') e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124') WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1) WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2) response = self.client.get(reverse('admin:admin_views_workhour_changelist')) self.assertContains(response, 'employee__person_ptr__exact') response = self.client.get("%s?employee__person_ptr__exact=%d" % ( reverse('admin:admin_views_workhour_changelist'), e1.pk) ) self.assertEqual(response.status_code, 200) def test_disallowed_to_field(self): url = reverse('admin:admin_views_section_changelist') with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.get(url, {TO_FIELD_VAR: 'missing_field'}) self.assertEqual(response.status_code, 400) # Specifying a field that is not referred by any other model registered # to this admin site should raise an exception. with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) # #23839 - Primary key should always be allowed, even if the referenced model isn't registered. response = self.client.get(reverse('admin:admin_views_notreferenced_changelist'), {TO_FIELD_VAR: 'id'}) self.assertEqual(response.status_code, 200) # #23915 - Specifying a field referenced by another model though a m2m should be allowed. response = self.client.get(reverse('admin:admin_views_recipe_changelist'), {TO_FIELD_VAR: 'rname'}) self.assertEqual(response.status_code, 200) # #23604, #23915 - Specifying a field referenced through a reverse m2m relationship should be allowed. 
response = self.client.get(reverse('admin:admin_views_ingredient_changelist'), {TO_FIELD_VAR: 'iname'}) self.assertEqual(response.status_code, 200) # #23329 - Specifying a field that is not referred by any other model directly registered # to this admin site but registered through inheritance should be allowed. response = self.client.get(reverse('admin:admin_views_referencedbyparent_changelist'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 200) # #23431 - Specifying a field that is only referred to by a inline of a registered # model should be allowed. response = self.client.get(reverse('admin:admin_views_referencedbyinline_changelist'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 200) # #25622 - Specifying a field of a model only referred by a generic # relation should raise DisallowedModelAdminToField. url = reverse('admin:admin_views_referencedbygenrel_changelist') with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.get(url, {TO_FIELD_VAR: 'object_id'}) self.assertEqual(response.status_code, 400) # We also want to prevent the add, change, and delete views from # leaking a disallowed field value. with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.post(reverse('admin:admin_views_section_add'), {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) section = Section.objects.create() url = reverse('admin:admin_views_section_change', args=(section.pk,)) with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.post(url, {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) url = reverse('admin:admin_views_section_delete', args=(section.pk,)) with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'): response = self.client.post(url, {TO_FIELD_VAR: 'name'}) self.assertEqual(response.status_code, 400) def test_allowed_filtering_15103(self): """ Regressions test for ticket 15103 - filtering on fields defined in a ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields can break. """ # Filters should be allowed if they are defined on a ForeignKey pointing to this model url = "%s?leader__name=Palin&leader__age=27" % reverse('admin:admin_views_inquisition_changelist') response = self.client.get(url) self.assertEqual(response.status_code, 200) def test_popup_dismiss_related(self): """ Regression test for ticket 20664 - ensure the pk is properly quoted. """ actor = Actor.objects.create(name="Palin", age=27) response = self.client.get("%s?%s" % (reverse('admin:admin_views_actor_changelist'), IS_POPUP_VAR)) self.assertContains(response, 'data-popup-opener="%s"' % actor.pk) def test_hide_change_password(self): """ Tests if the "change password" link in the admin is hidden if the User does not have a usable password set. (against 9bea85795705d015cdadc82c68b99196a8554f5c) """ user = User.objects.get(username='super') user.set_unusable_password() user.save() self.client.force_login(user) response = self.client.get(reverse('admin:index')) self.assertNotContains( response, reverse('admin:password_change'), msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.' ) def test_change_view_with_show_delete_extra_context(self): """ The 'show_delete' context variable in the admin's change view controls the display of the delete button. 
""" instance = UndeletableObject.objects.create(name='foo') response = self.client.get(reverse('admin:admin_views_undeletableobject_change', args=(instance.pk,))) self.assertNotContains(response, 'deletelink') def test_change_view_logs_m2m_field_changes(self): """Changes to ManyToManyFields are included in the object's history.""" pizza = ReadablePizza.objects.create(name='Cheese') cheese = Topping.objects.create(name='cheese') post_data = {'name': pizza.name, 'toppings': [cheese.pk]} response = self.client.post(reverse('admin:admin_views_readablepizza_change', args=(pizza.pk,)), post_data) self.assertRedirects(response, reverse('admin:admin_views_readablepizza_changelist')) pizza_ctype = ContentType.objects.get_for_model(ReadablePizza, for_concrete_model=False) log = LogEntry.objects.filter(content_type=pizza_ctype, object_id=pizza.pk).first() self.assertEqual(log.get_change_message(), 'Changed Toppings.') def test_allows_attributeerror_to_bubble_up(self): """ AttributeErrors are allowed to bubble when raised inside a change list view. Requires a model to be created so there's something to display. Refs: #16655, #18593, and #18747 """ Simple.objects.create() with self.assertRaises(AttributeError): self.client.get(reverse('admin:admin_views_simple_changelist')) def test_changelist_with_no_change_url(self): """ ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url for change_view is removed from get_urls (#20934). """ o = UnchangeableObject.objects.create() response = self.client.get(reverse('admin:admin_views_unchangeableobject_changelist')) # Check the format of the shown object -- shouldn't contain a change link self.assertContains(response, '<th class="field-__str__">%s</th>' % o, html=True) def test_invalid_appindex_url(self): """ #21056 -- URL reversing shouldn't work for nonexistent apps. """ good_url = '/test_admin/admin/admin_views/' confirm_good_url = reverse('admin:app_list', kwargs={'app_label': 'admin_views'}) self.assertEqual(good_url, confirm_good_url) with self.assertRaises(NoReverseMatch): reverse('admin:app_list', kwargs={'app_label': 'this_should_fail'}) with self.assertRaises(NoReverseMatch): reverse('admin:app_list', args=('admin_views2',)) def test_resolve_admin_views(self): index_match = resolve('/test_admin/admin4/') list_match = resolve('/test_admin/admin4/auth/user/') self.assertIs(index_match.func.admin_site, customadmin.simple_site) self.assertIsInstance(list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin) def test_adminsite_display_site_url(self): """ #13749 - Admin should display link to front-end site 'View site' """ url = reverse('admin:index') response = self.client.get(url) self.assertEqual(response.context['site_url'], '/my-site-url/') self.assertContains(response, '<a href="/my-site-url/">View site</a>') def test_date_hierarchy_empty_queryset(self): self.assertIs(Question.objects.exists(), False) response = self.client.get(reverse('admin:admin_views_answer2_changelist')) self.assertEqual(response.status_code, 200) @override_settings(TIME_ZONE='America/Sao_Paulo', USE_TZ=True) def test_date_hierarchy_timezone_dst(self): # This datetime doesn't exist in this timezone due to DST. 
for date in make_aware_datetimes(datetime.datetime(2016, 10, 16, 15), 'America/Sao_Paulo'): with self.subTest(repr(date.tzinfo)): q = Question.objects.create(question='Why?', expires=date) Answer2.objects.create(question=q, answer='Because.') response = self.client.get(reverse('admin:admin_views_answer2_changelist')) self.assertContains(response, 'question__expires__day=16') self.assertContains(response, 'question__expires__month=10') self.assertContains(response, 'question__expires__year=2016') @override_settings(TIME_ZONE='America/Los_Angeles', USE_TZ=True) def test_date_hierarchy_local_date_differ_from_utc(self): # This datetime is 2017-01-01 in UTC. for date in make_aware_datetimes(datetime.datetime(2016, 12, 31, 16), 'America/Los_Angeles'): with self.subTest(repr(date.tzinfo)): q = Question.objects.create(question='Why?', expires=date) Answer2.objects.create(question=q, answer='Because.') response = self.client.get(reverse('admin:admin_views_answer2_changelist')) self.assertContains(response, 'question__expires__day=31') self.assertContains(response, 'question__expires__month=12') self.assertContains(response, 'question__expires__year=2016') def test_sortable_by_columns_subset(self): expected_sortable_fields = ('date', 'callable_year') expected_not_sortable_fields = ( 'content', 'model_year', 'modeladmin_year', 'model_year_reversed', 'section', ) response = self.client.get(reverse('admin6:admin_views_article_changelist')) for field_name in expected_sortable_fields: self.assertContains(response, '<th scope="col" class="sortable column-%s">' % field_name) for field_name in expected_not_sortable_fields: self.assertContains(response, '<th scope="col" class="column-%s">' % field_name) def test_get_sortable_by_columns_subset(self): response = self.client.get(reverse('admin6:admin_views_actor_changelist')) self.assertContains(response, '<th scope="col" class="sortable column-age">') self.assertContains(response, '<th scope="col" class="column-name">') def test_sortable_by_no_column(self): expected_not_sortable_fields = ('title', 'book') response = self.client.get(reverse('admin6:admin_views_chapter_changelist')) for field_name in expected_not_sortable_fields: self.assertContains(response, '<th scope="col" class="column-%s">' % field_name) self.assertNotContains(response, '<th scope="col" class="sortable column') def test_get_sortable_by_no_column(self): response = self.client.get(reverse('admin6:admin_views_color_changelist')) self.assertContains(response, '<th scope="col" class="column-value">') self.assertNotContains(response, '<th scope="col" class="sortable column') def test_app_index_context(self): response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertContains( response, '<title>Admin_Views administration | Django site admin</title>', ) self.assertEqual(response.context['title'], 'Admin_Views administration') self.assertEqual(response.context['app_label'], 'admin_views') def test_change_view_subtitle_per_object(self): response = self.client.get( reverse('admin:admin_views_article_change', args=(self.a1.pk,)), ) self.assertContains( response, '<title>Article 1 | Change article | Django site admin</title>', ) self.assertContains(response, '<h1>Change article</h1>') self.assertContains(response, '<h2>Article 1</h2>') response = self.client.get( reverse('admin:admin_views_article_change', args=(self.a2.pk,)), ) self.assertContains( response, '<title>Article 2 | Change article | Django site admin</title>', ) self.assertContains(response, '<h1>Change 
article</h1>') self.assertContains(response, '<h2>Article 2</h2>') def test_view_subtitle_per_object(self): viewuser = User.objects.create_user( username='viewuser', password='secret', is_staff=True, ) viewuser.user_permissions.add( get_perm(Article, get_permission_codename('view', Article._meta)), ) self.client.force_login(viewuser) response = self.client.get( reverse('admin:admin_views_article_change', args=(self.a1.pk,)), ) self.assertContains( response, '<title>Article 1 | View article | Django site admin</title>', ) self.assertContains(response, '<h1>View article</h1>') self.assertContains(response, '<h2>Article 1</h2>') response = self.client.get( reverse('admin:admin_views_article_change', args=(self.a2.pk,)), ) self.assertContains( response, '<title>Article 2 | View article | Django site admin</title>', ) self.assertContains(response, '<h1>View article</h1>') self.assertContains(response, '<h2>Article 2</h2>') def test_formset_kwargs_can_be_overridden(self): response = self.client.get(reverse('admin:admin_views_city_add')) self.assertContains(response, 'overridden_name') def test_render_views_no_subtitle(self): tests = [ reverse('admin:index'), reverse('admin:password_change'), reverse('admin:app_list', args=('admin_views',)), reverse('admin:admin_views_article_delete', args=(self.a1.pk,)), reverse('admin:admin_views_article_history', args=(self.a1.pk,)), # Login must be after logout. reverse('admin:logout'), reverse('admin:login'), ] for url in tests: with self.subTest(url=url): with self.assertNoLogs('django.template', 'DEBUG'): self.client.get(url) def test_render_delete_selected_confirmation_no_subtitle(self): post_data = { 'action': 'delete_selected', 'selected_across': '0', 'index': '0', '_selected_action': self.a1.pk, } with self.assertNoLogs('django.template', 'DEBUG'): self.client.post(reverse('admin:admin_views_article_changelist'), post_data) @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', # Put this app's and the shared tests templates dirs in DIRS to take precedence # over the admin's templates dir. 
'DIRS': [ os.path.join(os.path.dirname(__file__), 'templates'), os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates'), ], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }]) class AdminCustomTemplateTests(AdminViewBasicTestCase): def test_custom_model_admin_templates(self): # Test custom change list template with custom extra context response = self.client.get(reverse('admin:admin_views_customarticle_changelist')) self.assertContains(response, "var hello = 'Hello!';") self.assertTemplateUsed(response, 'custom_admin/change_list.html') # Test custom add form template response = self.client.get(reverse('admin:admin_views_customarticle_add')) self.assertTemplateUsed(response, 'custom_admin/add_form.html') # Add an article so we can test delete, change, and history views post = self.client.post(reverse('admin:admin_views_customarticle_add'), { 'content': '<p>great article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39' }) self.assertRedirects(post, reverse('admin:admin_views_customarticle_changelist')) self.assertEqual(CustomArticle.objects.all().count(), 1) article_pk = CustomArticle.objects.all()[0].pk # Test custom delete, change, and object history templates # Test custom change form template response = self.client.get(reverse('admin:admin_views_customarticle_change', args=(article_pk,))) self.assertTemplateUsed(response, 'custom_admin/change_form.html') response = self.client.get(reverse('admin:admin_views_customarticle_delete', args=(article_pk,))) self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html') response = self.client.post(reverse('admin:admin_views_customarticle_changelist'), data={ 'index': 0, 'action': ['delete_selected'], '_selected_action': ['1'], }) self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html') response = self.client.get(reverse('admin:admin_views_customarticle_history', args=(article_pk,))) self.assertTemplateUsed(response, 'custom_admin/object_history.html') # A custom popup response template may be specified by # ModelAdmin.popup_response_template. response = self.client.post(reverse('admin:admin_views_customarticle_add') + '?%s=1' % IS_POPUP_VAR, { 'content': '<p>great article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', IS_POPUP_VAR: '1' }) self.assertEqual(response.template_name, 'custom_admin/popup_response.html') def test_extended_bodyclass_template_change_form(self): """ The admin/change_form.html template uses block.super in the bodyclass block. """ response = self.client.get(reverse('admin:admin_views_section_add')) self.assertContains(response, 'bodyclass_consistency_check ') def test_change_password_template(self): user = User.objects.get(username='super') response = self.client.get(reverse('admin:auth_user_password_change', args=(user.id,))) # The auth/user/change_password.html template uses super in the # bodyclass block. self.assertContains(response, 'bodyclass_consistency_check ') # When a site has multiple passwords in the browser's password manager, # a browser pop up asks which user the new password is for. To prevent # this, the username is added to the change password form. 
self.assertContains(response, '<input type="text" name="username" value="super" class="hidden">') def test_extended_bodyclass_template_index(self): """ The admin/index.html template uses block.super in the bodyclass block. """ response = self.client.get(reverse('admin:index')) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_change_list(self): """ The admin/change_list.html' template uses block.super in the bodyclass block. """ response = self.client.get(reverse('admin:admin_views_article_changelist')) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_template_login(self): """ The admin/login.html template uses block.super in the bodyclass block. """ self.client.logout() response = self.client.get(reverse('admin:login')) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_template_delete_confirmation(self): """ The admin/delete_confirmation.html template uses block.super in the bodyclass block. """ group = Group.objects.create(name="foogroup") response = self.client.get(reverse('admin:auth_group_delete', args=(group.id,))) self.assertContains(response, 'bodyclass_consistency_check ') def test_extended_bodyclass_template_delete_selected_confirmation(self): """ The admin/delete_selected_confirmation.html template uses block.super in bodyclass block. """ group = Group.objects.create(name="foogroup") post_data = { 'action': 'delete_selected', 'selected_across': '0', 'index': '0', '_selected_action': group.id } response = self.client.post(reverse('admin:auth_group_changelist'), post_data) self.assertEqual(response.context['site_header'], 'Django administration') self.assertContains(response, 'bodyclass_consistency_check ') def test_filter_with_custom_template(self): """ A custom template can be used to render an admin filter. """ response = self.client.get(reverse('admin:admin_views_color2_changelist')) self.assertTemplateUsed(response, 'custom_filter_template.html') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewFormUrlTest(TestCase): current_app = "admin3" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def setUp(self): self.client.force_login(self.superuser) def test_change_form_URL_has_correct_value(self): """ change_view has form_url in response.context """ response = self.client.get( reverse('admin:admin_views_section_change', args=(self.s1.pk,), current_app=self.current_app) ) self.assertIn('form_url', response.context, msg='form_url not present in response.context') self.assertEqual(response.context['form_url'], 'pony') def test_initial_data_can_be_overridden(self): """ The behavior for setting initial form data can be overridden in the ModelAdmin class. Usually, the initial value is set via the GET params. 
""" response = self.client.get( reverse('admin:admin_views_restaurant_add', current_app=self.current_app), {'name': 'test_value'} ) # this would be the usual behaviour self.assertNotContains(response, 'value="test_value"') # this is the overridden behaviour self.assertContains(response, 'value="overridden_value"') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminJavaScriptTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_js_minified_only_if_debug_is_false(self): """ The minified versions of the JS files are only used when DEBUG is False. """ with override_settings(DEBUG=False): response = self.client.get(reverse('admin:admin_views_section_add')) self.assertNotContains(response, 'vendor/jquery/jquery.js') self.assertContains(response, 'vendor/jquery/jquery.min.js') self.assertContains(response, 'prepopulate.js') self.assertContains(response, 'actions.js') self.assertContains(response, 'collapse.js') self.assertContains(response, 'inlines.js') with override_settings(DEBUG=True): response = self.client.get(reverse('admin:admin_views_section_add')) self.assertContains(response, 'vendor/jquery/jquery.js') self.assertNotContains(response, 'vendor/jquery/jquery.min.js') self.assertContains(response, 'prepopulate.js') self.assertContains(response, 'actions.js') self.assertContains(response, 'collapse.js') self.assertContains(response, 'inlines.js') @override_settings(ROOT_URLCONF='admin_views.urls') class SaveAsTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) def setUp(self): self.client.force_login(self.superuser) def test_save_as_duplication(self): """'save as' creates a new person""" post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42} response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), post_data) self.assertEqual(len(Person.objects.filter(name='John M')), 1) self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1) new_person = Person.objects.latest('id') self.assertRedirects(response, reverse('admin:admin_views_person_change', args=(new_person.pk,))) def test_save_as_continue_false(self): """ Saving a new object using "Save as new" redirects to the changelist instead of the change view when ModelAdmin.save_as_continue=False. """ post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42} url = reverse('admin:admin_views_person_change', args=(self.per1.pk,), current_app=site2.name) response = self.client.post(url, post_data) self.assertEqual(len(Person.objects.filter(name='John M')), 1) self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1) self.assertRedirects(response, reverse('admin:admin_views_person_changelist', current_app=site2.name)) def test_save_as_new_with_validation_errors(self): """ When you click "Save as new" and have a validation error, you only see the "Save as new" button and not the other save buttons, and that only the "Save as" button is visible. 
""" response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), { '_saveasnew': '', 'gender': 'invalid', '_addanother': 'fail', }) self.assertContains(response, 'Please correct the errors below.') self.assertFalse(response.context['show_save_and_add_another']) self.assertFalse(response.context['show_save_and_continue']) self.assertTrue(response.context['show_save_as_new']) def test_save_as_new_with_validation_errors_with_inlines(self): parent = Parent.objects.create(name='Father') child = Child.objects.create(parent=parent, name='Child') response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), { '_saveasnew': 'Save as new', 'child_set-0-parent': parent.pk, 'child_set-0-id': child.pk, 'child_set-0-name': 'Child', 'child_set-INITIAL_FORMS': 1, 'child_set-MAX_NUM_FORMS': 1000, 'child_set-MIN_NUM_FORMS': 0, 'child_set-TOTAL_FORMS': 4, 'name': '_invalid', }) self.assertContains(response, 'Please correct the error below.') self.assertFalse(response.context['show_save_and_add_another']) self.assertFalse(response.context['show_save_and_continue']) self.assertTrue(response.context['show_save_as_new']) def test_save_as_new_with_inlines_with_validation_errors(self): parent = Parent.objects.create(name='Father') child = Child.objects.create(parent=parent, name='Child') response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), { '_saveasnew': 'Save as new', 'child_set-0-parent': parent.pk, 'child_set-0-id': child.pk, 'child_set-0-name': '_invalid', 'child_set-INITIAL_FORMS': 1, 'child_set-MAX_NUM_FORMS': 1000, 'child_set-MIN_NUM_FORMS': 0, 'child_set-TOTAL_FORMS': 4, 'name': 'Father', }) self.assertContains(response, 'Please correct the error below.') self.assertFalse(response.context['show_save_and_add_another']) self.assertFalse(response.context['show_save_and_continue']) self.assertTrue(response.context['show_save_as_new']) @override_settings(ROOT_URLCONF='admin_views.urls') class CustomModelAdminTest(AdminViewBasicTestCase): def test_custom_admin_site_login_form(self): self.client.logout() response = self.client.get(reverse('admin2:index'), follow=True) self.assertIsInstance(response, TemplateResponse) self.assertEqual(response.status_code, 200) login = self.client.post(reverse('admin2:login'), { REDIRECT_FIELD_NAME: reverse('admin2:index'), 'username': 'customform', 'password': 'secret', }, follow=True) self.assertIsInstance(login, TemplateResponse) self.assertContains(login, 'custom form error') self.assertContains(login, 'path/to/media.css') def test_custom_admin_site_login_template(self): self.client.logout() response = self.client.get(reverse('admin2:index'), follow=True) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/login.html') self.assertContains(response, 'Hello from a custom login template') def test_custom_admin_site_logout_template(self): response = self.client.get(reverse('admin2:logout')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/logout.html') self.assertContains(response, 'Hello from a custom logout template') def test_custom_admin_site_index_view_and_template(self): response = self.client.get(reverse('admin2:index')) self.assertIsInstance(response, TemplateResponse) self.assertTemplateUsed(response, 'custom_admin/index.html') self.assertContains(response, 'Hello from a custom index template *bar*') def test_custom_admin_site_app_index_view_and_template(self): response = 
        self.client.get(reverse('admin2:app_list', args=('admin_views',)))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/app_index.html')
        self.assertContains(response, 'Hello from a custom app_index template')

    def test_custom_admin_site_password_change_template(self):
        response = self.client.get(reverse('admin2:password_change'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
        self.assertContains(response, 'Hello from a custom password change form template')

    def test_custom_admin_site_password_change_with_extra_context(self):
        response = self.client.get(reverse('admin2:password_change'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
        self.assertContains(response, 'eggs')

    def test_custom_admin_site_password_change_done_template(self):
        response = self.client.get(reverse('admin2:password_change_done'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_done.html')
        self.assertContains(response, 'Hello from a custom password change done template')

    def test_custom_admin_site_view(self):
        self.client.force_login(self.superuser)
        response = self.client.get(reverse('admin2:my_view'))
        self.assertEqual(response.content, b"Django is a magical pony!")

    def test_pwd_change_custom_template(self):
        self.client.force_login(self.superuser)
        su = User.objects.get(username='super')
        response = self.client.get(reverse('admin4:auth_user_password_change', args=(su.pk,)))
        self.assertEqual(response.status_code, 200)


def get_perm(Model, codename):
    """Return the permission object for the Model."""
    ct = ContentType.objects.get_for_model(Model, for_concrete_model=False)
    return Permission.objects.get(content_type=ct, codename=codename)


@override_settings(
    ROOT_URLCONF='admin_views.urls',
    # Test with the admin's documented list of required context processors.
TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }], ) class AdminViewPermissionsTest(TestCase): """Tests for Admin Views Permissions.""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.viewuser = User.objects.create_user(username='viewuser', password='secret', is_staff=True) cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True) cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True) cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret') cls.nostaffuser = User.objects.create_user(username='nostaff', password='secret') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1, another_section=cls.s1, ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') # Setup permissions, for our users who can add, change, and delete. opts = Article._meta # User who can view Articles cls.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('view', opts))) # User who can add Articles cls.adduser.user_permissions.add(get_perm(Article, get_permission_codename('add', opts))) # User who can change Articles cls.changeuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts))) cls.nostaffuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts))) # User who can delete Articles cls.deleteuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', opts))) cls.deleteuser.user_permissions.add(get_perm(Section, get_permission_codename('delete', Section._meta))) # login POST dicts cls.index_url = reverse('admin:index') cls.super_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'super', 'password': 'secret', } cls.super_email_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': '[email protected]', 'password': 'secret', } cls.super_email_bad_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': '[email protected]', 'password': 'notsecret', } cls.adduser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'adduser', 'password': 'secret', } cls.changeuser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'changeuser', 'password': 'secret', } cls.deleteuser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'deleteuser', 'password': 'secret', } cls.nostaff_login = { REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'), 'username': 'nostaff', 'password': 'secret', } cls.joepublic_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'joepublic', 'password': 'secret', } cls.viewuser_login = { REDIRECT_FIELD_NAME: cls.index_url, 'username': 'viewuser', 'password': 'secret', } cls.no_username_login = { REDIRECT_FIELD_NAME: 
cls.index_url, 'password': 'secret', } def test_login(self): """ Make sure only staff members can log in. Successful posts to the login page will redirect to the original url. Unsuccessful attempts will continue to render the login page with a 200 status code. """ login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index')) # Super User response = self.client.get(self.index_url) self.assertRedirects(response, login_url) login = self.client.post(login_url, self.super_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Test if user enters email address response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.super_email_login) self.assertContains(login, ERROR_MESSAGE) # only correct passwords get a username hint login = self.client.post(login_url, self.super_email_bad_login) self.assertContains(login, ERROR_MESSAGE) new_user = User(username='jondoe', password='secret', email='[email protected]') new_user.save() # check to ensure if there are multiple email addresses a user doesn't get a 500 login = self.client.post(login_url, self.super_email_login) self.assertContains(login, ERROR_MESSAGE) # View User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.viewuser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Add User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.adduser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Change User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.changeuser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Delete User response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.deleteuser_login) self.assertRedirects(login, self.index_url) self.assertFalse(login.context) self.client.get(reverse('admin:logout')) # Regular User should not be able to login. response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.joepublic_login) self.assertContains(login, ERROR_MESSAGE) # Requests without username should not return 500 errors. response = self.client.get(self.index_url) self.assertEqual(response.status_code, 302) login = self.client.post(login_url, self.no_username_login) self.assertEqual(login.status_code, 200) self.assertFormError(login, 'form', 'username', ['This field is required.']) def test_login_redirect_for_direct_get(self): """ Login redirect should be to the admin index page when going directly to /admin/login/. """ response = self.client.get(reverse('admin:login')) self.assertEqual(response.status_code, 200) self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse('admin:index')) def test_login_has_permission(self): # Regular User should not be able to login. 
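        # The 'has_permission_admin' site applies its own access rule: as the
        # assertions below show, a user without model permissions is rejected
        # with 'permission denied', while an active non-staff user holding a
        # model permission can log in, as can staff users.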
        response = self.client.get(reverse('has_permission_admin:index'))
        self.assertEqual(response.status_code, 302)
        login = self.client.post(reverse('has_permission_admin:login'), self.joepublic_login)
        self.assertContains(login, 'permission denied')

        # User with permissions should be able to login.
        response = self.client.get(reverse('has_permission_admin:index'))
        self.assertEqual(response.status_code, 302)
        login = self.client.post(reverse('has_permission_admin:login'), self.nostaff_login)
        self.assertRedirects(login, reverse('has_permission_admin:index'))
        self.assertFalse(login.context)
        self.client.get(reverse('has_permission_admin:logout'))

        # Staff should be able to login.
        response = self.client.get(reverse('has_permission_admin:index'))
        self.assertEqual(response.status_code, 302)
        login = self.client.post(reverse('has_permission_admin:login'), {
            REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
            'username': 'deleteuser',
            'password': 'secret',
        })
        self.assertRedirects(login, reverse('has_permission_admin:index'))
        self.assertFalse(login.context)
        self.client.get(reverse('has_permission_admin:logout'))

    def test_login_successfully_redirects_to_original_URL(self):
        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)
        query_string = 'the-answer=42'
        redirect_url = '%s?%s' % (self.index_url, query_string)
        new_next = {REDIRECT_FIELD_NAME: redirect_url}
        post_data = self.super_login.copy()
        post_data.pop(REDIRECT_FIELD_NAME)
        login = self.client.post(
            '%s?%s' % (reverse('admin:login'), urlencode(new_next)), post_data)
        self.assertRedirects(login, redirect_url)

    def test_double_login_is_not_allowed(self):
        """Regression test for #19327"""
        login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))

        response = self.client.get(self.index_url)
        self.assertEqual(response.status_code, 302)

        # Establish a valid admin session
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)

        # Logging in with non-admin user fails
        login = self.client.post(login_url, self.joepublic_login)
        self.assertContains(login, ERROR_MESSAGE)

        # Establish a valid admin session
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)

        # Logging in with admin user while already logged in
        login = self.client.post(login_url, self.super_login)
        self.assertRedirects(login, self.index_url)
        self.assertFalse(login.context)
        self.client.get(reverse('admin:logout'))

    def test_login_page_notice_for_non_staff_users(self):
        """
        A logged-in non-staff user trying to access the admin index should be
        presented with the login page and a hint indicating that the current
        user doesn't have access to it.
""" hint_template = 'You are authenticated as {}' # Anonymous user should not be shown the hint response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'login-form') self.assertNotContains(response, hint_template.format(''), status_code=200) # Non-staff user should be shown the hint self.client.force_login(self.nostaffuser) response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'login-form') self.assertContains(response, hint_template.format(self.nostaffuser.username), status_code=200) def test_add_view(self): """Test add view restricts access and actually adds items.""" add_dict = { 'title': 'Døm ikke', 'content': '<p>great article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', 'section': self.s1.pk, } # Change User should not have access to add articles self.client.force_login(self.changeuser) # make sure the view removes test cookie self.assertIs(self.client.session.test_cookie_worked(), False) response = self.client.get(reverse('admin:admin_views_article_add')) self.assertEqual(response.status_code, 403) # Try POST just to make sure post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) self.client.get(reverse('admin:logout')) # View User should not have access to add articles self.client.force_login(self.viewuser) response = self.client.get(reverse('admin:admin_views_article_add')) self.assertEqual(response.status_code, 403) # Try POST just to make sure post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) # Now give the user permission to add but not change. self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('add', Article._meta))) response = self.client.get(reverse('admin:admin_views_article_add')) self.assertEqual(response.context['title'], 'Add article') self.assertContains(response, '<title>Add article | Django site admin</title>') self.assertContains(response, '<input type="submit" value="Save and view" name="_continue">') post = self.client.post(reverse('admin:admin_views_article_add'), add_dict, follow=False) self.assertEqual(post.status_code, 302) self.assertEqual(Article.objects.count(), 4) article = Article.objects.latest('pk') response = self.client.get(reverse('admin:admin_views_article_change', args=(article.pk,))) self.assertContains(response, '<li class="success">The article “Døm ikke” was added successfully.</li>') article.delete() self.client.get(reverse('admin:logout')) # Add user may login and POST to add view, then redirect to admin root self.client.force_login(self.adduser) addpage = self.client.get(reverse('admin:admin_views_article_add')) change_list_link = '&rsaquo; <a href="%s">Articles</a>' % reverse('admin:admin_views_article_changelist') self.assertNotContains( addpage, change_list_link, msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.' 
) post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertRedirects(post, self.index_url) self.assertEqual(Article.objects.count(), 4) self.assertEqual(len(mail.outbox), 2) self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object') self.client.get(reverse('admin:logout')) # The addition was logged correctly addition_log = LogEntry.objects.all()[0] new_article = Article.objects.last() article_ct = ContentType.objects.get_for_model(Article) self.assertEqual(addition_log.user_id, self.adduser.pk) self.assertEqual(addition_log.content_type_id, article_ct.pk) self.assertEqual(addition_log.object_id, str(new_article.pk)) self.assertEqual(addition_log.object_repr, "Døm ikke") self.assertEqual(addition_log.action_flag, ADDITION) self.assertEqual(addition_log.get_change_message(), "Added.") # Super can add too, but is redirected to the change list view self.client.force_login(self.superuser) addpage = self.client.get(reverse('admin:admin_views_article_add')) self.assertContains( addpage, change_list_link, msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.' ) post = self.client.post(reverse('admin:admin_views_article_add'), add_dict) self.assertRedirects(post, reverse('admin:admin_views_article_changelist')) self.assertEqual(Article.objects.count(), 5) self.client.get(reverse('admin:logout')) # 8509 - if a normal user is already logged in, it is possible # to change user into the superuser without error self.client.force_login(self.joepublicuser) # Check and make sure that if user expires, data still persists self.client.force_login(self.superuser) # make sure the view removes test cookie self.assertIs(self.client.session.test_cookie_worked(), False) @mock.patch('django.contrib.admin.options.InlineModelAdmin.has_change_permission') def test_add_view_with_view_only_inlines(self, has_change_permission): """User with add permission to a section but view-only for inlines.""" self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('add', Section._meta))) self.client.force_login(self.viewuser) # Valid POST creates a new section. data = { 'name': 'New obj', 'article_set-TOTAL_FORMS': 0, 'article_set-INITIAL_FORMS': 0, } response = self.client.post(reverse('admin:admin_views_section_add'), data) self.assertRedirects(response, reverse('admin:index')) self.assertEqual(Section.objects.latest('id').name, data['name']) # InlineModelAdmin.has_change_permission()'s obj argument is always # None during object add. self.assertEqual([obj for (request, obj), _ in has_change_permission.call_args_list], [None, None]) def test_change_view(self): """Change view should restrict access and allow users to edit items.""" change_dict = { 'title': 'Ikke fordømt', 'content': '<p>edited article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', 'section': self.s1.pk, } article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,)) article_changelist_url = reverse('admin:admin_views_article_changelist') # add user should not be able to view the list of article or change any of them self.client.force_login(self.adduser) response = self.client.get(article_changelist_url) self.assertEqual(response.status_code, 403) response = self.client.get(article_change_url) self.assertEqual(response.status_code, 403) post = self.client.post(article_change_url, change_dict) self.assertEqual(post.status_code, 403) self.client.get(reverse('admin:logout')) # view user can view articles but not make changes. 
        self.client.force_login(self.viewuser)
        response = self.client.get(article_changelist_url)
        self.assertContains(
            response,
            '<title>Select article to view | Django site admin</title>',
        )
        self.assertContains(response, '<h1>Select article to view</h1>')
        self.assertEqual(response.context['title'], 'Select article to view')
        response = self.client.get(article_change_url)
        self.assertContains(response, '<title>View article | Django site admin</title>')
        self.assertContains(response, '<h1>View article</h1>')
        self.assertContains(response, '<label>Extra form field:</label>')
        self.assertContains(response, '<a href="/test_admin/admin/admin_views/article/" class="closelink">Close</a>')
        self.assertEqual(response.context['title'], 'View article')
        post = self.client.post(article_change_url, change_dict)
        self.assertEqual(post.status_code, 403)
        self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>Middle content</p>')
        self.client.get(reverse('admin:logout'))

        # change user can view all items and edit them
        self.client.force_login(self.changeuser)
        response = self.client.get(article_changelist_url)
        self.assertEqual(response.context['title'], 'Select article to change')
        self.assertContains(
            response,
            '<title>Select article to change | Django site admin</title>',
        )
        self.assertContains(response, '<h1>Select article to change</h1>')
        response = self.client.get(article_change_url)
        self.assertEqual(response.context['title'], 'Change article')
        self.assertContains(
            response,
            '<title>Change article | Django site admin</title>',
        )
        self.assertContains(response, '<h1>Change article</h1>')
        post = self.client.post(article_change_url, change_dict)
        self.assertRedirects(post, article_changelist_url)
        self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>edited article</p>')

        # one error in form should produce singular error message, multiple errors plural
        change_dict['title'] = ''
        post = self.client.post(article_change_url, change_dict)
        self.assertContains(
            post, 'Please correct the error below.',
            msg_prefix='Singular error message not found in response to post with one error'
        )
        change_dict['content'] = ''
        post = self.client.post(article_change_url, change_dict)
        self.assertContains(
            post, 'Please correct the errors below.',
            msg_prefix='Plural error message not found in response to post with multiple errors'
        )
        self.client.get(reverse('admin:logout'))

        # Test redirection when using row-level change permissions. Refs #11513.
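        # The test ModelAdmin grants change permission only for objects with
        # even ids; as asserted below, the odd id=3 object is viewable but
        # read-only, and the id=1 object is not accessible at all.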
r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id") r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id") r3 = RowLevelChangePermissionModel.objects.create(id=3, name='odd id mult 3') r6 = RowLevelChangePermissionModel.objects.create(id=6, name='even id mult 3') change_url_1 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r1.pk,)) change_url_2 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r2.pk,)) change_url_3 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r3.pk,)) change_url_6 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r6.pk,)) logins = [self.superuser, self.viewuser, self.adduser, self.changeuser, self.deleteuser] for login_user in logins: with self.subTest(login_user.username): self.client.force_login(login_user) response = self.client.get(change_url_1) self.assertEqual(response.status_code, 403) response = self.client.post(change_url_1, {'name': 'changed'}) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id') self.assertEqual(response.status_code, 403) response = self.client.get(change_url_2) self.assertEqual(response.status_code, 200) response = self.client.post(change_url_2, {'name': 'changed'}) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed') self.assertRedirects(response, self.index_url) response = self.client.get(change_url_3) self.assertEqual(response.status_code, 200) response = self.client.post(change_url_3, {'name': 'changed'}) self.assertEqual(response.status_code, 403) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=3).name, 'odd id mult 3') response = self.client.get(change_url_6) self.assertEqual(response.status_code, 200) response = self.client.post(change_url_6, {'name': 'changed'}) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=6).name, 'changed') self.assertRedirects(response, self.index_url) self.client.get(reverse('admin:logout')) for login_user in [self.joepublicuser, self.nostaffuser]: with self.subTest(login_user.username): self.client.force_login(login_user) response = self.client.get(change_url_1, follow=True) self.assertContains(response, 'login-form') response = self.client.post(change_url_1, {'name': 'changed'}, follow=True) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id') self.assertContains(response, 'login-form') response = self.client.get(change_url_2, follow=True) self.assertContains(response, 'login-form') response = self.client.post(change_url_2, {'name': 'changed again'}, follow=True) self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed') self.assertContains(response, 'login-form') self.client.get(reverse('admin:logout')) def test_change_view_without_object_change_permission(self): """ The object should be read-only if the user has permission to view it and change objects of that type but not to change the current object. 
""" change_url = reverse('admin9:admin_views_article_change', args=(self.a1.pk,)) self.client.force_login(self.viewuser) response = self.client.get(change_url) self.assertEqual(response.context['title'], 'View article') self.assertContains(response, '<title>View article | Django site admin</title>') self.assertContains(response, '<h1>View article</h1>') self.assertContains(response, '<a href="/test_admin/admin9/admin_views/article/" class="closelink">Close</a>') def test_change_view_save_as_new(self): """ 'Save as new' should raise PermissionDenied for users without the 'add' permission. """ change_dict_save_as_new = { '_saveasnew': 'Save as new', 'title': 'Ikke fordømt', 'content': '<p>edited article</p>', 'date_0': '2008-03-18', 'date_1': '10:54:39', 'section': self.s1.pk, } article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,)) # Add user can perform "Save as new". article_count = Article.objects.count() self.client.force_login(self.adduser) post = self.client.post(article_change_url, change_dict_save_as_new) self.assertRedirects(post, self.index_url) self.assertEqual(Article.objects.count(), article_count + 1) self.client.logout() # Change user cannot perform "Save as new" (no 'add' permission). article_count = Article.objects.count() self.client.force_login(self.changeuser) post = self.client.post(article_change_url, change_dict_save_as_new) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), article_count) # User with both add and change permissions should be redirected to the # change page for the newly created object. article_count = Article.objects.count() self.client.force_login(self.superuser) post = self.client.post(article_change_url, change_dict_save_as_new) self.assertEqual(Article.objects.count(), article_count + 1) new_article = Article.objects.latest('id') self.assertRedirects(post, reverse('admin:admin_views_article_change', args=(new_article.pk,))) def test_change_view_with_view_only_inlines(self): """ User with change permission to a section but view-only for inlines. """ self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta))) self.client.force_login(self.viewuser) # GET shows inlines. response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertEqual(len(response.context['inline_admin_formsets']), 1) formset = response.context['inline_admin_formsets'][0] self.assertEqual(len(formset.forms), 3) # Valid POST changes the name. data = { 'name': 'Can edit name with view-only inlines', 'article_set-TOTAL_FORMS': 3, 'article_set-INITIAL_FORMS': 3 } response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data['name']) # Invalid POST reshows inlines. 
del data['name'] response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.context['inline_admin_formsets']), 1) formset = response.context['inline_admin_formsets'][0] self.assertEqual(len(formset.forms), 3) def test_change_view_with_view_and_add_inlines(self): """User has view and add permissions on the inline model.""" self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta))) self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('add', Article._meta))) self.client.force_login(self.viewuser) # GET shows inlines. response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertEqual(len(response.context['inline_admin_formsets']), 1) formset = response.context['inline_admin_formsets'][0] self.assertEqual(len(formset.forms), 6) # Valid POST creates a new article. data = { 'name': 'Can edit name with view-only inlines', 'article_set-TOTAL_FORMS': 6, 'article_set-INITIAL_FORMS': 3, 'article_set-3-id': [''], 'article_set-3-title': ['A title'], 'article_set-3-content': ['Added content'], 'article_set-3-date_0': ['2008-3-18'], 'article_set-3-date_1': ['11:54:58'], 'article_set-3-section': [str(self.s1.pk)], } response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data['name']) self.assertEqual(Article.objects.count(), 4) # Invalid POST reshows inlines. del data['name'] response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.context['inline_admin_formsets']), 1) formset = response.context['inline_admin_formsets'][0] self.assertEqual(len(formset.forms), 6) def test_change_view_with_view_and_delete_inlines(self): """User has view and delete permissions on the inline model.""" self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta))) self.client.force_login(self.viewuser) data = { 'name': 'Name is required.', 'article_set-TOTAL_FORMS': 6, 'article_set-INITIAL_FORMS': 3, 'article_set-0-id': [str(self.a1.pk)], 'article_set-0-DELETE': ['on'], } # Inline POST details are ignored without delete permission. response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Article.objects.count(), 3) # Deletion successful when delete permission is added. 
self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', Article._meta))) data = { 'name': 'Name is required.', 'article_set-TOTAL_FORMS': 6, 'article_set-INITIAL_FORMS': 3, 'article_set-0-id': [str(self.a1.pk)], 'article_set-0-DELETE': ['on'], } response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data) self.assertRedirects(response, reverse('admin:admin_views_section_changelist')) self.assertEqual(Article.objects.count(), 2) def test_delete_view(self): """Delete view should restrict access and actually delete items.""" delete_dict = {'post': 'yes'} delete_url = reverse('admin:admin_views_article_delete', args=(self.a1.pk,)) # add user should not be able to delete articles self.client.force_login(self.adduser) response = self.client.get(delete_url) self.assertEqual(response.status_code, 403) post = self.client.post(delete_url, delete_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) self.client.logout() # view user should not be able to delete articles self.client.force_login(self.viewuser) response = self.client.get(delete_url) self.assertEqual(response.status_code, 403) post = self.client.post(delete_url, delete_dict) self.assertEqual(post.status_code, 403) self.assertEqual(Article.objects.count(), 3) self.client.logout() # Delete user can delete self.client.force_login(self.deleteuser) response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))) self.assertContains(response, "<h2>Summary</h2>") self.assertContains(response, "<li>Articles: 3</li>") # test response contains link to related Article self.assertContains(response, "admin_views/article/%s/" % self.a1.pk) response = self.client.get(delete_url) self.assertContains(response, "admin_views/article/%s/" % self.a1.pk) self.assertContains(response, "<h2>Summary</h2>") self.assertContains(response, "<li>Articles: 1</li>") post = self.client.post(delete_url, delete_dict) self.assertRedirects(post, self.index_url) self.assertEqual(Article.objects.count(), 2) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object') article_ct = ContentType.objects.get_for_model(Article) logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION) self.assertEqual(logged.object_id, str(self.a1.pk)) def test_delete_view_with_no_default_permissions(self): """ The delete view allows users to delete collected objects without a 'delete' permission (ReadOnlyPizza.Meta.default_permissions is empty). """ pizza = ReadOnlyPizza.objects.create(name='Double Cheese') delete_url = reverse('admin:admin_views_readonlypizza_delete', args=(pizza.pk,)) self.client.force_login(self.adduser) response = self.client.get(delete_url) self.assertContains(response, 'admin_views/readonlypizza/%s/' % pizza.pk) self.assertContains(response, '<h2>Summary</h2>') self.assertContains(response, '<li>Read only pizzas: 1</li>') post = self.client.post(delete_url, {'post': 'yes'}) self.assertRedirects(post, reverse('admin:admin_views_readonlypizza_changelist')) self.assertEqual(ReadOnlyPizza.objects.count(), 0) def test_delete_view_nonexistent_obj(self): self.client.force_login(self.deleteuser) url = reverse('admin:admin_views_article_delete', args=('nonexistent',)) response = self.client.get(url, follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['article with ID “nonexistent” doesn’t exist. 
Perhaps it was deleted?'] ) def test_history_view(self): """History view should restrict access.""" # add user should not be able to view the list of article or change any of them self.client.force_login(self.adduser) response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,))) self.assertEqual(response.status_code, 403) self.client.get(reverse('admin:logout')) # view user can view all items self.client.force_login(self.viewuser) response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,))) self.assertEqual(response.status_code, 200) self.client.get(reverse('admin:logout')) # change user can view all items and edit them self.client.force_login(self.changeuser) response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,))) self.assertEqual(response.status_code, 200) # Test redirection when using row-level change permissions. Refs #11513. rl1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id") rl2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id") logins = [self.superuser, self.viewuser, self.adduser, self.changeuser, self.deleteuser] for login_user in logins: with self.subTest(login_user.username): self.client.force_login(login_user) url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,)) response = self.client.get(url) self.assertEqual(response.status_code, 403) url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,)) response = self.client.get(url) self.assertEqual(response.status_code, 200) self.client.get(reverse('admin:logout')) for login_user in [self.joepublicuser, self.nostaffuser]: with self.subTest(login_user.username): self.client.force_login(login_user) url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,)) response = self.client.get(url, follow=True) self.assertContains(response, 'login-form') url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,)) response = self.client.get(url, follow=True) self.assertContains(response, 'login-form') self.client.get(reverse('admin:logout')) def test_history_view_bad_url(self): self.client.force_login(self.changeuser) response = self.client.get(reverse('admin:admin_views_article_history', args=('foo',)), follow=True) self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['article with ID “foo” doesn’t exist. Perhaps it was deleted?'] ) def test_conditionally_show_add_section_link(self): """ The foreign key widget should only show the "add related" button if the user has permission to add that related item. """ self.client.force_login(self.adduser) # The user can't add sections yet, so they shouldn't see the "add section" link. url = reverse('admin:admin_views_article_add') add_link_text = 'add_id_section' response = self.client.get(url) self.assertNotContains(response, add_link_text) # Allow the user to add sections too. Now they can see the "add section" link. user = User.objects.get(username='adduser') perm = get_perm(Section, get_permission_codename('add', Section._meta)) user.user_permissions.add(perm) response = self.client.get(url) self.assertContains(response, add_link_text) def test_conditionally_show_change_section_link(self): """ The foreign key widget should only show the "change related" button if the user has permission to change that related item. 
""" def get_change_related(response): return response.context['adminform'].form.fields['section'].widget.can_change_related self.client.force_login(self.adduser) # The user can't change sections yet, so they shouldn't see the "change section" link. url = reverse('admin:admin_views_article_add') change_link_text = 'change_id_section' response = self.client.get(url) self.assertFalse(get_change_related(response)) self.assertNotContains(response, change_link_text) # Allow the user to change sections too. Now they can see the "change section" link. user = User.objects.get(username='adduser') perm = get_perm(Section, get_permission_codename('change', Section._meta)) user.user_permissions.add(perm) response = self.client.get(url) self.assertTrue(get_change_related(response)) self.assertContains(response, change_link_text) def test_conditionally_show_delete_section_link(self): """ The foreign key widget should only show the "delete related" button if the user has permission to delete that related item. """ def get_delete_related(response): return response.context['adminform'].form.fields['sub_section'].widget.can_delete_related self.client.force_login(self.adduser) # The user can't delete sections yet, so they shouldn't see the "delete section" link. url = reverse('admin:admin_views_article_add') delete_link_text = 'delete_id_sub_section' response = self.client.get(url) self.assertFalse(get_delete_related(response)) self.assertNotContains(response, delete_link_text) # Allow the user to delete sections too. Now they can see the "delete section" link. user = User.objects.get(username='adduser') perm = get_perm(Section, get_permission_codename('delete', Section._meta)) user.user_permissions.add(perm) response = self.client.get(url) self.assertTrue(get_delete_related(response)) self.assertContains(response, delete_link_text) def test_disabled_permissions_when_logged_in(self): self.client.force_login(self.superuser) superuser = User.objects.get(username='super') superuser.is_active = False superuser.save() response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'id="login-form"') self.assertNotContains(response, 'Log out') response = self.client.get(reverse('secure_view'), follow=True) self.assertContains(response, 'id="login-form"') def test_disabled_staff_permissions_when_logged_in(self): self.client.force_login(self.superuser) superuser = User.objects.get(username='super') superuser.is_staff = False superuser.save() response = self.client.get(self.index_url, follow=True) self.assertContains(response, 'id="login-form"') self.assertNotContains(response, 'Log out') response = self.client.get(reverse('secure_view'), follow=True) self.assertContains(response, 'id="login-form"') def test_app_list_permissions(self): """ If a user has no module perms, the app list returns a 404. 
""" opts = Article._meta change_user = User.objects.get(username='changeuser') permission = get_perm(Article, get_permission_codename('change', opts)) self.client.force_login(self.changeuser) # the user has no module permissions change_user.user_permissions.remove(permission) response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertEqual(response.status_code, 404) # the user now has module permissions change_user.user_permissions.add(permission) response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertEqual(response.status_code, 200) def test_shortcut_view_only_available_to_staff(self): """ Only admin users should be able to use the admin shortcut view. """ model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey) obj = ModelWithStringPrimaryKey.objects.create(string_pk='foo') shortcut_url = reverse('admin:view_on_site', args=(model_ctype.pk, obj.pk)) # Not logged in: we should see the login page. response = self.client.get(shortcut_url, follow=True) self.assertTemplateUsed(response, 'admin/login.html') # Logged in? Redirect. self.client.force_login(self.superuser) response = self.client.get(shortcut_url, follow=False) # Can't use self.assertRedirects() because User.get_absolute_url() is silly. self.assertEqual(response.status_code, 302) # Domain may depend on contrib.sites tests also run self.assertRegex(response.url, 'http://(testserver|example.com)/dummy/foo/') def test_has_module_permission(self): """ has_module_permission() returns True for all users who have any permission for that module (add, change, or delete), so that the module is displayed on the admin index page. """ self.client.force_login(self.superuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.viewuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.adduser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.changeuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') self.client.logout() self.client.force_login(self.deleteuser) response = self.client.get(self.index_url) self.assertContains(response, 'admin_views') self.assertContains(response, 'Articles') def test_overriding_has_module_permission(self): """ If has_module_permission() always returns False, the module shouldn't be displayed on the admin index page for any users. 
""" articles = Article._meta.verbose_name_plural.title() sections = Section._meta.verbose_name_plural.title() index_url = reverse('admin7:index') self.client.force_login(self.superuser) response = self.client.get(index_url) self.assertContains(response, sections) self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.viewuser) response = self.client.get(index_url) self.assertNotContains(response, 'admin_views') self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.adduser) response = self.client.get(index_url) self.assertNotContains(response, 'admin_views') self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.changeuser) response = self.client.get(index_url) self.assertNotContains(response, 'admin_views') self.assertNotContains(response, articles) self.client.logout() self.client.force_login(self.deleteuser) response = self.client.get(index_url) self.assertNotContains(response, articles) # The app list displays Sections but not Articles as the latter has # ModelAdmin.has_module_permission() = False. self.client.force_login(self.superuser) response = self.client.get(reverse('admin7:app_list', args=('admin_views',))) self.assertContains(response, sections) self.assertNotContains(response, articles) def test_post_save_message_no_forbidden_links_visible(self): """ Post-save message shouldn't contain a link to the change form if the user doesn't have the change permission. """ self.client.force_login(self.adduser) # Emulate Article creation for user with add-only permission. post_data = { "title": "Fun & games", "content": "Some content", "date_0": "2015-10-31", "date_1": "16:35:00", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_article_add'), post_data, follow=True) self.assertContains( response, '<li class="success">The article “Fun &amp; games” was added successfully.</li>', html=True ) @override_settings( ROOT_URLCONF='admin_views.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }], ) class AdminViewProxyModelPermissionsTests(TestCase): """Tests for proxy models permissions in the admin.""" @classmethod def setUpTestData(cls): cls.viewuser = User.objects.create_user(username='viewuser', password='secret', is_staff=True) cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True) cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True) # Setup permissions. opts = UserProxy._meta cls.viewuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('view', opts))) cls.adduser.user_permissions.add(get_perm(UserProxy, get_permission_codename('add', opts))) cls.changeuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('change', opts))) cls.deleteuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('delete', opts))) # UserProxy instances. 
cls.user_proxy = UserProxy.objects.create(username='user_proxy', password='secret') def test_add(self): self.client.force_login(self.adduser) url = reverse('admin:admin_views_userproxy_add') data = { 'username': 'can_add', 'password': 'secret', 'date_joined_0': '2019-01-15', 'date_joined_1': '16:59:10', } response = self.client.post(url, data, follow=True) self.assertEqual(response.status_code, 200) self.assertTrue(UserProxy.objects.filter(username='can_add').exists()) def test_view(self): self.client.force_login(self.viewuser) response = self.client.get(reverse('admin:admin_views_userproxy_changelist')) self.assertContains(response, '<h1>Select user proxy to view</h1>') response = self.client.get(reverse('admin:admin_views_userproxy_change', args=(self.user_proxy.pk,))) self.assertContains(response, '<h1>View user proxy</h1>') self.assertContains(response, '<div class="readonly">user_proxy</div>') def test_change(self): self.client.force_login(self.changeuser) data = { 'password': self.user_proxy.password, 'username': self.user_proxy.username, 'date_joined_0': self.user_proxy.date_joined.strftime('%Y-%m-%d'), 'date_joined_1': self.user_proxy.date_joined.strftime('%H:%M:%S'), 'first_name': 'first_name', } url = reverse('admin:admin_views_userproxy_change', args=(self.user_proxy.pk,)) response = self.client.post(url, data) self.assertRedirects(response, reverse('admin:admin_views_userproxy_changelist')) self.assertEqual(UserProxy.objects.get(pk=self.user_proxy.pk).first_name, 'first_name') def test_delete(self): self.client.force_login(self.deleteuser) url = reverse('admin:admin_views_userproxy_delete', args=(self.user_proxy.pk,)) response = self.client.post(url, {'post': 'yes'}, follow=True) self.assertEqual(response.status_code, 200) self.assertFalse(UserProxy.objects.filter(pk=self.user_proxy.pk).exists()) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewsNoUrlTest(TestCase): """Regression test for #17333""" @classmethod def setUpTestData(cls): # User who can change Reports cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.changeuser.user_permissions.add(get_perm(Report, get_permission_codename('change', Report._meta))) def test_no_standard_modeladmin_urls(self): """Admin index views don't break when user's ModelAdmin removes standard urls""" self.client.force_login(self.changeuser) r = self.client.get(reverse('admin:index')) # we shouldn't get a 500 error caused by a NoReverseMatch self.assertEqual(r.status_code, 200) self.client.get(reverse('admin:logout')) @skipUnlessDBFeature('can_defer_constraint_checks') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewDeletedObjectsTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True) cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.v1 = 
Villain.objects.create(name='Adam') cls.v2 = Villain.objects.create(name='Sue') cls.sv1 = SuperVillain.objects.create(name='Bob') cls.pl1 = Plot.objects.create(name='World Domination', team_leader=cls.v1, contact=cls.v2) cls.pl2 = Plot.objects.create(name='World Peace', team_leader=cls.v2, contact=cls.v2) cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1) cls.pd1 = PlotDetails.objects.create(details='almost finished', plot=cls.pl1) cls.sh1 = SecretHideout.objects.create(location='underground bunker', villain=cls.v1) cls.sh2 = SecretHideout.objects.create(location='floating castle', villain=cls.sv1) cls.ssh1 = SuperSecretHideout.objects.create(location='super floating castle!', supervillain=cls.sv1) cls.cy1 = CyclicOne.objects.create(name='I am recursive', two_id=1) cls.cy2 = CyclicTwo.objects.create(name='I am recursive too', one_id=1) def setUp(self): self.client.force_login(self.superuser) def test_nesting(self): """ Objects should be nested to display the relationships that cause them to be scheduled for deletion. """ pattern = re.compile( r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*' r'<li>Plot details: <a href="%s">almost finished</a>' % ( reverse('admin:admin_views_plot_change', args=(self.pl1.pk,)), reverse('admin:admin_views_plotdetails_change', args=(self.pd1.pk,)), ) ) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,))) self.assertRegex(response.content.decode(), pattern) def test_cyclic(self): """ Cyclic relationships should still cause each object to only be listed once. """ one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % ( reverse('admin:admin_views_cyclicone_change', args=(self.cy1.pk,)), ) two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % ( reverse('admin:admin_views_cyclictwo_change', args=(self.cy2.pk,)), ) response = self.client.get(reverse('admin:admin_views_cyclicone_delete', args=(self.cy1.pk,))) self.assertContains(response, one, 1) self.assertContains(response, two, 1) def test_perms_needed(self): self.client.logout() delete_user = User.objects.get(username='deleteuser') delete_user.user_permissions.add(get_perm(Plot, get_permission_codename('delete', Plot._meta))) self.client.force_login(self.deleteuser) response = self.client.get(reverse('admin:admin_views_plot_delete', args=(self.pl1.pk,))) self.assertContains(response, "your account doesn't have permission to delete the following types of objects") self.assertContains(response, "<li>plot details</li>") def test_protected(self): q = Question.objects.create(question="Why?") a1 = Answer.objects.create(question=q, answer="Because.") a2 = Answer.objects.create(question=q, answer="Yes.") response = self.client.get(reverse('admin:admin_views_question_delete', args=(q.pk,))) self.assertContains(response, "would require deleting the following protected related objects") self.assertContains( response, '<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,)) ) self.assertContains( response, '<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,)) ) def test_post_delete_protected(self): """ A POST request to delete protected objects should display the page which says the deletion is prohibited. 
""" q = Question.objects.create(question='Why?') Answer.objects.create(question=q, answer='Because.') response = self.client.post(reverse('admin:admin_views_question_delete', args=(q.pk,)), {'post': 'yes'}) self.assertEqual(Question.objects.count(), 1) self.assertContains(response, "would require deleting the following protected related objects") def test_restricted(self): album = Album.objects.create(title='Amaryllis') song = Song.objects.create(album=album, name='Unity') response = self.client.get(reverse('admin:admin_views_album_delete', args=(album.pk,))) self.assertContains( response, 'would require deleting the following protected related objects', ) self.assertContains( response, '<li>Song: <a href="%s">Unity</a></li>' % reverse('admin:admin_views_song_change', args=(song.pk,)) ) def test_post_delete_restricted(self): album = Album.objects.create(title='Amaryllis') Song.objects.create(album=album, name='Unity') response = self.client.post( reverse('admin:admin_views_album_delete', args=(album.pk,)), {'post': 'yes'}, ) self.assertEqual(Album.objects.count(), 1) self.assertContains( response, 'would require deleting the following protected related objects', ) def test_not_registered(self): should_contain = """<li>Secret hideout: underground bunker""" response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,))) self.assertContains(response, should_contain, 1) def test_multiple_fkeys_to_same_model(self): """ If a deleted object has two relationships from another model, both of those should be followed in looking for related objects to delete. """ should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse( 'admin:admin_views_plot_change', args=(self.pl1.pk,) ) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,))) self.assertContains(response, should_contain) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,))) self.assertContains(response, should_contain) def test_multiple_fkeys_to_same_instance(self): """ If a deleted object has two relationships pointing to it from another object, the other object should still only be listed once. """ should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse( 'admin:admin_views_plot_change', args=(self.pl2.pk,) ) response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,))) self.assertContains(response, should_contain, 1) def test_inheritance(self): """ In the case of an inherited model, if either the child or parent-model instance is deleted, both instances are listed for deletion, as well as any relationships they have. """ should_contain = [ '<li>Villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_villain_change', args=(self.sv1.pk,)), '<li>Super villain: <a href="%s">Bob</a>' % reverse( 'admin:admin_views_supervillain_change', args=(self.sv1.pk,) ), '<li>Secret hideout: floating castle', '<li>Super secret hideout: super floating castle!', ] response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.sv1.pk,))) for should in should_contain: self.assertContains(response, should, 1) response = self.client.get(reverse('admin:admin_views_supervillain_delete', args=(self.sv1.pk,))) for should in should_contain: self.assertContains(response, should, 1) def test_generic_relations(self): """ If a deleted object has GenericForeignKeys pointing to it, those objects should be listed for deletion. 
""" plot = self.pl3 tag = FunkyTag.objects.create(content_object=plot, name='hott') should_contain = '<li>Funky tag: <a href="%s">hott' % reverse( 'admin:admin_views_funkytag_change', args=(tag.id,)) response = self.client.get(reverse('admin:admin_views_plot_delete', args=(plot.pk,))) self.assertContains(response, should_contain) def test_generic_relations_with_related_query_name(self): """ If a deleted object has GenericForeignKey with GenericRelation(related_query_name='...') pointing to it, those objects should be listed for deletion. """ bookmark = Bookmark.objects.create(name='djangoproject') tag = FunkyTag.objects.create(content_object=bookmark, name='django') tag_url = reverse('admin:admin_views_funkytag_change', args=(tag.id,)) should_contain = '<li>Funky tag: <a href="%s">django' % tag_url response = self.client.get(reverse('admin:admin_views_bookmark_delete', args=(bookmark.pk,))) self.assertContains(response, should_contain) def test_delete_view_uses_get_deleted_objects(self): """The delete view uses ModelAdmin.get_deleted_objects().""" book = Book.objects.create(name='Test Book') response = self.client.get(reverse('admin2:admin_views_book_delete', args=(book.pk,))) # BookAdmin.get_deleted_objects() returns custom text. self.assertContains(response, 'a deletable object') @override_settings(ROOT_URLCONF='admin_views.urls') class TestGenericRelations(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.v1 = Villain.objects.create(name='Adam') cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1) def setUp(self): self.client.force_login(self.superuser) def test_generic_content_object_in_list_display(self): FunkyTag.objects.create(content_object=self.pl3, name='hott') response = self.client.get(reverse('admin:admin_views_funkytag_changelist')) self.assertContains(response, "%s</td>" % self.pl3) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewStringPrimaryKeyTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.pk = ( "abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 " r"""-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`""" ) cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk) content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk user_pk = cls.superuser.pk LogEntry.objects.log_action(user_pk, content_type_pk, cls.pk, cls.pk, 2, change_message='Changed something') def setUp(self): self.client.force_login(self.superuser) def test_get_history_view(self): """ Retrieving the history for an object using urlencoded form of primary key should work. Refs #12349, #18550. 
""" response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_history', args=(self.pk,))) self.assertContains(response, escape(self.pk)) self.assertContains(response, 'Changed something') def test_get_change_view(self): "Retrieving the object using urlencoded form of primary key should work" response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_change', args=(self.pk,))) self.assertContains(response, escape(self.pk)) def test_changelist_to_changeform_link(self): "Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072" response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_changelist')) # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding pk_final_url = escape(iri_to_uri(quote(self.pk))) change_url = reverse( 'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',) ).replace('__fk__', pk_final_url) should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (change_url, escape(self.pk)) self.assertContains(response, should_contain) def test_recentactions_link(self): "The link from the recent actions list referring to the changeform of the object should be quoted" response = self.client.get(reverse('admin:index')) link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),)) should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk)) self.assertContains(response, should_contain) def test_deleteconfirmation_link(self): "The link from the delete confirmation page referring back to the changeform of the object should be quoted" url = reverse('admin:admin_views_modelwithstringprimarykey_delete', args=(quote(self.pk),)) response = self.client.get(url) # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding change_url = reverse( 'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',) ).replace('__fk__', escape(iri_to_uri(quote(self.pk)))) should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk)) self.assertContains(response, should_contain) def test_url_conflicts_with_add(self): "A model with a primary key that ends with add or is `add` should be visible" add_model = ModelWithStringPrimaryKey.objects.create(pk="i have something to add") add_model.save() response = self.client.get( reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model.pk),)) ) should_contain = """<h1>Change model with string primary key</h1>""" self.assertContains(response, should_contain) add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add") add_url = reverse('admin:admin_views_modelwithstringprimarykey_add') change_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model2.pk),)) self.assertNotEqual(add_url, change_url) def test_url_conflicts_with_delete(self): "A model with a primary key that ends with delete should be visible" delete_model = ModelWithStringPrimaryKey(pk="delete") delete_model.save() response = self.client.get( reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(delete_model.pk),)) ) should_contain = """<h1>Change model with string primary key</h1>""" self.assertContains(response, should_contain) def test_url_conflicts_with_history(self): "A model with a primary key that ends with history should be visible" history_model = ModelWithStringPrimaryKey(pk="history") history_model.save() response = self.client.get( 
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(history_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)

    def test_shortcut_view_with_escaping(self):
        "'View on site' should work properly with char fields"
        model = ModelWithStringPrimaryKey(pk='abc_123')
        model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(model.pk),))
        )
        should_contain = '/%s/" class="viewsitelink">' % model.pk
        self.assertContains(response, should_contain)

    def test_change_view_history_link(self):
        """Object history button link should work and contain the pk value quoted."""
        url = reverse(
            'admin:%s_modelwithstringprimarykey_change' % ModelWithStringPrimaryKey._meta.app_label,
            args=(quote(self.pk),)
        )
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        expected_link = reverse(
            'admin:%s_modelwithstringprimarykey_history' % ModelWithStringPrimaryKey._meta.app_label,
            args=(quote(self.pk),)
        )
        self.assertContains(response, '<a href="%s" class="historylink"' % escape(expected_link))

    def test_redirect_on_add_view_continue_button(self):
        """As soon as an object is added using "Save and continue editing"
        button, the user should be redirected to the object's change_view.

        In case primary key is a string containing some special characters
        like slash or underscore, these characters must be escaped (see #22266)
        """
        response = self.client.post(
            reverse('admin:admin_views_modelwithstringprimarykey_add'),
            {
                'string_pk': '123/history',
                "_continue": "1",  # Save and continue editing
            }
        )
        self.assertEqual(response.status_code, 302)  # temporary redirect
        self.assertIn('/123_2Fhistory/', response.headers['location'])  # PK is quoted


@override_settings(ROOT_URLCONF='admin_views.urls')
class SecureViewTests(TestCase):
    """
    Test behavior of a view protected by the staff_member_required decorator.
    """
    def test_secure_view_shows_login_if_not_logged_in(self):
        secure_url = reverse('secure_view')
        response = self.client.get(secure_url)
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), secure_url))
        response = self.client.get(secure_url, follow=True)
        self.assertTemplateUsed(response, 'admin/login.html')
        self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)

    def test_staff_member_required_decorator_works_with_argument(self):
        """
        Staff_member_required decorator works with an argument
        (redirect_field_name).
""" secure_url = '/test_admin/admin/secure-view2/' response = self.client.get(secure_url) self.assertRedirects(response, '%s?myfield=%s' % (reverse('admin:login'), secure_url)) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewUnicodeTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.b1 = Book.objects.create(name='Lærdommer') cls.p1 = Promo.objects.create(name='<Promo for Lærdommer>', book=cls.b1) cls.chap1 = Chapter.objects.create( title='Norske bostaver æøå skaper problemer', content='<p>Svært frustrerende med UnicodeDecodeErro</p>', book=cls.b1 ) cls.chap2 = Chapter.objects.create( title='Kjærlighet', content='<p>La kjærligheten til de lidende seire.</p>', book=cls.b1) cls.chap3 = Chapter.objects.create(title='Kjærlighet', content='<p>Noe innhold</p>', book=cls.b1) cls.chap4 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='<Xtra(1) Norske bostaver æøå skaper problemer>') cls.chap5 = ChapterXtra1.objects.create(chap=cls.chap2, xtra='<Xtra(1) Kjærlighet>') cls.chap6 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='<Xtra(1) Kjærlighet>') cls.chap7 = ChapterXtra2.objects.create(chap=cls.chap1, xtra='<Xtra(2) Norske bostaver æøå skaper problemer>') cls.chap8 = ChapterXtra2.objects.create(chap=cls.chap2, xtra='<Xtra(2) Kjærlighet>') cls.chap9 = ChapterXtra2.objects.create(chap=cls.chap3, xtra='<Xtra(2) Kjærlighet>') def setUp(self): self.client.force_login(self.superuser) def test_unicode_edit(self): """ A test to ensure that POST on edit_view handles non-ASCII characters. """ post_data = { "name": "Test lærdommer", # inline data "chapter_set-TOTAL_FORMS": "6", "chapter_set-INITIAL_FORMS": "3", "chapter_set-MAX_NUM_FORMS": "0", "chapter_set-0-id": self.chap1.pk, "chapter_set-0-title": "Norske bostaver æøå skaper problemer", "chapter_set-0-content": "&lt;p&gt;Svært frustrerende med UnicodeDecodeError&lt;/p&gt;", "chapter_set-1-id": self.chap2.id, "chapter_set-1-title": "Kjærlighet.", "chapter_set-1-content": "&lt;p&gt;La kjærligheten til de lidende seire.&lt;/p&gt;", "chapter_set-2-id": self.chap3.id, "chapter_set-2-title": "Need a title.", "chapter_set-2-content": "&lt;p&gt;Newest content&lt;/p&gt;", "chapter_set-3-id": "", "chapter_set-3-title": "", "chapter_set-3-content": "", "chapter_set-4-id": "", "chapter_set-4-title": "", "chapter_set-4-content": "", "chapter_set-5-id": "", "chapter_set-5-title": "", "chapter_set-5-content": "", } response = self.client.post(reverse('admin:admin_views_book_change', args=(self.b1.pk,)), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere def test_unicode_delete(self): """ The delete_view handles non-ASCII characters """ delete_dict = {'post': 'yes'} delete_url = reverse('admin:admin_views_book_delete', args=(self.b1.pk,)) response = self.client.get(delete_url) self.assertEqual(response.status_code, 200) response = self.client.post(delete_url, delete_dict) self.assertRedirects(response, reverse('admin:admin_views_book_changelist')) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewListEditable(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( 
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_inheritance(self):
        Podcast.objects.create(name="This Week in Django", release_date=datetime.date.today())
        response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
        self.assertEqual(response.status_code, 200)

    def test_inheritance_2(self):
        Vodcast.objects.create(name="This Week in Django", released=True)
        response = self.client.get(reverse('admin:admin_views_vodcast_changelist'))
        self.assertEqual(response.status_code, 200)

    def test_custom_pk(self):
        Language.objects.create(iso='en', name='English', english_name='English')
        response = self.client.get(reverse('admin:admin_views_language_changelist'))
        self.assertEqual(response.status_code, 200)

    def test_changelist_input_html(self):
        response = self.client.get(reverse('admin:admin_views_person_changelist'))
        # 2 inputs per object(the field and the hidden id field) = 6
        # 4 management hidden fields = 4
        # 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
        # main form submit button = 1
        # search field and search submit button = 2
        # CSRF field = 1
        # field to track 'select all' across paginated views = 1
        # 6 + 4 + 4 + 1 + 2 + 1 + 1 = 19 inputs
        self.assertContains(response, "<input", count=19)
        # 1 select per object = 3 selects
        self.assertContains(response, "<select", count=4)

    def test_post_messages(self):
        # Ticket 12707: Saving inline editable should not show admin
        # action warnings
        data = {
            "form-TOTAL_FORMS": "3",
            "form-INITIAL_FORMS": "3",
            "form-MAX_NUM_FORMS": "0",
            "form-0-gender": "1",
            "form-0-id": str(self.per1.pk),
            "form-1-gender": "2",
            "form-1-id": str(self.per2.pk),
            "form-2-alive": "checked",
            "form-2-gender": "1",
            "form-2-id": str(self.per3.pk),
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_person_changelist'), data, follow=True)
        self.assertEqual(len(response.context['messages']), 1)

    def test_post_submission(self):
        data = {
            "form-TOTAL_FORMS": "3",
            "form-INITIAL_FORMS": "3",
            "form-MAX_NUM_FORMS": "0",
            "form-0-gender": "1",
            "form-0-id": str(self.per1.pk),
            "form-1-gender": "2",
            "form-1-id": str(self.per2.pk),
            "form-2-alive": "checked",
            "form-2-gender": "1",
            "form-2-id": str(self.per3.pk),
            "_save": "Save",
        }
        self.client.post(reverse('admin:admin_views_person_changelist'), data)
        self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
        self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)

        # test a filtered page
        data = {
            "form-TOTAL_FORMS": "2",
            "form-INITIAL_FORMS": "2",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": str(self.per1.pk),
            "form-0-gender": "1",
            "form-0-alive": "checked",
            "form-1-id": str(self.per3.pk),
            "form-1-gender": "1",
            "form-1-alive": "checked",
            "_save": "Save",
        }
        self.client.post(reverse('admin:admin_views_person_changelist') + '?gender__exact=1', data)
        self.assertIs(Person.objects.get(name="John Mauchly").alive, True)

        # test a searched page
        data = {
            "form-TOTAL_FORMS": "1",
            "form-INITIAL_FORMS": "1",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id":
str(self.per1.pk), "form-0-gender": "1", "_save": "Save", } self.client.post(reverse('admin:admin_views_person_changelist') + '?q=john', data) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) def test_non_field_errors(self): """ Non-field errors are displayed for each of the forms in the changelist's formset. """ fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai') fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india') fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza') data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-id": str(fd1.id), "form-0-reference": "123", "form-0-driver": "bill", "form-0-restaurant": "thai", # Same data as above: Forbidden because of unique_together! "form-1-id": str(fd2.id), "form-1-reference": "456", "form-1-driver": "bill", "form-1-restaurant": "thai", "form-2-id": str(fd3.id), "form-2-reference": "789", "form-2-driver": "bill", "form-2-restaurant": "pizza", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data) self.assertContains( response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery ' 'with this Driver and Restaurant already exists.</li></ul></td></tr>', 1, html=True ) data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-id": str(fd1.id), "form-0-reference": "123", "form-0-driver": "bill", "form-0-restaurant": "thai", # Same data as above: Forbidden because of unique_together! "form-1-id": str(fd2.id), "form-1-reference": "456", "form-1-driver": "bill", "form-1-restaurant": "thai", # Same data also. "form-2-id": str(fd3.id), "form-2-reference": "789", "form-2-driver": "bill", "form-2-restaurant": "thai", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data) self.assertContains( response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery ' 'with this Driver and Restaurant already exists.</li></ul></td></tr>', 2, html=True ) def test_non_form_errors(self): # test if non-form errors are handled; ticket #12716 data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per2.pk), "form-0-alive": "1", "form-0-gender": "2", # The form processing understands this as a list_editable "Save" # and not an action "Go". 
"_save": "Save", } response = self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertContains(response, "Grace is not a Zombie") def test_non_form_errors_is_errorlist(self): # test if non-form errors are correctly handled; ticket #12878 data = { "form-TOTAL_FORMS": "1", "form-INITIAL_FORMS": "1", "form-MAX_NUM_FORMS": "0", "form-0-id": str(self.per2.pk), "form-0-alive": "1", "form-0-gender": "2", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_person_changelist'), data) non_form_errors = response.context['cl'].formset.non_form_errors() self.assertIsInstance(non_form_errors, ErrorList) self.assertEqual( str(non_form_errors), str(ErrorList(['Grace is not a Zombie'], error_class='nonform')), ) def test_list_editable_ordering(self): collector = Collector.objects.create(id=1, name="Frederick Clegg") Category.objects.create(id=1, order=1, collector=collector) Category.objects.create(id=2, order=2, collector=collector) Category.objects.create(id=3, order=0, collector=collector) Category.objects.create(id=4, order=0, collector=collector) # NB: The order values must be changed so that the items are reordered. data = { "form-TOTAL_FORMS": "4", "form-INITIAL_FORMS": "4", "form-MAX_NUM_FORMS": "0", "form-0-order": "14", "form-0-id": "1", "form-0-collector": "1", "form-1-order": "13", "form-1-id": "2", "form-1-collector": "1", "form-2-order": "1", "form-2-id": "3", "form-2-collector": "1", "form-3-order": "0", "form-3-id": "4", "form-3-collector": "1", # The form processing understands this as a list_editable "Save" # and not an action "Go". "_save": "Save", } response = self.client.post(reverse('admin:admin_views_category_changelist'), data) # Successful post will redirect self.assertEqual(response.status_code, 302) # The order values have been applied to the right objects self.assertEqual(Category.objects.get(id=1).order, 14) self.assertEqual(Category.objects.get(id=2).order, 13) self.assertEqual(Category.objects.get(id=3).order, 1) self.assertEqual(Category.objects.get(id=4).order, 0) def test_list_editable_pagination(self): """ Pagination works for list_editable items. """ UnorderedObject.objects.create(id=1, name='Unordered object #1') UnorderedObject.objects.create(id=2, name='Unordered object #2') UnorderedObject.objects.create(id=3, name='Unordered object #3') response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist')) self.assertContains(response, 'Unordered object #3') self.assertContains(response, 'Unordered object #2') self.assertNotContains(response, 'Unordered object #1') response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist') + '?p=2') self.assertNotContains(response, 'Unordered object #3') self.assertNotContains(response, 'Unordered object #2') self.assertContains(response, 'Unordered object #1') def test_list_editable_action_submit(self): # List editable changes should not be executed if the action "Go" button is # used to submit the form. 
data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": "1", "form-1-gender": "2", "form-1-id": "2", "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": "3", "index": "0", "_selected_action": ['3'], "action": ['', 'delete_selected'], } self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, True) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1) def test_list_editable_action_choices(self): # List editable changes should be executed if the "Save" button is # used to submit the form - any action choices should be ignored. data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MAX_NUM_FORMS": "0", "form-0-gender": "1", "form-0-id": str(self.per1.pk), "form-1-gender": "2", "form-1-id": str(self.per2.pk), "form-2-alive": "checked", "form-2-gender": "1", "form-2-id": str(self.per3.pk), "_save": "Save", "_selected_action": ['1'], "action": ['', 'delete_selected'], } self.client.post(reverse('admin:admin_views_person_changelist'), data) self.assertIs(Person.objects.get(name="John Mauchly").alive, False) self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2) def test_list_editable_popup(self): """ Fields should not be list-editable in popups. """ response = self.client.get(reverse('admin:admin_views_person_changelist')) self.assertNotEqual(response.context['cl'].list_editable, ()) response = self.client.get(reverse('admin:admin_views_person_changelist') + '?%s' % IS_POPUP_VAR) self.assertEqual(response.context['cl'].list_editable, ()) def test_pk_hidden_fields(self): """ hidden pk fields aren't displayed in the table body and their corresponding human-readable value is displayed instead. The hidden pk fields are displayed but separately (not in the table) and only once. """ story1 = Story.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...') story2 = Story.objects.create( title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...', ) response = self.client.get(reverse('admin:admin_views_story_changelist')) # Only one hidden field, in a separate place than the table. self.assertContains(response, 'id="id_form-0-id"', 1) self.assertContains(response, 'id="id_form-1-id"', 1) self.assertContains( response, '<div class="hiddenfields">\n' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n</div>' % (story2.id, story1.id), html=True ) self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1) self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1) def test_pk_hidden_fields_with_list_display_links(self): """ Similarly as test_pk_hidden_fields, but when the hidden pk fields are referenced in list_display_links. Refs #12475. """ story1 = OtherStory.objects.create( title='The adventures of Guido', content='Once upon a time in Djangoland...', ) story2 = OtherStory.objects.create( title='Crouching Tiger, Hidden Python', content='The Python was sneaking into...', ) link1 = reverse('admin:admin_views_otherstory_change', args=(story1.pk,)) link2 = reverse('admin:admin_views_otherstory_change', args=(story2.pk,)) response = self.client.get(reverse('admin:admin_views_otherstory_changelist')) # Only one hidden field, in a separate place than the table. 
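        # As in test_pk_hidden_fields, each hidden pk input should appear
        # exactly once, grouped in a single <div class="hiddenfields"> outside
        # the results table; the pk column itself is rendered as a change-form
        # link here because it is listed in list_display_links.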
self.assertContains(response, 'id="id_form-0-id"', 1) self.assertContains(response, 'id="id_form-1-id"', 1) self.assertContains( response, '<div class="hiddenfields">\n' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n</div>' % (story2.id, story1.id), html=True ) self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id), 1) self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id), 1) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminSearchTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False) cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True) Person.objects.create(name='John Doe', gender=1) Person.objects.create(name='John O"Hara', gender=1) Person.objects.create(name="John O'Hara", gender=1) cls.t1 = Recommender.objects.create() cls.t2 = Recommendation.objects.create(the_recommender=cls.t1) cls.t3 = Recommender.objects.create() cls.t4 = Recommendation.objects.create(the_recommender=cls.t3) cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text='Bar') cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text='Foo') cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text='Few') cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text='Bas') def setUp(self): self.client.force_login(self.superuser) def test_search_on_sibling_models(self): "A search that mentions sibling models" response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar') # confirm the search returned 1 object self.assertContains(response, "\n1 recommendation\n") def test_with_fk_to_field(self): """ The to_field GET parameter is preserved when a search is performed. Refs #10918. 
""" response = self.client.get(reverse('admin:auth_user_changelist') + '?q=joe&%s=id' % TO_FIELD_VAR) self.assertContains(response, "\n1 user\n") self.assertContains(response, '<input type="hidden" name="%s" value="id">' % TO_FIELD_VAR, html=True) def test_exact_matches(self): response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar') # confirm the search returned one object self.assertContains(response, "\n1 recommendation\n") response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=ba') # confirm the search returned zero objects self.assertContains(response, "\n0 recommendations\n") def test_beginning_matches(self): response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui') # confirm the search returned one object self.assertContains(response, "\n1 person\n") self.assertContains(response, "Guido") response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=uido') # confirm the search returned zero objects self.assertContains(response, "\n0 persons\n") self.assertNotContains(response, "Guido") def test_pluggable_search(self): PluggableSearchPerson.objects.create(name="Bob", age=10) PluggableSearchPerson.objects.create(name="Amy", age=20) response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=Bob') # confirm the search returned one object self.assertContains(response, "\n1 pluggable search person\n") self.assertContains(response, "Bob") response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=20') # confirm the search returned one object self.assertContains(response, "\n1 pluggable search person\n") self.assertContains(response, "Amy") def test_reset_link(self): """ Test presence of reset link in search bar ("1 result (_x total_)"). """ # 1 query for session + 1 for fetching user # + 1 for filtered result + 1 for filtered count # + 1 for total count with self.assertNumQueries(5): response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui') self.assertContains( response, """<span class="small quiet">1 result (<a href="?">6 total</a>)</span>""", html=True ) def test_no_total_count(self): """ #8408 -- "Show all" should be displayed instead of the total count if ModelAdmin.show_full_result_count is False. 
""" # 1 query for session + 1 for fetching user # + 1 for filtered result + 1 for filtered count with self.assertNumQueries(4): response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar') self.assertContains( response, """<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""", html=True ) self.assertTrue(response.context['cl'].show_admin_actions) def test_search_with_spaces(self): url = reverse('admin:admin_views_person_changelist') + '?q=%s' tests = [ ('"John Doe"', 1), ("'John Doe'", 1), ('John Doe', 0), ('"John Doe" John', 1), ("'John Doe' John", 1), ("John Doe John", 0), ('"John Do"', 1), ("'John Do'", 1), ("'John O\'Hara'", 0), ("'John O\\'Hara'", 1), ('"John O\"Hara"', 0), ('"John O\\"Hara"', 1), ] for search, hits in tests: with self.subTest(search=search): response = self.client.get(url % search) self.assertContains(response, '\n%s person' % hits) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminInheritedInlinesTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_inline(self): """ Inline models which inherit from a common parent are correctly handled. """ foo_user = "foo username" bar_user = "bar username" name_re = re.compile(b'name="(.*?)"') # test the add case response = self.client.get(reverse('admin:admin_views_persona_add')) names = name_re.findall(response.content) # make sure we have no duplicate HTML names self.assertEqual(len(names), len(set(names))) # test the add case post_data = { "name": "Test Name", # inline data "accounts-TOTAL_FORMS": "1", "accounts-INITIAL_FORMS": "0", "accounts-MAX_NUM_FORMS": "0", "accounts-0-username": foo_user, "accounts-2-TOTAL_FORMS": "1", "accounts-2-INITIAL_FORMS": "0", "accounts-2-MAX_NUM_FORMS": "0", "accounts-2-0-username": bar_user, } response = self.client.post(reverse('admin:admin_views_persona_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere self.assertEqual(Persona.objects.count(), 1) self.assertEqual(FooAccount.objects.count(), 1) self.assertEqual(BarAccount.objects.count(), 1) self.assertEqual(FooAccount.objects.all()[0].username, foo_user) self.assertEqual(BarAccount.objects.all()[0].username, bar_user) self.assertEqual(Persona.objects.all()[0].accounts.count(), 2) persona_id = Persona.objects.all()[0].id foo_id = FooAccount.objects.all()[0].id bar_id = BarAccount.objects.all()[0].id # test the edit case response = self.client.get(reverse('admin:admin_views_persona_change', args=(persona_id,))) names = name_re.findall(response.content) # make sure we have no duplicate HTML names self.assertEqual(len(names), len(set(names))) post_data = { "name": "Test Name", "accounts-TOTAL_FORMS": "2", "accounts-INITIAL_FORMS": "1", "accounts-MAX_NUM_FORMS": "0", "accounts-0-username": "%s-1" % foo_user, "accounts-0-account_ptr": str(foo_id), "accounts-0-persona": str(persona_id), "accounts-2-TOTAL_FORMS": "2", "accounts-2-INITIAL_FORMS": "1", "accounts-2-MAX_NUM_FORMS": "0", "accounts-2-0-username": "%s-1" % bar_user, "accounts-2-0-account_ptr": str(bar_id), "accounts-2-0-persona": str(persona_id), } response = self.client.post(reverse('admin:admin_views_persona_change', args=(persona_id,)), post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Persona.objects.count(), 1) self.assertEqual(FooAccount.objects.count(), 1) 
self.assertEqual(BarAccount.objects.count(), 1) self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user) self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user) self.assertEqual(Persona.objects.all()[0].accounts.count(), 2) @override_settings(ROOT_URLCONF='admin_views.urls') class TestCustomChangeList(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_custom_changelist(self): """ Validate that a custom ChangeList class can be used (#9749) """ # Insert some data post_data = {"name": "First Gadget"} response = self.client.post(reverse('admin:admin_views_gadget_add'), post_data) self.assertEqual(response.status_code, 302) # redirect somewhere # Hit the page once to get messages out of the queue message list response = self.client.get(reverse('admin:admin_views_gadget_changelist')) # Data is still not visible on the page response = self.client.get(reverse('admin:admin_views_gadget_changelist')) self.assertNotContains(response, 'First Gadget') @override_settings(ROOT_URLCONF='admin_views.urls') class TestInlineNotEditable(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_GET_parent_add(self): """ InlineModelAdmin broken? """ response = self.client.get(reverse('admin:admin_views_parent_add')) self.assertEqual(response.status_code, 200) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminCustomQuerysetTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.pks = [EmptyModel.objects.create().id for i in range(3)] def setUp(self): self.client.force_login(self.superuser) self.super_login = { REDIRECT_FIELD_NAME: reverse('admin:index'), 'username': 'super', 'password': 'secret', } def test_changelist_view(self): response = self.client.get(reverse('admin:admin_views_emptymodel_changelist')) for i in self.pks: if i > 1: self.assertContains(response, 'Primary key = %s' % i) else: self.assertNotContains(response, 'Primary key = %s' % i) def test_changelist_view_count_queries(self): # create 2 Person objects Person.objects.create(name='person1', gender=1) Person.objects.create(name='person2', gender=2) changelist_url = reverse('admin:admin_views_person_changelist') # 5 queries are expected: 1 for the session, 1 for the user, # 2 for the counts and 1 for the objects on the page with self.assertNumQueries(5): resp = self.client.get(changelist_url) self.assertEqual(resp.context['selection_note'], '0 of 2 selected') self.assertEqual(resp.context['selection_note_all'], 'All 2 selected') with self.assertNumQueries(5): extra = {'q': 'not_in_name'} resp = self.client.get(changelist_url, extra) self.assertEqual(resp.context['selection_note'], '0 of 0 selected') self.assertEqual(resp.context['selection_note_all'], 'All 0 selected') with self.assertNumQueries(5): extra = {'q': 'person'} resp = self.client.get(changelist_url, extra) self.assertEqual(resp.context['selection_note'], '0 of 2 selected') self.assertEqual(resp.context['selection_note_all'], 'All 2 selected') with self.assertNumQueries(5): extra = {'gender__exact': '1'} resp = self.client.get(changelist_url, extra) self.assertEqual(resp.context['selection_note'], 
'0 of 1 selected') self.assertEqual(resp.context['selection_note_all'], '1 selected') def test_change_view(self): for i in self.pks: url = reverse('admin:admin_views_emptymodel_change', args=(i,)) response = self.client.get(url, follow=True) if i > 1: self.assertEqual(response.status_code, 200) else: self.assertRedirects(response, reverse('admin:index')) self.assertEqual( [m.message for m in response.context['messages']], ['empty model with ID “1” doesn’t exist. Perhaps it was deleted?'] ) def test_add_model_modeladmin_defer_qs(self): # Test for #14529. defer() is used in ModelAdmin.get_queryset() # model has __str__ method self.assertEqual(CoverLetter.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "author": "Candidate, Best", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_coverletter_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(CoverLetter.objects.count(), 1) # Message should contain non-ugly model verbose name pk = CoverLetter.objects.all()[0].pk self.assertContains( response, '<li class="success">The cover letter “<a href="%s">' 'Candidate, Best</a>” was added successfully.</li>' % reverse('admin:admin_views_coverletter_change', args=(pk,)), html=True ) # model has no __str__ method self.assertEqual(ShortMessage.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "content": "What's this SMS thing?", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_shortmessage_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(ShortMessage.objects.count(), 1) # Message should contain non-ugly model verbose name sm = ShortMessage.objects.all()[0] self.assertContains( response, '<li class="success">The short message “<a href="%s">' '%s</a>” was added successfully.</li>' % (reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True ) def test_add_model_modeladmin_only_qs(self): # Test for #14529. only() is used in ModelAdmin.get_queryset() # model has __str__ method self.assertEqual(Telegram.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "title": "Urgent telegram", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_telegram_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Telegram.objects.count(), 1) # Message should contain non-ugly model verbose name pk = Telegram.objects.all()[0].pk self.assertContains( response, '<li class="success">The telegram “<a href="%s">' 'Urgent telegram</a>” was added successfully.</li>' % reverse('admin:admin_views_telegram_change', args=(pk,)), html=True ) # model has no __str__ method self.assertEqual(Paper.objects.count(), 0) # Emulate model instance creation via the admin post_data = { "title": "My Modified Paper Title", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_paper_add'), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Paper.objects.count(), 1) # Message should contain non-ugly model verbose name p = Paper.objects.all()[0] self.assertContains( response, '<li class="success">The paper “<a href="%s">' '%s</a>” was added successfully.</li>' % (reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True ) def test_edit_model_modeladmin_defer_qs(self): # Test for #14529. 
defer() is used in ModelAdmin.get_queryset() # model has __str__ method cl = CoverLetter.objects.create(author="John Doe") self.assertEqual(CoverLetter.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_coverletter_change', args=(cl.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "author": "John Doe II", "_save": "Save", } url = reverse('admin:admin_views_coverletter_change', args=(cl.pk,)) response = self.client.post(url, post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(CoverLetter.objects.count(), 1) # Message should contain non-ugly model verbose name. Instance # representation is set by model's __str__() self.assertContains( response, '<li class="success">The cover letter “<a href="%s">' 'John Doe II</a>” was changed successfully.</li>' % reverse('admin:admin_views_coverletter_change', args=(cl.pk,)), html=True ) # model has no __str__ method sm = ShortMessage.objects.create(content="This is expensive") self.assertEqual(ShortMessage.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "content": "Too expensive", "_save": "Save", } url = reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)) response = self.client.post(url, post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(ShortMessage.objects.count(), 1) # Message should contain non-ugly model verbose name. The ugly(!) # instance representation is set by __str__(). self.assertContains( response, '<li class="success">The short message “<a href="%s">' '%s</a>” was changed successfully.</li>' % (reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True ) def test_edit_model_modeladmin_only_qs(self): # Test for #14529. only() is used in ModelAdmin.get_queryset() # model has __str__ method t = Telegram.objects.create(title="First Telegram") self.assertEqual(Telegram.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_telegram_change', args=(t.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "title": "Telegram without typo", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_telegram_change', args=(t.pk,)), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Telegram.objects.count(), 1) # Message should contain non-ugly model verbose name. The instance # representation is set by model's __str__() self.assertContains( response, '<li class="success">The telegram “<a href="%s">' 'Telegram without typo</a>” was changed successfully.</li>' % reverse('admin:admin_views_telegram_change', args=(t.pk,)), html=True ) # model has no __str__ method p = Paper.objects.create(title="My Paper Title") self.assertEqual(Paper.objects.count(), 1) response = self.client.get(reverse('admin:admin_views_paper_change', args=(p.pk,))) self.assertEqual(response.status_code, 200) # Emulate model instance edit via the admin post_data = { "title": "My Modified Paper Title", "_save": "Save", } response = self.client.post(reverse('admin:admin_views_paper_change', args=(p.pk,)), post_data, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(Paper.objects.count(), 1) # Message should contain non-ugly model verbose name. The ugly(!) # instance representation is set by __str__(). 
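        # "Ugly" refers to the default Model.__str__() fallback (roughly
        # "Paper object (<pk>)"), since Paper defines no __str__(); the
        # success message is still expected to link to the change form.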
self.assertContains( response, '<li class="success">The paper “<a href="%s">' '%s</a>” was changed successfully.</li>' % (reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True ) def test_history_view_custom_qs(self): """ Custom querysets are considered for the admin history view. """ self.client.post(reverse('admin:login'), self.super_login) FilteredManager.objects.create(pk=1) FilteredManager.objects.create(pk=2) response = self.client.get(reverse('admin:admin_views_filteredmanager_changelist')) self.assertContains(response, "PK=1") self.assertContains(response, "PK=2") self.assertEqual( self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(1,))).status_code, 200 ) self.assertEqual( self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(2,))).status_code, 200 ) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminInlineFileUploadTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') file1 = tempfile.NamedTemporaryFile(suffix=".file1") file1.write(b'a' * (2 ** 21)) filename = file1.name file1.close() cls.gallery = Gallery.objects.create(name='Test Gallery') cls.picture = Picture.objects.create( name='Test Picture', image=filename, gallery=cls.gallery, ) def setUp(self): self.client.force_login(self.superuser) def test_form_has_multipart_enctype(self): response = self.client.get( reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)) ) self.assertIs(response.context['has_file_field'], True) self.assertContains(response, MULTIPART_ENCTYPE) def test_inline_file_upload_edit_validation_error_post(self): """ Inline file uploads correctly display prior data (#10002). """ post_data = { "name": "Test Gallery", "pictures-TOTAL_FORMS": "2", "pictures-INITIAL_FORMS": "1", "pictures-MAX_NUM_FORMS": "0", "pictures-0-id": str(self.picture.id), "pictures-0-gallery": str(self.gallery.id), "pictures-0-name": "Test Picture", "pictures-0-image": "", "pictures-1-id": "", "pictures-1-gallery": str(self.gallery.id), "pictures-1-name": "Test Picture 2", "pictures-1-image": "", } response = self.client.post( reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)), post_data ) self.assertContains(response, b"Currently") @override_settings(ROOT_URLCONF='admin_views.urls') class AdminInlineTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.collector = Collector.objects.create(pk=1, name='John Fowles') def setUp(self): self.post_data = { "name": "Test Name", "widget_set-TOTAL_FORMS": "3", "widget_set-INITIAL_FORMS": "0", "widget_set-MAX_NUM_FORMS": "0", "widget_set-0-id": "", "widget_set-0-owner": "1", "widget_set-0-name": "", "widget_set-1-id": "", "widget_set-1-owner": "1", "widget_set-1-name": "", "widget_set-2-id": "", "widget_set-2-owner": "1", "widget_set-2-name": "", "doohickey_set-TOTAL_FORMS": "3", "doohickey_set-INITIAL_FORMS": "0", "doohickey_set-MAX_NUM_FORMS": "0", "doohickey_set-0-owner": "1", "doohickey_set-0-code": "", "doohickey_set-0-name": "", "doohickey_set-1-owner": "1", "doohickey_set-1-code": "", "doohickey_set-1-name": "", "doohickey_set-2-owner": "1", "doohickey_set-2-code": "", "doohickey_set-2-name": "", "grommet_set-TOTAL_FORMS": "3", "grommet_set-INITIAL_FORMS": "0", "grommet_set-MAX_NUM_FORMS": "0", "grommet_set-0-code": "", "grommet_set-0-owner": "1", "grommet_set-0-name": 
"", "grommet_set-1-code": "", "grommet_set-1-owner": "1", "grommet_set-1-name": "", "grommet_set-2-code": "", "grommet_set-2-owner": "1", "grommet_set-2-name": "", "whatsit_set-TOTAL_FORMS": "3", "whatsit_set-INITIAL_FORMS": "0", "whatsit_set-MAX_NUM_FORMS": "0", "whatsit_set-0-owner": "1", "whatsit_set-0-index": "", "whatsit_set-0-name": "", "whatsit_set-1-owner": "1", "whatsit_set-1-index": "", "whatsit_set-1-name": "", "whatsit_set-2-owner": "1", "whatsit_set-2-index": "", "whatsit_set-2-name": "", "fancydoodad_set-TOTAL_FORMS": "3", "fancydoodad_set-INITIAL_FORMS": "0", "fancydoodad_set-MAX_NUM_FORMS": "0", "fancydoodad_set-0-doodad_ptr": "", "fancydoodad_set-0-owner": "1", "fancydoodad_set-0-name": "", "fancydoodad_set-0-expensive": "on", "fancydoodad_set-1-doodad_ptr": "", "fancydoodad_set-1-owner": "1", "fancydoodad_set-1-name": "", "fancydoodad_set-1-expensive": "on", "fancydoodad_set-2-doodad_ptr": "", "fancydoodad_set-2-owner": "1", "fancydoodad_set-2-name": "", "fancydoodad_set-2-expensive": "on", "category_set-TOTAL_FORMS": "3", "category_set-INITIAL_FORMS": "0", "category_set-MAX_NUM_FORMS": "0", "category_set-0-order": "", "category_set-0-id": "", "category_set-0-collector": "1", "category_set-1-order": "", "category_set-1-id": "", "category_set-1-collector": "1", "category_set-2-order": "", "category_set-2-id": "", "category_set-2-collector": "1", } self.client.force_login(self.superuser) def test_simple_inline(self): "A simple model can be saved as inlines" # First add a new inline self.post_data['widget_set-0-name'] = "Widget 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Widget.objects.count(), 1) self.assertEqual(Widget.objects.all()[0].name, "Widget 1") widget_id = Widget.objects.all()[0].id # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="widget_set-0-id"') # No file or image fields, no enctype on the forms self.assertIs(response.context['has_file_field'], False) self.assertNotContains(response, MULTIPART_ENCTYPE) # Now resave that inline self.post_data['widget_set-INITIAL_FORMS'] = "1" self.post_data['widget_set-0-id'] = str(widget_id) self.post_data['widget_set-0-name'] = "Widget 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Widget.objects.count(), 1) self.assertEqual(Widget.objects.all()[0].name, "Widget 1") # Now modify that inline self.post_data['widget_set-INITIAL_FORMS'] = "1" self.post_data['widget_set-0-id'] = str(widget_id) self.post_data['widget_set-0-name'] = "Widget 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Widget.objects.count(), 1) self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated") def test_explicit_autofield_inline(self): "A model with an explicit autofield primary key can be saved as inlines. 
Regression for #8093" # First add a new inline self.post_data['grommet_set-0-name'] = "Grommet 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Grommet.objects.count(), 1) self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1") # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="grommet_set-0-code"') # Now resave that inline self.post_data['grommet_set-INITIAL_FORMS'] = "1" self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code) self.post_data['grommet_set-0-name'] = "Grommet 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Grommet.objects.count(), 1) self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1") # Now modify that inline self.post_data['grommet_set-INITIAL_FORMS'] = "1" self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code) self.post_data['grommet_set-0-name'] = "Grommet 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Grommet.objects.count(), 1) self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated") def test_char_pk_inline(self): "A model with a character PK can be saved as inlines. Regression for #10992" # First add a new inline self.post_data['doohickey_set-0-code'] = "DH1" self.post_data['doohickey_set-0-name'] = "Doohickey 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(DooHickey.objects.count(), 1) self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1") # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="doohickey_set-0-code"') # Now resave that inline self.post_data['doohickey_set-INITIAL_FORMS'] = "1" self.post_data['doohickey_set-0-code'] = "DH1" self.post_data['doohickey_set-0-name'] = "Doohickey 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(DooHickey.objects.count(), 1) self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1") # Now modify that inline self.post_data['doohickey_set-INITIAL_FORMS'] = "1" self.post_data['doohickey_set-0-code'] = "DH1" self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(DooHickey.objects.count(), 1) self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated") def test_integer_pk_inline(self): "A model with an integer PK can be saved as inlines. 
Regression for #10992" # First add a new inline self.post_data['whatsit_set-0-index'] = "42" self.post_data['whatsit_set-0-name'] = "Whatsit 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Whatsit.objects.count(), 1) self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1") # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="whatsit_set-0-index"') # Now resave that inline self.post_data['whatsit_set-INITIAL_FORMS'] = "1" self.post_data['whatsit_set-0-index'] = "42" self.post_data['whatsit_set-0-name'] = "Whatsit 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Whatsit.objects.count(), 1) self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1") # Now modify that inline self.post_data['whatsit_set-INITIAL_FORMS'] = "1" self.post_data['whatsit_set-0-index'] = "42" self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(Whatsit.objects.count(), 1) self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated") def test_inherited_inline(self): "An inherited model can be saved as inlines. Regression for #11042" # First add a new inline self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1" collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(FancyDoodad.objects.count(), 1) self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1") doodad_pk = FancyDoodad.objects.all()[0].pk # The PK link exists on the rendered form response = self.client.get(collector_url) self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"') # Now resave that inline self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1" self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk) self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(FancyDoodad.objects.count(), 1) self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1") # Now modify that inline self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1" self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk) self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated" response = self.client.post(collector_url, self.post_data) self.assertEqual(response.status_code, 302) self.assertEqual(FancyDoodad.objects.count(), 1) self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated") def test_ordered_inline(self): """ An inline with an editable ordering fields is updated correctly. """ # Create some objects with an initial ordering Category.objects.create(id=1, order=1, collector=self.collector) Category.objects.create(id=2, order=2, collector=self.collector) Category.objects.create(id=3, order=0, collector=self.collector) Category.objects.create(id=4, order=0, collector=self.collector) # NB: The order values must be changed so that the items are reordered. 
self.post_data.update({ "name": "Frederick Clegg", "category_set-TOTAL_FORMS": "7", "category_set-INITIAL_FORMS": "4", "category_set-MAX_NUM_FORMS": "0", "category_set-0-order": "14", "category_set-0-id": "1", "category_set-0-collector": "1", "category_set-1-order": "13", "category_set-1-id": "2", "category_set-1-collector": "1", "category_set-2-order": "1", "category_set-2-id": "3", "category_set-2-collector": "1", "category_set-3-order": "0", "category_set-3-id": "4", "category_set-3-collector": "1", "category_set-4-order": "", "category_set-4-id": "", "category_set-4-collector": "1", "category_set-5-order": "", "category_set-5-id": "", "category_set-5-collector": "1", "category_set-6-order": "", "category_set-6-id": "", "category_set-6-collector": "1", }) collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,)) response = self.client.post(collector_url, self.post_data) # Successful post will redirect self.assertEqual(response.status_code, 302) # The order values have been applied to the right objects self.assertEqual(self.collector.category_set.count(), 4) self.assertEqual(Category.objects.get(id=1).order, 14) self.assertEqual(Category.objects.get(id=2).order, 13) self.assertEqual(Category.objects.get(id=3).order, 1) self.assertEqual(Category.objects.get(id=4).order, 0) @override_settings(ROOT_URLCONF='admin_views.urls') class NeverCacheTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') def setUp(self): self.client.force_login(self.superuser) def test_admin_index(self): "Check the never-cache status of the main index" response = self.client.get(reverse('admin:index')) self.assertEqual(get_max_age(response), 0) def test_app_index(self): "Check the never-cache status of an application index" response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertEqual(get_max_age(response), 0) def test_model_index(self): "Check the never-cache status of a model index" response = self.client.get(reverse('admin:admin_views_fabric_changelist')) self.assertEqual(get_max_age(response), 0) def test_model_add(self): "Check the never-cache status of a model add page" response = self.client.get(reverse('admin:admin_views_fabric_add')) self.assertEqual(get_max_age(response), 0) def test_model_view(self): "Check the never-cache status of a model edit page" response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,))) self.assertEqual(get_max_age(response), 0) def test_model_history(self): "Check the never-cache status of a model history page" response = self.client.get(reverse('admin:admin_views_section_history', args=(self.s1.pk,))) self.assertEqual(get_max_age(response), 0) def test_model_delete(self): "Check the never-cache status of a model delete page" response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))) self.assertEqual(get_max_age(response), 0) def test_login(self): "Check the never-cache status of login views" self.client.logout() response = self.client.get(reverse('admin:index')) self.assertEqual(get_max_age(response), 0) def test_logout(self): "Check the never-cache status of logout view" response = self.client.get(reverse('admin:logout')) self.assertEqual(get_max_age(response), 0) def test_password_change(self): "Check the never-cache status of the password change view" self.client.logout() response = 
self.client.get(reverse('admin:password_change')) self.assertIsNone(get_max_age(response)) def test_password_change_done(self): "Check the never-cache status of the password change done view" response = self.client.get(reverse('admin:password_change_done')) self.assertIsNone(get_max_age(response)) def test_JS_i18n(self): "Check the never-cache status of the JavaScript i18n view" response = self.client.get(reverse('admin:jsi18n')) self.assertIsNone(get_max_age(response)) @override_settings(ROOT_URLCONF='admin_views.urls') class PrePopulatedTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def setUp(self): self.client.force_login(self.superuser) def test_prepopulated_on(self): response = self.client.get(reverse('admin:admin_views_prepopulatedpost_add')) self.assertContains(response, "&quot;id&quot;: &quot;#id_slug&quot;") self.assertContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]") self.assertContains(response, "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;") def test_prepopulated_off(self): response = self.client.get(reverse('admin:admin_views_prepopulatedpost_change', args=(self.p1.pk,))) self.assertContains(response, "A Long Title") self.assertNotContains(response, "&quot;id&quot;: &quot;#id_slug&quot;") self.assertNotContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]") self.assertNotContains( response, "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;" ) @override_settings(USE_THOUSAND_SEPARATOR=True) def test_prepopulated_maxlength_localized(self): """ Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure that maxLength (in the JavaScript) is rendered without separators. """ response = self.client.get(reverse('admin:admin_views_prepopulatedpostlargeslug_add')) self.assertContains(response, "&quot;maxLength&quot;: 1000") # instead of 1,000 def test_view_only_add_form(self): """ PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug' which is present in the add view, even if the ModelAdmin.has_change_permission() returns False. """ response = self.client.get(reverse('admin7:admin_views_prepopulatedpost_add')) self.assertContains(response, 'data-prepopulated-fields=') self.assertContains(response, '&quot;id&quot;: &quot;#id_slug&quot;') def test_view_only_change_form(self): """ PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'. That doesn't break a view-only change view. 
""" response = self.client.get(reverse('admin7:admin_views_prepopulatedpost_change', args=(self.p1.pk,))) self.assertContains(response, 'data-prepopulated-fields="[]"') self.assertContains(response, '<div class="readonly">%s</div>' % self.p1.slug) @override_settings(ROOT_URLCONF='admin_views.urls') class SeleniumTests(AdminSeleniumTestCase): available_apps = ['admin_views'] + AdminSeleniumTestCase.available_apps def setUp(self): self.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') self.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def test_login_button_centered(self): from selenium.webdriver.common.by import By self.selenium.get(self.live_server_url + reverse('admin:login')) button = self.selenium.find_element(By.CSS_SELECTOR, '.submit-row input') offset_left = button.get_property('offsetLeft') offset_right = ( button.get_property('offsetParent').get_property('offsetWidth') - (offset_left + button.get_property('offsetWidth')) ) # Use assertAlmostEqual to avoid pixel rounding errors. self.assertAlmostEqual(offset_left, offset_right, delta=3) def test_prepopulated_fields(self): """ The JavaScript-automated prepopulated fields work with the main form and with stacked and tabular inlines. Refs #13068, #9264, #9983, #9784. """ from selenium.webdriver.common.by import By self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_mainprepopulated_add')) self.wait_for('.select2') # Main form ---------------------------------------------------------- self.selenium.find_element(By.ID, 'id_pubdate').send_keys('2012-02-18') self.select_option('#id_status', 'option two') self.selenium.find_element(By.ID, 'id_name').send_keys(' the mAin nÀMë and it\'s awεšomeıııİ') slug1 = self.selenium.find_element(By.ID, 'id_slug1').get_attribute('value') slug2 = self.selenium.find_element(By.ID, 'id_slug2').get_attribute('value') slug3 = self.selenium.find_element(By.ID, 'id_slug3').get_attribute('value') self.assertEqual(slug1, 'the-main-name-and-its-awesomeiiii-2012-02-18') self.assertEqual(slug2, 'option-two-the-main-name-and-its-awesomeiiii') self.assertEqual(slug3, 'the-main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131i') # Stacked inlines with fieldsets ------------------------------------- # Initial inline self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17') self.select_option('#id_relatedprepopulated_set-0-status', 'option one') self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-0-name').send_keys( ' here is a sŤāÇkeð inline ! ' ) slug1 = self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-0-slug1').get_attribute('value') slug2 = self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-0-slug2').get_attribute('value') self.assertEqual(slug1, 'here-is-a-stacked-inline-2011-12-17') self.assertEqual(slug2, 'option-one-here-is-a-stacked-inline') initial_select2_inputs = self.selenium.find_elements(By.CLASS_NAME, 'select2-selection') # Inline formsets have empty/invisible forms. # Only the 4 visible select2 inputs are initialized. 
num_initial_select2_inputs = len(initial_select2_inputs) self.assertEqual(num_initial_select2_inputs, 4) # Add an inline self.selenium.find_elements(By.LINK_TEXT, 'Add another Related prepopulated')[0].click() self.assertEqual( len(self.selenium.find_elements(By.CLASS_NAME, 'select2-selection')), num_initial_select2_inputs + 2 ) self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25') self.select_option('#id_relatedprepopulated_set-1-status', 'option two') self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-1-name').send_keys( ' now you haVe anöther sŤāÇkeð inline with a very ... ' 'loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... ' ) slug1 = self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-1-slug1').get_attribute('value') slug2 = self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-1-slug2').get_attribute('value') # 50 characters maximum for slug1 field self.assertEqual(slug1, 'now-you-have-another-stacked-inline-with-a-very-lo') # 60 characters maximum for slug2 field self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-with-a-very-l') # Tabular inlines ---------------------------------------------------- # Initial inline element = self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-2-0-status') self.selenium.execute_script('window.scrollTo(0, %s);' % element.location['y']) self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07') self.select_option('#id_relatedprepopulated_set-2-0-status', 'option two') self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-2-0-name').send_keys( 'And now, with a tÃbűlaŘ inline !!!' ) slug1 = self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-2-0-slug1').get_attribute('value') slug2 = self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-2-0-slug2').get_attribute('value') self.assertEqual(slug1, 'and-now-with-a-tabular-inline-1234-12-07') self.assertEqual(slug2, 'option-two-and-now-with-a-tabular-inline') # Add an inline # Button may be outside the browser frame. element = self.selenium.find_elements(By.LINK_TEXT, 'Add another Related prepopulated')[1] self.selenium.execute_script('window.scrollTo(0, %s);' % element.location['y']) element.click() self.assertEqual( len(self.selenium.find_elements(By.CLASS_NAME, 'select2-selection')), num_initial_select2_inputs + 4 ) self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22') self.select_option('#id_relatedprepopulated_set-2-1-status', 'option one') self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-2-1-name').send_keys( r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters' ) slug1 = self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-2-1-slug1').get_attribute('value') slug2 = self.selenium.find_element(By.ID, 'id_relatedprepopulated_set-2-1-slug2').get_attribute('value') self.assertEqual(slug1, 'tabular-inline-with-ignored-characters-1981-08-22') self.assertEqual(slug2, 'option-one-tabular-inline-with-ignored-characters') # Add an inline without an initial inline. # The button is outside of the browser frame. 
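        # Scrolling the page first keeps the off-screen "Add another" link
        # clickable; clicking an element outside the viewport can otherwise
        # fail or be intercepted in Selenium (the same reason scrollTo() is
        # used for the tabular inline above).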
self.selenium.execute_script("window.scrollTo(0, document.body.scrollHeight);") self.selenium.find_elements(By.LINK_TEXT, 'Add another Related prepopulated')[2].click() self.assertEqual( len(self.selenium.find_elements(By.CLASS_NAME, 'select2-selection')), num_initial_select2_inputs + 6 ) # Stacked Inlines without fieldsets ---------------------------------- # Initial inline. row_id = 'id_relatedprepopulated_set-4-0-' self.selenium.find_element(By.ID, f'{row_id}pubdate').send_keys('2011-12-12') self.select_option(f'#{row_id}status', 'option one') self.selenium.find_element(By.ID, f'{row_id}name').send_keys(' sŤāÇkeð inline ! ') slug1 = self.selenium.find_element(By.ID, f'{row_id}slug1').get_attribute('value') slug2 = self.selenium.find_element(By.ID, f'{row_id}slug2').get_attribute('value') self.assertEqual(slug1, 'stacked-inline-2011-12-12') self.assertEqual(slug2, 'option-one') # Add inline. self.selenium.find_elements( By.LINK_TEXT, 'Add another Related prepopulated', )[3].click() row_id = 'id_relatedprepopulated_set-4-1-' self.selenium.find_element(By.ID, f'{row_id}pubdate').send_keys('1999-01-20') self.select_option(f'#{row_id}status', 'option two') self.selenium.find_element(By.ID, f'{row_id}name').send_keys( ' now you haVe anöther sŤāÇkeð inline with a very loooong ' ) slug1 = self.selenium.find_element(By.ID, f'{row_id}slug1').get_attribute('value') slug2 = self.selenium.find_element(By.ID, f'{row_id}slug2').get_attribute('value') self.assertEqual(slug1, 'now-you-have-another-stacked-inline-with-a-very-lo') self.assertEqual(slug2, 'option-two') # Save and check that everything is properly stored in the database with self.wait_page_loaded(): self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() self.assertEqual(MainPrepopulated.objects.all().count(), 1) MainPrepopulated.objects.get( name=' the mAin nÀMë and it\'s awεšomeıııİ', pubdate='2012-02-18', status='option two', slug1='the-main-name-and-its-awesomeiiii-2012-02-18', slug2='option-two-the-main-name-and-its-awesomeiiii', slug3='the-main-nàmë-and-its-awεšomeıııi', ) self.assertEqual(RelatedPrepopulated.objects.all().count(), 6) RelatedPrepopulated.objects.get( name=' here is a sŤāÇkeð inline ! ', pubdate='2011-12-17', status='option one', slug1='here-is-a-stacked-inline-2011-12-17', slug2='option-one-here-is-a-stacked-inline', ) RelatedPrepopulated.objects.get( # 75 characters in name field name=' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooo', pubdate='1999-01-25', status='option two', slug1='now-you-have-another-stacked-inline-with-a-very-lo', slug2='option-two-now-you-have-another-stacked-inline-with-a-very-l', ) RelatedPrepopulated.objects.get( name='And now, with a tÃbűlaŘ inline !!!', pubdate='1234-12-07', status='option two', slug1='and-now-with-a-tabular-inline-1234-12-07', slug2='option-two-and-now-with-a-tabular-inline', ) RelatedPrepopulated.objects.get( name=r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters', pubdate='1981-08-22', status='option one', slug1='tabular-inline-with-ignored-characters-1981-08-22', slug2='option-one-tabular-inline-with-ignored-characters', ) def test_populate_existing_object(self): """ The prepopulation works for existing objects too, as long as the original field is empty (#19082). """ from selenium.webdriver.common.by import By # Slugs are empty to start with. 
        item = MainPrepopulated.objects.create(
            name=' this is the mAin nÀMë',
            pubdate='2012-02-18',
            status='option two',
            slug1='',
            slug2='',
        )
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        object_url = self.live_server_url + reverse('admin:admin_views_mainprepopulated_change', args=(item.id,))

        self.selenium.get(object_url)
        self.selenium.find_element(By.ID, 'id_name').send_keys(' the best')

        # The slugs got prepopulated since they were originally empty
        slug1 = self.selenium.find_element(By.ID, 'id_slug1').get_attribute('value')
        slug2 = self.selenium.find_element(By.ID, 'id_slug2').get_attribute('value')
        self.assertEqual(slug1, 'this-is-the-main-name-the-best-2012-02-18')
        self.assertEqual(slug2, 'option-two-this-is-the-main-name-the-best')

        # Save the object
        with self.wait_page_loaded():
            self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()

        self.selenium.get(object_url)
        self.selenium.find_element(By.ID, 'id_name').send_keys(' hello')

        # The slugs didn't change since they were no longer empty after the first save.
        slug1 = self.selenium.find_element(By.ID, 'id_slug1').get_attribute('value')
        slug2 = self.selenium.find_element(By.ID, 'id_slug2').get_attribute('value')
        self.assertEqual(slug1, 'this-is-the-main-name-the-best-2012-02-18')
        self.assertEqual(slug2, 'option-two-this-is-the-main-name-the-best')

    def test_collapsible_fieldset(self):
        """
        The 'collapse' class in a fieldset definition allows showing and
        hiding the corresponding field section.
        """
        from selenium.webdriver.common.by import By
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_add'))
        self.assertFalse(self.selenium.find_element(By.ID, 'id_title').is_displayed())
        self.selenium.find_elements(By.LINK_TEXT, 'Show')[0].click()
        self.assertTrue(self.selenium.find_element(By.ID, 'id_title').is_displayed())
        self.assertEqual(self.selenium.find_element(By.ID, 'fieldsetcollapser0').text, "Hide")

    def test_selectbox_height_collapsible_fieldset(self):
        from selenium.webdriver.common.by import By
        self.admin_login(
            username='super',
            password='secret',
            login_url=reverse('admin7:index'),
        )
        url = self.live_server_url + reverse('admin7:admin_views_pizza_add')
        self.selenium.get(url)
        self.selenium.find_elements(By.LINK_TEXT, 'Show')[0].click()
        filter_box = self.selenium.find_element(By.ID, 'id_toppings_filter')
        from_box = self.selenium.find_element(By.ID, 'id_toppings_from')
        to_box = self.selenium.find_element(By.ID, 'id_toppings_to')
        self.assertEqual(
            to_box.get_property('offsetHeight'),
            (
                filter_box.get_property('offsetHeight') +
                from_box.get_property('offsetHeight')
            ),
        )

    def test_selectbox_height_not_collapsible_fieldset(self):
        from selenium.webdriver.common.by import By
        self.admin_login(
            username='super',
            password='secret',
            login_url=reverse('admin7:index'),
        )
        url = self.live_server_url + reverse('admin7:admin_views_question_add')
        self.selenium.get(url)
        filter_box = self.selenium.find_element(By.ID, 'id_related_questions_filter')
        from_box = self.selenium.find_element(By.ID, 'id_related_questions_from')
        to_box = self.selenium.find_element(By.ID, 'id_related_questions_to')
        self.assertEqual(
            to_box.get_property('offsetHeight'),
            (
                filter_box.get_property('offsetHeight') +
                from_box.get_property('offsetHeight')
            ),
        )

    def test_first_field_focus(self):
        """JavaScript-assisted auto-focus on first usable form field."""
        from selenium.webdriver.common.by import By
        # First form field has a single widget
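        # (For the MultiWidget case further down, focus is expected on the
        # first subwidget, hence the assertion on 'id_start_date_0'.)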
self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) with self.wait_page_loaded(): self.selenium.get(self.live_server_url + reverse('admin:admin_views_picture_add')) self.assertEqual( self.selenium.switch_to.active_element, self.selenium.find_element(By.ID, 'id_name') ) # First form field has a MultiWidget with self.wait_page_loaded(): self.selenium.get(self.live_server_url + reverse('admin:admin_views_reservation_add')) self.assertEqual( self.selenium.switch_to.active_element, self.selenium.find_element(By.ID, 'id_start_date_0') ) def test_cancel_delete_confirmation(self): "Cancelling the deletion of an object takes the user back one page." from selenium.webdriver.common.by import By pizza = Pizza.objects.create(name="Double Cheese") url = reverse('admin:admin_views_pizza_change', args=(pizza.id,)) full_url = self.live_server_url + url self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(full_url) self.selenium.find_element(By.CLASS_NAME, 'deletelink').click() # Click 'cancel' on the delete page. self.selenium.find_element(By.CLASS_NAME, 'cancel-link').click() # Wait until we're back on the change page. self.wait_for_text('#content h1', 'Change pizza') self.assertEqual(self.selenium.current_url, full_url) self.assertEqual(Pizza.objects.count(), 1) def test_cancel_delete_related_confirmation(self): """ Cancelling the deletion of an object with relations takes the user back one page. """ from selenium.webdriver.common.by import By pizza = Pizza.objects.create(name="Double Cheese") topping1 = Topping.objects.create(name="Cheddar") topping2 = Topping.objects.create(name="Mozzarella") pizza.toppings.add(topping1, topping2) url = reverse('admin:admin_views_pizza_change', args=(pizza.id,)) full_url = self.live_server_url + url self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(full_url) self.selenium.find_element(By.CLASS_NAME, 'deletelink').click() # Click 'cancel' on the delete page. self.selenium.find_element(By.CLASS_NAME, 'cancel-link').click() # Wait until we're back on the change page. self.wait_for_text('#content h1', 'Change pizza') self.assertEqual(self.selenium.current_url, full_url) self.assertEqual(Pizza.objects.count(), 1) self.assertEqual(Topping.objects.count(), 2) def test_list_editable_popups(self): """ list_editable foreign keys have add/change popups. """ from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select s1 = Section.objects.create(name='Test section') Article.objects.create( title='foo', content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=s1, ) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_changelist')) # Change popup self.selenium.find_element(By.ID, 'change_id_form-0-section').click() self.wait_for_and_switch_to_popup() self.wait_for_text('#content h1', 'Change section') name_input = self.selenium.find_element(By.ID, 'id_name') name_input.clear() name_input.send_keys('<i>edited section</i>') self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) # Hide sidebar. 
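        # (Presumably so the expanded navigation sidebar cannot overlap the
        # list_editable widgets inspected below; '#toggle-nav-sidebar' is the
        # toggle button rendered by the admin's base template.)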
toggle_button = self.selenium.find_element(By.CSS_SELECTOR, '#toggle-nav-sidebar') toggle_button.click() select = Select(self.selenium.find_element(By.ID, 'id_form-0-section')) self.assertEqual(select.first_selected_option.text, '<i>edited section</i>') # Rendered select2 input. select2_display = self.selenium.find_element(By.CLASS_NAME, 'select2-selection__rendered') # Clear button (×\n) is included in text. self.assertEqual(select2_display.text, '×\n<i>edited section</i>') # Add popup self.selenium.find_element(By.ID, 'add_id_form-0-section').click() self.wait_for_and_switch_to_popup() self.wait_for_text('#content h1', 'Add section') self.selenium.find_element(By.ID, 'id_name').send_keys('new section') self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) select = Select(self.selenium.find_element(By.ID, 'id_form-0-section')) self.assertEqual(select.first_selected_option.text, 'new section') select2_display = self.selenium.find_element(By.CLASS_NAME, 'select2-selection__rendered') # Clear button (×\n) is included in text. self.assertEqual(select2_display.text, '×\nnew section') def test_inline_uuid_pk_edit_with_popup(self): from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select parent = ParentWithUUIDPK.objects.create(title='test') related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,)) self.selenium.get(self.live_server_url + change_url) self.selenium.find_element(By.ID, 'change_id_parent').click() self.wait_for_and_switch_to_popup() self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) select = Select(self.selenium.find_element(By.ID, 'id_parent')) self.assertEqual(select.first_selected_option.text, str(parent.id)) self.assertEqual(select.first_selected_option.get_attribute('value'), str(parent.id)) def test_inline_uuid_pk_add_with_popup(self): from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) self.selenium.get(self.live_server_url + reverse('admin:admin_views_relatedwithuuidpkmodel_add')) self.selenium.find_element(By.ID, 'add_id_parent').click() self.wait_for_and_switch_to_popup() self.selenium.find_element(By.ID, 'id_title').send_keys('test') self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() self.selenium.switch_to.window(self.selenium.window_handles[0]) select = Select(self.selenium.find_element(By.ID, 'id_parent')) uuid_id = str(ParentWithUUIDPK.objects.first().id) self.assertEqual(select.first_selected_option.text, uuid_id) self.assertEqual(select.first_selected_option.get_attribute('value'), uuid_id) def test_inline_uuid_pk_delete_with_popup(self): from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select parent = ParentWithUUIDPK.objects.create(title='test') related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent) self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,)) self.selenium.get(self.live_server_url + change_url) 
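        # The related-field widget wrapper exposes per-field action links whose
        # ids follow the 'add_id_<field>' / 'change_id_<field>' /
        # 'delete_id_<field>' pattern used throughout these popup tests; here
        # the delete link opens the confirmation popup.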
        self.selenium.find_element(By.ID, 'delete_id_parent').click()
        self.wait_for_and_switch_to_popup()
        self.selenium.find_element(By.XPATH, '//input[@value="Yes, I’m sure"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        select = Select(self.selenium.find_element(By.ID, 'id_parent'))
        self.assertEqual(ParentWithUUIDPK.objects.count(), 0)
        self.assertEqual(select.first_selected_option.text, '---------')
        self.assertEqual(select.first_selected_option.get_attribute('value'), '')

    def test_inline_with_popup_cancel_delete(self):
        """Clicking "No, take me back" on a delete popup closes the window."""
        from selenium.webdriver.common.by import By
        parent = ParentWithUUIDPK.objects.create(title='test')
        related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
        self.selenium.get(self.live_server_url + change_url)
        self.selenium.find_element(By.ID, 'delete_id_parent').click()
        self.wait_for_and_switch_to_popup()
        self.selenium.find_element(By.XPATH, '//a[text()="No, take me back"]').click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        self.assertEqual(len(self.selenium.window_handles), 1)

    def test_list_editable_raw_id_fields(self):
        from selenium.webdriver.common.by import By
        parent = ParentWithUUIDPK.objects.create(title='test')
        parent2 = ParentWithUUIDPK.objects.create(title='test2')
        RelatedWithUUIDPKModel.objects.create(parent=parent)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_changelist', current_app=site2.name)
        self.selenium.get(self.live_server_url + change_url)
        self.selenium.find_element(By.ID, 'lookup_id_form-0-parent').click()
        self.wait_for_and_switch_to_popup()
        # Select "parent2" in the popup.
        self.selenium.find_element(By.LINK_TEXT, str(parent2.pk)).click()
        self.selenium.switch_to.window(self.selenium.window_handles[0])
        # The newly selected pk should appear in the raw id input.
        value = self.selenium.find_element(By.ID, 'id_form-0-parent').get_attribute('value')
        self.assertEqual(value, str(parent2.pk))

    def test_input_element_font(self):
        """
        Browsers' default stylesheets override the font of inputs. The admin
        adds additional CSS to handle this.
        """
        from selenium.webdriver.common.by import By
        self.selenium.get(self.live_server_url + reverse('admin:login'))
        element = self.selenium.find_element(By.ID, 'id_username')
        # Some browsers quote the fonts, some don't.
        fonts = [
            font.strip().strip('"')
            for font in element.value_of_css_property('font-family').split(',')
        ]
        self.assertEqual(
            fonts,
            ['Roboto', 'Lucida Grande', 'Verdana', 'Arial', 'sans-serif'],
        )

    def test_search_input_filtered_page(self):
        from selenium.webdriver.common.by import By
        Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
        Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
        person_url = reverse('admin:admin_views_person_changelist') + '?q=Gui'
        self.selenium.get(self.live_server_url + person_url)
        self.assertGreater(
            self.selenium.find_element(By.ID, 'searchbar').rect['width'],
            50,
        )

    def test_related_popup_index(self):
        """
        Create a chain of 'self' related objects via popups.
""" from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) add_url = reverse('admin:admin_views_box_add', current_app=site.name) self.selenium.get(self.live_server_url + add_url) base_window = self.selenium.current_window_handle self.selenium.find_element(By.ID, 'add_id_next_box').click() self.wait_for_and_switch_to_popup() popup_window_test = self.selenium.current_window_handle self.selenium.find_element(By.ID, 'id_title').send_keys('test') self.selenium.find_element(By.ID, 'add_id_next_box').click() self.wait_for_and_switch_to_popup(num_windows=3) popup_window_test2 = self.selenium.current_window_handle self.selenium.find_element(By.ID, 'id_title').send_keys('test2') self.selenium.find_element(By.ID, 'add_id_next_box').click() self.wait_for_and_switch_to_popup(num_windows=4) self.selenium.find_element(By.ID, 'id_title').send_keys('test3') self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() self.selenium.switch_to.window(popup_window_test2) select = Select(self.selenium.find_element(By.ID, 'id_next_box')) next_box_id = str(Box.objects.get(title="test3").id) self.assertEqual(select.first_selected_option.get_attribute('value'), next_box_id) self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() self.selenium.switch_to.window(popup_window_test) select = Select(self.selenium.find_element(By.ID, 'id_next_box')) next_box_id = str(Box.objects.get(title="test2").id) self.assertEqual(select.first_selected_option.get_attribute('value'), next_box_id) self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() self.selenium.switch_to.window(base_window) select = Select(self.selenium.find_element(By.ID, 'id_next_box')) next_box_id = str(Box.objects.get(title="test").id) self.assertEqual(select.first_selected_option.get_attribute('value'), next_box_id) def test_related_popup_incorrect_close(self): """ Cleanup child popups when closing a parent popup. """ from selenium.webdriver.common.by import By self.admin_login(username='super', password='secret', login_url=reverse('admin:index')) add_url = reverse('admin:admin_views_box_add', current_app=site.name) self.selenium.get(self.live_server_url + add_url) self.selenium.find_element(By.ID, 'add_id_next_box').click() self.wait_for_and_switch_to_popup() test_window = self.selenium.current_window_handle self.selenium.find_element(By.ID, 'id_title').send_keys('test') self.selenium.find_element(By.ID, 'add_id_next_box').click() self.wait_for_and_switch_to_popup(num_windows=3) test2_window = self.selenium.current_window_handle self.selenium.find_element(By.ID, 'id_title').send_keys('test2') self.selenium.find_element(By.ID, 'add_id_next_box').click() self.wait_for_and_switch_to_popup(num_windows=4) self.assertEqual(len(self.selenium.window_handles), 4) self.selenium.switch_to.window(test2_window) self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() self.wait_until(lambda d: len(d.window_handles) == 2, 1) self.assertEqual(len(self.selenium.window_handles), 2) # Close final popup to clean up test. 
self.selenium.switch_to.window(test_window) self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() self.wait_until(lambda d: len(d.window_handles) == 1, 1) self.selenium.switch_to.window(self.selenium.window_handles[-1]) def test_hidden_fields_small_window(self): from selenium.webdriver.common.by import By self.admin_login( username='super', password='secret', login_url=reverse('admin:index'), ) self.selenium.get(self.live_server_url + reverse('admin:admin_views_story_add')) field_title = self.selenium.find_element(By.CLASS_NAME, 'field-title') current_size = self.selenium.get_window_size() try: self.selenium.set_window_size(1024, 768) self.assertIs(field_title.is_displayed(), False) self.selenium.set_window_size(767, 575) self.assertIs(field_title.is_displayed(), False) finally: self.selenium.set_window_size(current_size['width'], current_size['height']) @override_settings(ROOT_URLCONF='admin_views.urls') class ReadonlyTest(AdminFieldExtractionMixin, TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_readonly_get(self): response = self.client.get(reverse('admin:admin_views_post_add')) self.assertNotContains(response, 'name="posted"') # 3 fields + 2 submit buttons + 5 inline management form fields, + 2 # hidden fields for inlines + 1 field for the inline + 2 empty form self.assertContains(response, "<input", count=16) self.assertContains(response, formats.localize(datetime.date.today())) self.assertContains(response, "<label>Awesomeness level:</label>") self.assertContains(response, "Very awesome.") self.assertContains(response, "Unknown coolness.") self.assertContains(response, "foo") # Multiline text in a readonly field gets <br> tags self.assertContains(response, 'Multiline<br>test<br>string') self.assertContains(response, '<div class="readonly">Multiline<br>html<br>content</div>', html=True) self.assertContains(response, 'InlineMultiline<br>test<br>string') self.assertContains(response, formats.localize(datetime.date.today() - datetime.timedelta(days=7))) self.assertContains(response, '<div class="form-row field-coolness">') self.assertContains(response, '<div class="form-row field-awesomeness_level">') self.assertContains(response, '<div class="form-row field-posted">') self.assertContains(response, '<div class="form-row field-value">') self.assertContains(response, '<div class="form-row">') self.assertContains(response, '<div class="help">', 3) self.assertContains( response, '<div class="help">Some help text for the title (with Unicode ŠĐĆŽćžšđ)</div>', html=True ) self.assertContains( response, '<div class="help">Some help text for the content (with Unicode ŠĐĆŽćžšđ)</div>', html=True ) self.assertContains( response, '<div class="help">Some help text for the date (with Unicode ŠĐĆŽćžšđ)</div>', html=True ) p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff") response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,))) self.assertContains(response, "%d amount of cool" % p.pk) def test_readonly_text_field(self): p = Post.objects.create( title="Readonly test", content="test", readonly_content='test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest', ) Link.objects.create( url="http://www.djangoproject.com", post=p, readonly_link_content="test\r\nlink", ) response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,))) # Checking readonly 
field. self.assertContains(response, 'test<br><br>test<br><br>test<br><br>test') # Checking readonly field in inline. self.assertContains(response, 'test<br>link') def test_readonly_post(self): data = { "title": "Django Got Readonly Fields", "content": "This is an incredible development.", "link_set-TOTAL_FORMS": "1", "link_set-INITIAL_FORMS": "0", "link_set-MAX_NUM_FORMS": "0", } response = self.client.post(reverse('admin:admin_views_post_add'), data) self.assertEqual(response.status_code, 302) self.assertEqual(Post.objects.count(), 1) p = Post.objects.get() self.assertEqual(p.posted, datetime.date.today()) data["posted"] = "10-8-1990" # some date that's not today response = self.client.post(reverse('admin:admin_views_post_add'), data) self.assertEqual(response.status_code, 302) self.assertEqual(Post.objects.count(), 2) p = Post.objects.order_by('-id')[0] self.assertEqual(p.posted, datetime.date.today()) def test_readonly_manytomany(self): "Regression test for #13004" response = self.client.get(reverse('admin:admin_views_pizza_add')) self.assertEqual(response.status_code, 200) def test_user_password_change_limited_queryset(self): su = User.objects.filter(is_superuser=True)[0] response = self.client.get(reverse('admin2:auth_user_password_change', args=(su.pk,))) self.assertEqual(response.status_code, 404) def test_change_form_renders_correct_null_choice_value(self): """ Regression test for #17911. """ choice = Choice.objects.create(choice=None) response = self.client.get(reverse('admin:admin_views_choice_change', args=(choice.pk,))) self.assertContains(response, '<div class="readonly">No opinion</div>', html=True) def _test_readonly_foreignkey_links(self, admin_site): """ ForeignKey readonly fields render as links if the target model is registered in admin. """ chapter = Chapter.objects.create( title='Chapter 1', content='content', book=Book.objects.create(name='Book 1'), ) language = Language.objects.create(iso='_40', name='Test') obj = ReadOnlyRelatedField.objects.create( chapter=chapter, language=language, user=self.superuser, ) response = self.client.get( reverse(f'{admin_site}:admin_views_readonlyrelatedfield_change', args=(obj.pk,)), ) # Related ForeignKey object registered in admin. user_url = reverse(f'{admin_site}:auth_user_change', args=(self.superuser.pk,)) self.assertContains( response, '<div class="readonly"><a href="%s">super</a></div>' % user_url, html=True, ) # Related ForeignKey with the string primary key registered in admin. language_url = reverse( f'{admin_site}:admin_views_language_change', args=(quote(language.pk),), ) self.assertContains( response, '<div class="readonly"><a href="%s">_40</a></div>' % language_url, html=True, ) # Related ForeignKey object not registered in admin. 
self.assertContains(response, '<div class="readonly">Chapter 1</div>', html=True) def test_readonly_foreignkey_links_default_admin_site(self): self._test_readonly_foreignkey_links('admin') def test_readonly_foreignkey_links_custom_admin_site(self): self._test_readonly_foreignkey_links('namespaced_admin') def test_readonly_manytomany_backwards_ref(self): """ Regression test for #16433 - backwards references for related objects broke if the related field is read-only due to the help_text attribute """ topping = Topping.objects.create(name='Salami') pizza = Pizza.objects.create(name='Americano') pizza.toppings.add(topping) response = self.client.get(reverse('admin:admin_views_topping_add')) self.assertEqual(response.status_code, 200) def test_readonly_manytomany_forwards_ref(self): topping = Topping.objects.create(name='Salami') pizza = Pizza.objects.create(name='Americano') pizza.toppings.add(topping) response = self.client.get(reverse('admin:admin_views_pizza_change', args=(pizza.pk,))) self.assertContains(response, '<label>Toppings:</label>', html=True) self.assertContains(response, '<div class="readonly">Salami</div>', html=True) def test_readonly_onetoone_backwards_ref(self): """ Can reference a reverse OneToOneField in ModelAdmin.readonly_fields. """ v1 = Villain.objects.create(name='Adam') pl = Plot.objects.create(name='Test Plot', team_leader=v1, contact=v1) pd = PlotDetails.objects.create(details='Brand New Plot', plot=pl) response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,))) field = self.get_admin_readonly_field(response, 'plotdetails') pd_url = reverse('admin:admin_views_plotdetails_change', args=(pd.pk,)) self.assertEqual(field.contents(), '<a href="%s">Brand New Plot</a>' % pd_url) # The reverse relation also works if the OneToOneField is null. 
pd.plot = None pd.save() response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,))) field = self.get_admin_readonly_field(response, 'plotdetails') self.assertEqual(field.contents(), '-') # default empty value def test_readonly_field_overrides(self): """ Regression test for #22087 - ModelForm Meta overrides are ignored by AdminReadonlyField """ p = FieldOverridePost.objects.create(title="Test Post", content="Test Content") response = self.client.get(reverse('admin:admin_views_fieldoverridepost_change', args=(p.pk,))) self.assertContains(response, '<div class="help">Overridden help text for the date</div>') self.assertContains(response, '<label for="id_public">Overridden public label:</label>', html=True) self.assertNotContains(response, 'Some help text for the date (with Unicode ŠĐĆŽćžšđ)') def test_correct_autoescaping(self): """ Make sure that non-field readonly elements are properly autoescaped (#24461) """ section = Section.objects.create(name='<a>evil</a>') response = self.client.get(reverse('admin:admin_views_section_change', args=(section.pk,))) self.assertNotContains(response, "<a>evil</a>", status_code=200) self.assertContains(response, "&lt;a&gt;evil&lt;/a&gt;", status_code=200) def test_label_suffix_translated(self): pizza = Pizza.objects.create(name='Americano') url = reverse('admin:admin_views_pizza_change', args=(pizza.pk,)) with self.settings(LANGUAGE_CODE='fr'): response = self.client.get(url) self.assertContains(response, '<label>Toppings\u00A0:</label>', html=True) @override_settings(ROOT_URLCONF='admin_views.urls') class LimitChoicesToInAdminTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_limit_choices_to_as_callable(self): """Test for ticket 2445 changes to admin.""" threepwood = Character.objects.create( username='threepwood', last_action=datetime.datetime.today() + datetime.timedelta(days=1), ) marley = Character.objects.create( username='marley', last_action=datetime.datetime.today() - datetime.timedelta(days=1), ) response = self.client.get(reverse('admin:admin_views_stumpjoke_add')) # The allowed option should appear twice; the limited option should not appear. self.assertContains(response, threepwood.username, count=2) self.assertNotContains(response, marley.username) @override_settings(ROOT_URLCONF='admin_views.urls') class RawIdFieldsTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_limit_choices_to(self): """Regression test for 14880""" actor = Actor.objects.create(name="Palin", age=27) Inquisition.objects.create(expected=True, leader=actor, country="England") Inquisition.objects.create(expected=False, leader=actor, country="Spain") response = self.client.get(reverse('admin:admin_views_sketch_add')) # Find the link m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content) self.assertTrue(m) # Got a match popup_url = m[1].decode().replace('&amp;', '&') # Handle relative links popup_url = urljoin(response.request['PATH_INFO'], popup_url) # Get the popup and verify the correct objects show up in the resulting # page. 
This step also tests integers, strings and booleans in the # lookup query string; in model we define inquisition field to have a # limit_choices_to option that includes a filter on a string field # (inquisition__actor__name), a filter on an integer field # (inquisition__actor__age), and a filter on a boolean field # (inquisition__expected). response2 = self.client.get(popup_url) self.assertContains(response2, "Spain") self.assertNotContains(response2, "England") def test_limit_choices_to_isnull_false(self): """Regression test for 20182""" Actor.objects.create(name="Palin", age=27) Actor.objects.create(name="Kilbraken", age=50, title="Judge") response = self.client.get(reverse('admin:admin_views_sketch_add')) # Find the link m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content) self.assertTrue(m) # Got a match popup_url = m[1].decode().replace('&amp;', '&') # Handle relative links popup_url = urljoin(response.request['PATH_INFO'], popup_url) # Get the popup and verify the correct objects show up in the resulting # page. This step tests field__isnull=0 gets parsed correctly from the # lookup query string; in model we define defendant0 field to have a # limit_choices_to option that includes "actor__title__isnull=False". response2 = self.client.get(popup_url) self.assertContains(response2, "Kilbraken") self.assertNotContains(response2, "Palin") def test_limit_choices_to_isnull_true(self): """Regression test for 20182""" Actor.objects.create(name="Palin", age=27) Actor.objects.create(name="Kilbraken", age=50, title="Judge") response = self.client.get(reverse('admin:admin_views_sketch_add')) # Find the link m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content) self.assertTrue(m) # Got a match popup_url = m[1].decode().replace('&amp;', '&') # Handle relative links popup_url = urljoin(response.request['PATH_INFO'], popup_url) # Get the popup and verify the correct objects show up in the resulting # page. This step tests field__isnull=1 gets parsed correctly from the # lookup query string; in model we define defendant1 field to have a # limit_choices_to option that includes "actor__title__isnull=True". response2 = self.client.get(popup_url) self.assertNotContains(response2, "Kilbraken") self.assertContains(response2, "Palin") def test_list_display_method_same_name_as_reverse_accessor(self): """ Should be able to use a ModelAdmin method in list_display that has the same name as a reverse model field ("sketch" in this case). """ actor = Actor.objects.create(name="Palin", age=27) Inquisition.objects.create(expected=True, leader=actor, country="England") response = self.client.get(reverse('admin:admin_views_inquisition_changelist')) self.assertContains(response, 'list-display-sketch') @override_settings(ROOT_URLCONF='admin_views.urls') class UserAdminTest(TestCase): """ Tests user CRUD functionality. 
""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True) cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True) cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True) cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False) cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True) def setUp(self): self.client.force_login(self.superuser) def test_save_button(self): user_count = User.objects.count() response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', }) new_user = User.objects.get(username='newuser') self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,))) self.assertEqual(User.objects.count(), user_count + 1) self.assertTrue(new_user.has_usable_password()) def test_save_continue_editing_button(self): user_count = User.objects.count() response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', '_continue': '1', }) new_user = User.objects.get(username='newuser') new_user_url = reverse('admin:auth_user_change', args=(new_user.pk,)) self.assertRedirects(response, new_user_url, fetch_redirect_response=False) self.assertEqual(User.objects.count(), user_count + 1) self.assertTrue(new_user.has_usable_password()) response = self.client.get(new_user_url) self.assertContains( response, '<li class="success">The user “<a href="%s">' '%s</a>” was added successfully. 
You may edit it again below.</li>' % (new_user_url, new_user), html=True, ) def test_password_mismatch(self): response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'mismatch', }) self.assertEqual(response.status_code, 200) self.assertFormError(response, 'adminform', 'password', []) self.assertFormError(response, 'adminform', 'password2', ['The two password fields didn’t match.']) def test_user_fk_add_popup(self): """User addition through a FK popup should return the appropriate JavaScript response.""" response = self.client.get(reverse('admin:admin_views_album_add')) self.assertContains(response, reverse('admin:auth_user_add')) self.assertContains(response, 'class="related-widget-wrapper-link add-related" id="add_id_owner"') response = self.client.get(reverse('admin:auth_user_add') + '?%s=1' % IS_POPUP_VAR) self.assertNotContains(response, 'name="_continue"') self.assertNotContains(response, 'name="_addanother"') data = { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', IS_POPUP_VAR: '1', '_save': '1', } response = self.client.post(reverse('admin:auth_user_add') + '?%s=1' % IS_POPUP_VAR, data, follow=True) self.assertContains(response, '&quot;obj&quot;: &quot;newuser&quot;') def test_user_fk_change_popup(self): """User change through a FK popup should return the appropriate JavaScript response.""" response = self.client.get(reverse('admin:admin_views_album_add')) self.assertContains(response, reverse('admin:auth_user_change', args=('__fk__',))) self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"') user = User.objects.get(username='changeuser') url = reverse('admin:auth_user_change', args=(user.pk,)) + '?%s=1' % IS_POPUP_VAR response = self.client.get(url) self.assertNotContains(response, 'name="_continue"') self.assertNotContains(response, 'name="_addanother"') data = { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', 'last_login_0': '2007-05-30', 'last_login_1': '13:20:10', 'date_joined_0': '2007-05-30', 'date_joined_1': '13:20:10', IS_POPUP_VAR: '1', '_save': '1', } response = self.client.post(url, data, follow=True) self.assertContains(response, '&quot;obj&quot;: &quot;newuser&quot;') self.assertContains(response, '&quot;action&quot;: &quot;change&quot;') def test_user_fk_delete_popup(self): """User deletion through a FK popup should return the appropriate JavaScript response.""" response = self.client.get(reverse('admin:admin_views_album_add')) self.assertContains(response, reverse('admin:auth_user_delete', args=('__fk__',))) self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"') user = User.objects.get(username='changeuser') url = reverse('admin:auth_user_delete', args=(user.pk,)) + '?%s=1' % IS_POPUP_VAR response = self.client.get(url) self.assertEqual(response.status_code, 200) data = { 'post': 'yes', IS_POPUP_VAR: '1', } response = self.client.post(url, data, follow=True) self.assertContains(response, '&quot;action&quot;: &quot;delete&quot;') def test_save_add_another_button(self): user_count = User.objects.count() response = self.client.post(reverse('admin:auth_user_add'), { 'username': 'newuser', 'password1': 'newpassword', 'password2': 'newpassword', '_addanother': '1', }) new_user = User.objects.order_by('-id')[0] self.assertRedirects(response, reverse('admin:auth_user_add')) self.assertEqual(User.objects.count(), user_count + 1) 
self.assertTrue(new_user.has_usable_password()) def test_user_permission_performance(self): u = User.objects.all()[0] # Don't depend on a warm cache, see #17377. ContentType.objects.clear_cache() with self.assertNumQueries(10): response = self.client.get(reverse('admin:auth_user_change', args=(u.pk,))) self.assertEqual(response.status_code, 200) def test_form_url_present_in_context(self): u = User.objects.all()[0] response = self.client.get(reverse('admin3:auth_user_password_change', args=(u.pk,))) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['form_url'], 'pony') @override_settings(ROOT_URLCONF='admin_views.urls') class GroupAdminTest(TestCase): """ Tests group CRUD functionality. """ @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_save_button(self): group_count = Group.objects.count() response = self.client.post(reverse('admin:auth_group_add'), { 'name': 'newgroup', }) Group.objects.order_by('-id')[0] self.assertRedirects(response, reverse('admin:auth_group_changelist')) self.assertEqual(Group.objects.count(), group_count + 1) def test_group_permission_performance(self): g = Group.objects.create(name="test_group") # Ensure no queries are skipped due to cached content type for Group. ContentType.objects.clear_cache() with self.assertNumQueries(8): response = self.client.get(reverse('admin:auth_group_change', args=(g.pk,))) self.assertEqual(response.status_code, 200) @override_settings(ROOT_URLCONF='admin_views.urls') class CSSTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = Section.objects.create(name='Test section') cls.a1 = Article.objects.create( content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a2 = Article.objects.create( content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1 ) cls.a3 = Article.objects.create( content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1 ) cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title') def setUp(self): self.client.force_login(self.superuser) def test_field_prefix_css_classes(self): """ Fields have a CSS class name with a 'field-' prefix. """ response = self.client.get(reverse('admin:admin_views_post_add')) # The main form self.assertContains(response, 'class="form-row field-title"') self.assertContains(response, 'class="form-row field-content"') self.assertContains(response, 'class="form-row field-public"') self.assertContains(response, 'class="form-row field-awesomeness_level"') self.assertContains(response, 'class="form-row field-coolness"') self.assertContains(response, 'class="form-row field-value"') self.assertContains(response, 'class="form-row"') # The lambda function # The tabular inline self.assertContains(response, '<td class="field-url">') self.assertContains(response, '<td class="field-posted">') def test_index_css_classes(self): """ CSS class names are used for each app and model on the admin index pages (#17050). 
""" # General index page response = self.client.get(reverse('admin:index')) self.assertContains(response, '<div class="app-admin_views module') self.assertContains(response, '<tr class="model-actor">') self.assertContains(response, '<tr class="model-album">') # App index page response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertContains(response, '<div class="app-admin_views module') self.assertContains(response, '<tr class="model-actor">') self.assertContains(response, '<tr class="model-album">') def test_app_model_in_form_body_class(self): """ Ensure app and model tag are correctly read by change_form template """ response = self.client.get(reverse('admin:admin_views_section_add')) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_app_model_in_list_body_class(self): """ Ensure app and model tag are correctly read by change_list template """ response = self.client.get(reverse('admin:admin_views_section_changelist')) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_app_model_in_delete_confirmation_body_class(self): """ Ensure app and model tag are correctly read by delete_confirmation template """ response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_app_model_in_app_index_body_class(self): """ Ensure app and model tag are correctly read by app_index template """ response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertContains(response, '<body class=" dashboard app-admin_views') def test_app_model_in_delete_selected_confirmation_body_class(self): """ Ensure app and model tag are correctly read by delete_selected_confirmation template """ action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk], 'action': 'delete_selected', 'index': 0, } response = self.client.post(reverse('admin:admin_views_section_changelist'), action_data) self.assertContains(response, '<body class=" app-admin_views model-section ') def test_changelist_field_classes(self): """ Cells of the change list table should contain the field name in their class attribute Refs #11195. 
""" Podcast.objects.create(name="Django Dose", release_date=datetime.date.today()) response = self.client.get(reverse('admin:admin_views_podcast_changelist')) self.assertContains(response, '<th class="field-name">') self.assertContains(response, '<td class="field-release_date nowrap">') self.assertContains(response, '<td class="action-checkbox">') try: import docutils except ImportError: docutils = None @unittest.skipUnless(docutils, "no docutils installed.") @override_settings(ROOT_URLCONF='admin_views.urls') @modify_settings(INSTALLED_APPS={'append': ['django.contrib.admindocs', 'django.contrib.flatpages']}) class AdminDocsTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_tags(self): response = self.client.get(reverse('django-admindocs-tags')) # The builtin tag group exists self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True) # A builtin tag exists in both the index and detail self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True) self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True) # An app tag exists in both the index and detail self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True) self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True) # The admin list tag group exists self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True) # An admin list tag exists in both the index and detail self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True) self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True) def test_filters(self): response = self.client.get(reverse('django-admindocs-filters')) # The builtin filter group exists self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True) # A builtin filter exists in both the index and detail self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True) self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True) @override_settings( ROOT_URLCONF='admin_views.urls', TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }], ) class ValidXHTMLTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_lang_name_present(self): with translation.override(None): response = self.client.get(reverse('admin:app_list', args=('admin_views',))) self.assertNotContains(response, ' lang=""') self.assertNotContains(response, ' xml:lang=""') @override_settings(ROOT_URLCONF='admin_views.urls', USE_THOUSAND_SEPARATOR=True) class DateHierarchyTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def assert_non_localized_year(self, response, year): 
""" The year is not localized with USE_THOUSAND_SEPARATOR (#15234). """ self.assertNotContains(response, formats.number_format(year)) def assert_contains_year_link(self, response, date): self.assertContains(response, '?release_date__year=%d"' % date.year) def assert_contains_month_link(self, response, date): self.assertContains( response, '?release_date__month=%d&amp;release_date__year=%d"' % ( date.month, date.year)) def assert_contains_day_link(self, response, date): self.assertContains( response, '?release_date__day=%d&amp;' 'release_date__month=%d&amp;release_date__year=%d"' % ( date.day, date.month, date.year)) def test_empty(self): """ No date hierarchy links display with empty changelist. """ response = self.client.get( reverse('admin:admin_views_podcast_changelist')) self.assertNotContains(response, 'release_date__year=') self.assertNotContains(response, 'release_date__month=') self.assertNotContains(response, 'release_date__day=') def test_single(self): """ Single day-level date hierarchy appears for single object. """ DATE = datetime.date(2000, 6, 30) Podcast.objects.create(release_date=DATE) url = reverse('admin:admin_views_podcast_changelist') response = self.client.get(url) self.assert_contains_day_link(response, DATE) self.assert_non_localized_year(response, 2000) def test_within_month(self): """ day-level links appear for changelist within single month. """ DATES = (datetime.date(2000, 6, 30), datetime.date(2000, 6, 15), datetime.date(2000, 6, 3)) for date in DATES: Podcast.objects.create(release_date=date) url = reverse('admin:admin_views_podcast_changelist') response = self.client.get(url) for date in DATES: self.assert_contains_day_link(response, date) self.assert_non_localized_year(response, 2000) def test_within_year(self): """ month-level links appear for changelist within single year. """ DATES = (datetime.date(2000, 1, 30), datetime.date(2000, 3, 15), datetime.date(2000, 5, 3)) for date in DATES: Podcast.objects.create(release_date=date) url = reverse('admin:admin_views_podcast_changelist') response = self.client.get(url) # no day-level links self.assertNotContains(response, 'release_date__day=') for date in DATES: self.assert_contains_month_link(response, date) self.assert_non_localized_year(response, 2000) def test_multiple_years(self): """ year-level links appear for year-spanning changelist. 
""" DATES = (datetime.date(2001, 1, 30), datetime.date(2003, 3, 15), datetime.date(2005, 5, 3)) for date in DATES: Podcast.objects.create(release_date=date) response = self.client.get( reverse('admin:admin_views_podcast_changelist')) # no day/month-level links self.assertNotContains(response, 'release_date__day=') self.assertNotContains(response, 'release_date__month=') for date in DATES: self.assert_contains_year_link(response, date) # and make sure GET parameters still behave correctly for date in DATES: url = '%s?release_date__year=%d' % ( reverse('admin:admin_views_podcast_changelist'), date.year) response = self.client.get(url) self.assert_contains_month_link(response, date) self.assert_non_localized_year(response, 2000) self.assert_non_localized_year(response, 2003) self.assert_non_localized_year(response, 2005) url = '%s?release_date__year=%d&release_date__month=%d' % ( reverse('admin:admin_views_podcast_changelist'), date.year, date.month) response = self.client.get(url) self.assert_contains_day_link(response, date) self.assert_non_localized_year(response, 2000) self.assert_non_localized_year(response, 2003) self.assert_non_localized_year(response, 2005) def test_related_field(self): questions_data = ( # (posted data, number of answers), (datetime.date(2001, 1, 30), 0), (datetime.date(2003, 3, 15), 1), (datetime.date(2005, 5, 3), 2), ) for date, answer_count in questions_data: question = Question.objects.create(posted=date) for i in range(answer_count): question.answer_set.create() response = self.client.get(reverse('admin:admin_views_answer_changelist')) for date, answer_count in questions_data: link = '?question__posted__year=%d"' % date.year if answer_count > 0: self.assertContains(response, link) else: self.assertNotContains(response, link) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminCustomSaveRelatedTests(TestCase): """ One can easily customize the way related objects are saved. Refs #16115. 
""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_should_be_able_to_edit_related_objects_on_add_view(self): post = { 'child_set-TOTAL_FORMS': '3', 'child_set-INITIAL_FORMS': '0', 'name': 'Josh Stone', 'child_set-0-name': 'Paul', 'child_set-1-name': 'Catherine', } self.client.post(reverse('admin:admin_views_parent_add'), post) self.assertEqual(1, Parent.objects.count()) self.assertEqual(2, Child.objects.count()) children_names = list(Child.objects.order_by('name').values_list('name', flat=True)) self.assertEqual('Josh Stone', Parent.objects.latest('id').name) self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names) def test_should_be_able_to_edit_related_objects_on_change_view(self): parent = Parent.objects.create(name='Josh Stone') paul = Child.objects.create(parent=parent, name='Paul') catherine = Child.objects.create(parent=parent, name='Catherine') post = { 'child_set-TOTAL_FORMS': '5', 'child_set-INITIAL_FORMS': '2', 'name': 'Josh Stone', 'child_set-0-name': 'Paul', 'child_set-0-id': paul.id, 'child_set-1-name': 'Catherine', 'child_set-1-id': catherine.id, } self.client.post(reverse('admin:admin_views_parent_change', args=(parent.id,)), post) children_names = list(Child.objects.order_by('name').values_list('name', flat=True)) self.assertEqual('Josh Stone', Parent.objects.latest('id').name) self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names) def test_should_be_able_to_edit_related_objects_on_changelist_view(self): parent = Parent.objects.create(name='Josh Rock') Child.objects.create(parent=parent, name='Paul') Child.objects.create(parent=parent, name='Catherine') post = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '0', 'form-0-id': parent.id, 'form-0-name': 'Josh Stone', '_save': 'Save' } self.client.post(reverse('admin:admin_views_parent_changelist'), post) children_names = list(Child.objects.order_by('name').values_list('name', flat=True)) self.assertEqual('Josh Stone', Parent.objects.latest('id').name) self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewLogoutTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def test_logout(self): self.client.force_login(self.superuser) response = self.client.get(reverse('admin:logout')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'registration/logged_out.html') self.assertEqual(response.request['PATH_INFO'], reverse('admin:logout')) self.assertFalse(response.context['has_permission']) self.assertNotContains(response, 'user-tools') # user-tools div shouldn't visible. def test_client_logout_url_can_be_used_to_login(self): response = self.client.get(reverse('admin:logout')) self.assertEqual(response.status_code, 302) # we should be redirected to the login page. # follow the redirect and test results. 
response = self.client.get(reverse('admin:logout'), follow=True) self.assertContains( response, '<input type="hidden" name="next" value="%s">' % reverse('admin:index'), ) self.assertTemplateUsed(response, 'admin/login.html') self.assertEqual(response.request['PATH_INFO'], reverse('admin:login')) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminUserMessageTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def send_message(self, level): """ Helper that sends a post to the dummy test methods and asserts that a message with the level has appeared in the response. """ action_data = { ACTION_CHECKBOX_NAME: [1], 'action': 'message_%s' % level, 'index': 0, } response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'), action_data, follow=True) self.assertContains(response, '<li class="%s">Test %s</li>' % (level, level), html=True) @override_settings(MESSAGE_LEVEL=10) # Set to DEBUG for this request def test_message_debug(self): self.send_message('debug') def test_message_info(self): self.send_message('info') def test_message_success(self): self.send_message('success') def test_message_warning(self): self.send_message('warning') def test_message_error(self): self.send_message('error') def test_message_extra_tags(self): action_data = { ACTION_CHECKBOX_NAME: [1], 'action': 'message_extra_tags', 'index': 0, } response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'), action_data, follow=True) self.assertContains(response, '<li class="extra_tag info">Test tags</li>', html=True) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminKeepChangeListFiltersTests(TestCase): admin_site = site @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret') def setUp(self): self.client.force_login(self.superuser) def assertURLEqual(self, url1, url2, msg_prefix=''): """ Assert that two URLs are equal despite the ordering of their querystring. Refs #22360. """ parsed_url1 = urlparse(url1) path1 = parsed_url1.path parsed_qs1 = dict(parse_qsl(parsed_url1.query)) parsed_url2 = urlparse(url2) path2 = parsed_url2.path parsed_qs2 = dict(parse_qsl(parsed_url2.query)) for parsed_qs in [parsed_qs1, parsed_qs2]: if '_changelist_filters' in parsed_qs: changelist_filters = parsed_qs['_changelist_filters'] parsed_filters = dict(parse_qsl(changelist_filters)) parsed_qs['_changelist_filters'] = parsed_filters self.assertEqual(path1, path2) self.assertEqual(parsed_qs1, parsed_qs2) def test_assert_url_equal(self): # Test equality. change_user_url = reverse('admin:auth_user_change', args=(self.joepublicuser.pk,)) self.assertURLEqual( 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ), 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ) ) # Test inequality. with self.assertRaises(AssertionError): self.assertURLEqual( 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ), 'http://testserver{}?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'.format( change_user_url ) ) # Ignore scheme and host. 
self.assertURLEqual( 'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format( change_user_url ), '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url) ) # Ignore ordering of querystring. self.assertURLEqual( '{}?is_staff__exact=0&is_superuser__exact=0'.format(reverse('admin:auth_user_changelist')), '{}?is_superuser__exact=0&is_staff__exact=0'.format(reverse('admin:auth_user_changelist')) ) # Ignore ordering of _changelist_filters. self.assertURLEqual( '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url), '{}?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'.format(change_user_url) ) def get_changelist_filters(self): return { 'is_superuser__exact': 0, 'is_staff__exact': 0, } def get_changelist_filters_querystring(self): return urlencode(self.get_changelist_filters()) def get_preserved_filters_querystring(self): return urlencode({ '_changelist_filters': self.get_changelist_filters_querystring() }) def get_sample_user_id(self): return self.joepublicuser.pk def get_changelist_url(self): return '%s?%s' % ( reverse('admin:auth_user_changelist', current_app=self.admin_site.name), self.get_changelist_filters_querystring(), ) def get_add_url(self, add_preserved_filters=True): url = reverse('admin:auth_user_add', current_app=self.admin_site.name) if add_preserved_filters: url = '%s?%s' % (url, self.get_preserved_filters_querystring()) return url def get_change_url(self, user_id=None, add_preserved_filters=True): if user_id is None: user_id = self.get_sample_user_id() url = reverse('admin:auth_user_change', args=(user_id,), current_app=self.admin_site.name) if add_preserved_filters: url = '%s?%s' % (url, self.get_preserved_filters_querystring()) return url def get_history_url(self, user_id=None): if user_id is None: user_id = self.get_sample_user_id() return "%s?%s" % ( reverse('admin:auth_user_history', args=(user_id,), current_app=self.admin_site.name), self.get_preserved_filters_querystring(), ) def get_delete_url(self, user_id=None): if user_id is None: user_id = self.get_sample_user_id() return "%s?%s" % ( reverse('admin:auth_user_delete', args=(user_id,), current_app=self.admin_site.name), self.get_preserved_filters_querystring(), ) def test_changelist_view(self): response = self.client.get(self.get_changelist_url()) self.assertEqual(response.status_code, 200) # Check the `change_view` link has the correct querystring. detail_link = re.search( '<a href="(.*?)">{}</a>'.format(self.joepublicuser.username), response.content.decode() ) self.assertURLEqual(detail_link[1], self.get_change_url()) def test_change_view(self): # Get the `change_view`. response = self.client.get(self.get_change_url()) self.assertEqual(response.status_code, 200) # Check the form action. form_action = re.search( '<form action="(.*?)" method="post" id="user_form" novalidate>', response.content.decode() ) self.assertURLEqual(form_action[1], '?%s' % self.get_preserved_filters_querystring()) # Check the history link. history_link = re.search( '<a href="(.*?)" class="historylink">History</a>', response.content.decode() ) self.assertURLEqual(history_link[1], self.get_history_url()) # Check the delete link. delete_link = re.search( '<a href="(.*?)" class="deletelink">Delete</a>', response.content.decode() ) self.assertURLEqual(delete_link[1], self.get_delete_url()) # Test redirect on "Save". 
post_data = { 'username': 'joepublic', 'last_login_0': '2007-05-30', 'last_login_1': '13:20:10', 'date_joined_0': '2007-05-30', 'date_joined_1': '13:20:10', } post_data['_save'] = 1 response = self.client.post(self.get_change_url(), data=post_data) self.assertRedirects(response, self.get_changelist_url()) post_data.pop('_save') # Test redirect on "Save and continue". post_data['_continue'] = 1 response = self.client.post(self.get_change_url(), data=post_data) self.assertRedirects(response, self.get_change_url()) post_data.pop('_continue') # Test redirect on "Save and add new". post_data['_addanother'] = 1 response = self.client.post(self.get_change_url(), data=post_data) self.assertRedirects(response, self.get_add_url()) post_data.pop('_addanother') def test_change_view_without_preserved_filters(self): response = self.client.get(self.get_change_url(add_preserved_filters=False)) # The action attribute is omitted. self.assertContains(response, '<form method="post" id="user_form" novalidate>') def test_add_view(self): # Get the `add_view`. response = self.client.get(self.get_add_url()) self.assertEqual(response.status_code, 200) # Check the form action. form_action = re.search( '<form action="(.*?)" method="post" id="user_form" novalidate>', response.content.decode() ) self.assertURLEqual(form_action[1], '?%s' % self.get_preserved_filters_querystring()) post_data = { 'username': 'dummy', 'password1': 'test', 'password2': 'test', } # Test redirect on "Save". post_data['_save'] = 1 response = self.client.post(self.get_add_url(), data=post_data) self.assertRedirects(response, self.get_change_url(User.objects.get(username='dummy').pk)) post_data.pop('_save') # Test redirect on "Save and continue". post_data['username'] = 'dummy2' post_data['_continue'] = 1 response = self.client.post(self.get_add_url(), data=post_data) self.assertRedirects(response, self.get_change_url(User.objects.get(username='dummy2').pk)) post_data.pop('_continue') # Test redirect on "Save and add new". post_data['username'] = 'dummy3' post_data['_addanother'] = 1 response = self.client.post(self.get_add_url(), data=post_data) self.assertRedirects(response, self.get_add_url()) post_data.pop('_addanother') def test_add_view_without_preserved_filters(self): response = self.client.get(self.get_add_url(add_preserved_filters=False)) # The action attribute is omitted. self.assertContains(response, '<form method="post" id="user_form" novalidate>') def test_delete_view(self): # Test redirect on "Delete". response = self.client.post(self.get_delete_url(), {'post': 'yes'}) self.assertRedirects(response, self.get_changelist_url()) def test_url_prefix(self): context = { 'preserved_filters': self.get_preserved_filters_querystring(), 'opts': User._meta, } prefixes = ('', '/prefix/', '/後台/') for prefix in prefixes: with self.subTest(prefix=prefix), override_script_prefix(prefix): url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name) self.assertURLEqual( self.get_changelist_url(), add_preserved_filters(context, url), ) class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests): admin_site = site2 @override_settings(ROOT_URLCONF='admin_views.urls') class TestLabelVisibility(TestCase): """ #11277 -Labels of hidden fields in admin were not hidden. 
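    The EmptyModel* ModelAdmins exercised below are assumed to hide none, all,
    or only the first of their two fields; the assertions check for the
    "hidden" CSS class on the field boxes and form rows accordingly.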
""" @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_all_fields_visible(self): response = self.client.get(reverse('admin:admin_views_emptymodelvisible_add')) self.assert_fieldline_visible(response) self.assert_field_visible(response, 'first') self.assert_field_visible(response, 'second') def test_all_fields_hidden(self): response = self.client.get(reverse('admin:admin_views_emptymodelhidden_add')) self.assert_fieldline_hidden(response) self.assert_field_hidden(response, 'first') self.assert_field_hidden(response, 'second') def test_mixin(self): response = self.client.get(reverse('admin:admin_views_emptymodelmixin_add')) self.assert_fieldline_visible(response) self.assert_field_hidden(response, 'first') self.assert_field_visible(response, 'second') def assert_field_visible(self, response, field_name): self.assertContains(response, '<div class="fieldBox field-%s">' % field_name) def assert_field_hidden(self, response, field_name): self.assertContains(response, '<div class="fieldBox field-%s hidden">' % field_name) def assert_fieldline_visible(self, response): self.assertContains(response, '<div class="form-row field-first field-second">') def assert_fieldline_hidden(self, response): self.assertContains(response, '<div class="form-row hidden') @override_settings(ROOT_URLCONF='admin_views.urls') class AdminViewOnSiteTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = State.objects.create(name='New York') cls.s2 = State.objects.create(name='Illinois') cls.s3 = State.objects.create(name='California') cls.c1 = City.objects.create(state=cls.s1, name='New York') cls.c2 = City.objects.create(state=cls.s2, name='Chicago') cls.c3 = City.objects.create(state=cls.s3, name='San Francisco') cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza') cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard') cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner') cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels') cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away') cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant') cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi') cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi') cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe') def setUp(self): self.client.force_login(self.superuser) def test_add_view_form_and_formsets_run_validation(self): """ Issue #20522 Verifying that if the parent form fails validation, the inlines also run validation even if validation is contingent on parent form data. Also, assertFormError() and assertFormsetError() is usable for admin forms and formsets. 
""" # The form validation should fail because 'some_required_info' is # not included on the parent form, and the family_name of the parent # does not match that of the child post_data = { 'family_name': 'Test1', 'dependentchild_set-TOTAL_FORMS': '1', 'dependentchild_set-INITIAL_FORMS': '0', 'dependentchild_set-MAX_NUM_FORMS': '1', 'dependentchild_set-0-id': '', 'dependentchild_set-0-parent': '', 'dependentchild_set-0-family_name': 'Test2', } response = self.client.post(reverse('admin:admin_views_parentwithdependentchildren_add'), post_data) self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.']) msg = "The form 'adminform' in context 0 does not contain the non-field error 'Error'" with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'adminform', None, ['Error']) self.assertFormsetError( response, 'inline_admin_formset', 0, None, ['Children must share a family name with their parents in this contrived test case'] ) msg = "The formset 'inline_admin_formset' in context 22 does not contain any non-form errors." with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(response, 'inline_admin_formset', None, None, ['Error']) def test_change_view_form_and_formsets_run_validation(self): """ Issue #20522 Verifying that if the parent form fails validation, the inlines also run validation even if validation is contingent on parent form data """ pwdc = ParentWithDependentChildren.objects.create(some_required_info=6, family_name='Test1') # The form validation should fail because 'some_required_info' is # not included on the parent form, and the family_name of the parent # does not match that of the child post_data = { 'family_name': 'Test2', 'dependentchild_set-TOTAL_FORMS': '1', 'dependentchild_set-INITIAL_FORMS': '0', 'dependentchild_set-MAX_NUM_FORMS': '1', 'dependentchild_set-0-id': '', 'dependentchild_set-0-parent': str(pwdc.id), 'dependentchild_set-0-family_name': 'Test1', } response = self.client.post( reverse('admin:admin_views_parentwithdependentchildren_change', args=(pwdc.id,)), post_data ) self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.']) self.assertFormsetError( response, 'inline_admin_formset', 0, None, ['Children must share a family name with their parents in this contrived test case'] ) def test_check(self): "The view_on_site value is either a boolean or a callable" try: admin = CityAdmin(City, AdminSite()) CityAdmin.view_on_site = True self.assertEqual(admin.check(), []) CityAdmin.view_on_site = False self.assertEqual(admin.check(), []) CityAdmin.view_on_site = lambda obj: obj.get_absolute_url() self.assertEqual(admin.check(), []) CityAdmin.view_on_site = [] self.assertEqual(admin.check(), [ Error( "The value of 'view_on_site' must be a callable or a boolean value.", obj=CityAdmin, id='admin.E025', ), ]) finally: # Restore the original values for the benefit of other tests. 
CityAdmin.view_on_site = True def test_false(self): "The 'View on site' button is not displayed if view_on_site is False" response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,))) content_type_pk = ContentType.objects.get_for_model(Restaurant).pk self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1))) def test_true(self): "The default behavior is followed if view_on_site is True" response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,))) content_type_pk = ContentType.objects.get_for_model(City).pk self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk))) def test_callable(self): "The right link is displayed if view_on_site is a callable" response = self.client.get(reverse('admin:admin_views_worker_change', args=(self.w1.pk,))) self.assertContains(response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name)) def test_missing_get_absolute_url(self): "None is returned if model doesn't have get_absolute_url" model_admin = ModelAdmin(Worker, None) self.assertIsNone(model_admin.get_view_on_site_url(Worker())) @override_settings(ROOT_URLCONF='admin_views.urls') class InlineAdminViewOnSiteTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = State.objects.create(name='New York') cls.s2 = State.objects.create(name='Illinois') cls.s3 = State.objects.create(name='California') cls.c1 = City.objects.create(state=cls.s1, name='New York') cls.c2 = City.objects.create(state=cls.s2, name='Chicago') cls.c3 = City.objects.create(state=cls.s3, name='San Francisco') cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza') cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard') cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner') cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels') cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away') cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant') cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi') cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi') cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe') def setUp(self): self.client.force_login(self.superuser) def test_false(self): "The 'View on site' button is not displayed if view_on_site is False" response = self.client.get(reverse('admin:admin_views_state_change', args=(self.s1.pk,))) content_type_pk = ContentType.objects.get_for_model(City).pk self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk))) def test_true(self): "The 'View on site' button is displayed if view_on_site is True" response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,))) content_type_pk = ContentType.objects.get_for_model(Restaurant).pk self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.r1.pk))) def test_callable(self): "The right link is displayed if view_on_site is a callable" response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,))) self.assertContains(response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name)) @override_settings(ROOT_URLCONF='admin_views.urls') class GetFormsetsWithInlinesArgumentTest(TestCase): """ #23934 - When adding a new model instance in the admin, 
the 'obj' argument of get_formsets_with_inlines() should be None. When
    changing, it should be equal to the existing model instance. The
    GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception if obj is
    not None during add_view or obj is None during change_view.
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_explicitly_provided_pk(self):
        post_data = {'name': '1'}
        response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_add'), post_data)
        self.assertEqual(response.status_code, 302)

        post_data = {'name': '2'}
        response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_change', args=(1,)), post_data)
        self.assertEqual(response.status_code, 302)

    def test_implicitly_generated_pk(self):
        post_data = {'name': '1'}
        response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_add'), post_data)
        self.assertEqual(response.status_code, 302)

        post_data = {'name': '2'}
        response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_change', args=(1,)), post_data)
        self.assertEqual(response.status_code, 302)


@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminSiteFinalCatchAllPatternTests(TestCase):
    """
    Verifies the behaviour of the admin catch-all view.

    * Anonymous/non-staff users are redirected to login for all URLs, whether
      otherwise valid or not.
    * APPEND_SLASH is applied for staff if needed.
    * Otherwise Http404.
    * Catch-all view disabled via AdminSite.final_catch_all_view.
    """
    def test_unknown_url_redirects_login_if_not_authenticated(self):
        unknown_url = '/test_admin/admin/unknown/'
        response = self.client.get(unknown_url)
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), unknown_url))

    def test_unknown_url_404_if_authenticated(self):
        superuser = User.objects.create_superuser(
            username='super',
            password='secret',
            email='[email protected]',
        )
        self.client.force_login(superuser)
        unknown_url = '/test_admin/admin/unknown/'
        response = self.client.get(unknown_url)
        self.assertEqual(response.status_code, 404)

    def test_known_url_redirects_login_if_not_authenticated(self):
        known_url = reverse('admin:admin_views_article_changelist')
        response = self.client.get(known_url)
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), known_url))

    def test_known_url_missing_slash_redirects_login_if_not_authenticated(self):
        known_url = reverse('admin:admin_views_article_changelist')[:-1]
        response = self.client.get(known_url)
        # Redirects with the next URL also missing the slash.
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), known_url))

    def test_non_admin_url_shares_url_prefix(self):
        url = reverse('non_admin')[:-1]
        response = self.client.get(url)
        # Redirects with the next URL also missing the slash.
self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), url)) def test_url_without_trailing_slash_if_not_authenticated(self): url = reverse('admin:article_extra_json') response = self.client.get(url) self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), url)) def test_unkown_url_without_trailing_slash_if_not_authenticated(self): url = reverse('admin:article_extra_json')[:-1] response = self.client.get(url) self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), url)) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_unknown_url(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) unknown_url = '/test_admin/admin/unknown/' response = self.client.get(unknown_url[:-1]) self.assertEqual(response.status_code, 404) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects(response, known_url, status_code=301, target_status_code=403) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_script_name(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin:admin_views_article_changelist') response = self.client.get(known_url[:-1], SCRIPT_NAME='/prefix/') self.assertRedirects( response, '/prefix' + known_url, status_code=301, fetch_redirect_response=False, ) @override_settings(APPEND_SLASH=True, FORCE_SCRIPT_NAME='/prefix/') def test_missing_slash_append_slash_true_force_script_name(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects( response, '/prefix' + known_url, status_code=301, fetch_redirect_response=False, ) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_non_staff_user(self): user = User.objects.create_user( username='user', password='secret', email='[email protected]', is_staff=False, ) self.client.force_login(user) known_url = reverse('admin:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects(response, '/test_admin/admin/login/?next=/test_admin/admin/admin_views/article') @override_settings(APPEND_SLASH=False) def test_missing_slash_append_slash_false(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertEqual(response.status_code, 404) @override_settings(APPEND_SLASH=True) def test_single_model_no_append_slash(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin9:admin_views_actor_changelist') response = self.client.get(known_url[:-1]) self.assertEqual(response.status_code, 404) # Same tests above with 
final_catch_all_view=False. def test_unknown_url_404_if_not_authenticated_without_final_catch_all_view(self): unknown_url = '/test_admin/admin10/unknown/' response = self.client.get(unknown_url) self.assertEqual(response.status_code, 404) def test_unknown_url_404_if_authenticated_without_final_catch_all_view(self): superuser = User.objects.create_superuser( username='super', password='secret', email='[email protected]', ) self.client.force_login(superuser) unknown_url = '/test_admin/admin10/unknown/' response = self.client.get(unknown_url) self.assertEqual(response.status_code, 404) def test_known_url_redirects_login_if_not_authenticated_without_final_catch_all_view(self): known_url = reverse('admin10:admin_views_article_changelist') response = self.client.get(known_url) self.assertRedirects(response, '%s?next=%s' % (reverse('admin10:login'), known_url)) def test_known_url_missing_slash_redirects_with_slash_if_not_authenticated_without_final_catch_all_view(self): known_url = reverse('admin10:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects(response, known_url, status_code=301, fetch_redirect_response=False) def test_non_admin_url_shares_url_prefix_without_final_catch_all_view(self): url = reverse('non_admin10') response = self.client.get(url[:-1]) self.assertRedirects(response, url, status_code=301) def test_url_without_trailing_slash_if_not_authenticated_without_final_catch_all_view(self): url = reverse('admin10:article_extra_json') response = self.client.get(url) self.assertRedirects(response, '%s?next=%s' % (reverse('admin10:login'), url)) def test_unkown_url_without_trailing_slash_if_not_authenticated_without_final_catch_all_view(self): url = reverse('admin10:article_extra_json')[:-1] response = self.client.get(url) # Matches test_admin/admin10/admin_views/article/<path:object_id>/ self.assertRedirects(response, url + '/', status_code=301, fetch_redirect_response=False) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_unknown_url_without_final_catch_all_view(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) unknown_url = '/test_admin/admin10/unknown/' response = self.client.get(unknown_url[:-1]) self.assertEqual(response.status_code, 404) @override_settings(APPEND_SLASH=True) def test_missing_slash_append_slash_true_without_final_catch_all_view(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin10:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertRedirects(response, known_url, status_code=301, target_status_code=403) @override_settings(APPEND_SLASH=False) def test_missing_slash_append_slash_false_without_final_catch_all_view(self): superuser = User.objects.create_user( username='staff', password='secret', email='[email protected]', is_staff=True, ) self.client.force_login(superuser) known_url = reverse('admin10:admin_views_article_changelist') response = self.client.get(known_url[:-1]) self.assertEqual(response.status_code, 404) # Outside admin. def test_non_admin_url_404_if_not_authenticated(self): unknown_url = '/unknown/' response = self.client.get(unknown_url) # Does not redirect to the admin login. self.assertEqual(response.status_code, 404)
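

# Illustrative sketch only (not used by the tests above): the 'admin10' URLs
# exercised by the *_without_final_catch_all_view tests are assumed to be
# served by an AdminSite configured roughly like this. The class name is
# hypothetical; only the final_catch_all_view attribute, mentioned in the
# class docstring above, is the point.
class NoCatchAllAdminSiteSketch(AdminSite):
    # With final_catch_all_view=False, unknown URLs under the admin prefix are
    # not caught and redirected to the admin login; they fall through to
    # normal URL resolution and typically end in Http404.
    final_catch_all_view = False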
import datetime import tempfile import uuid from django.contrib import admin from django.contrib.auth.models import User from django.contrib.contenttypes.fields import ( GenericForeignKey, GenericRelation, ) from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError from django.core.files.storage import FileSystemStorage from django.db import models class Section(models.Model): """ A simple section that links to articles, to test linking to related items in admin views. """ name = models.CharField(max_length=100) def __str__(self): return self.name @property def name_property(self): """ A property that simply returns the name. Used to test #24461 """ return self.name class Article(models.Model): """ A simple article to test admin views. Test backwards compatibility. """ title = models.CharField(max_length=100) content = models.TextField() date = models.DateTimeField() section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True) another_section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True, related_name='+') sub_section = models.ForeignKey(Section, models.SET_NULL, null=True, blank=True, related_name='+') def __str__(self): return self.title @admin.display(ordering='date', description='') def model_year(self): return self.date.year @admin.display(ordering='-date', description='') def model_year_reversed(self): return self.date.year @property @admin.display(ordering='date') def model_property_year(self): return self.date.year @property def model_month(self): return self.date.month class Book(models.Model): """ A simple book that has chapters. """ name = models.CharField(max_length=100, verbose_name='¿Name?') def __str__(self): return self.name class Promo(models.Model): name = models.CharField(max_length=100, verbose_name='¿Name?') book = models.ForeignKey(Book, models.CASCADE) author = models.ForeignKey(User, models.SET_NULL, blank=True, null=True) def __str__(self): return self.name class Chapter(models.Model): title = models.CharField(max_length=100, verbose_name='¿Title?') content = models.TextField() book = models.ForeignKey(Book, models.CASCADE) class Meta: # Use a utf-8 bytestring to ensure it works (see #11710) verbose_name = '¿Chapter?' 
def __str__(self): return self.title class ChapterXtra1(models.Model): chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?') xtra = models.CharField(max_length=100, verbose_name='¿Xtra?') guest_author = models.ForeignKey(User, models.SET_NULL, blank=True, null=True) def __str__(self): return '¿Xtra1: %s' % self.xtra class ChapterXtra2(models.Model): chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?') xtra = models.CharField(max_length=100, verbose_name='¿Xtra?') def __str__(self): return '¿Xtra2: %s' % self.xtra class RowLevelChangePermissionModel(models.Model): name = models.CharField(max_length=100, blank=True) class CustomArticle(models.Model): content = models.TextField() date = models.DateTimeField() class ModelWithStringPrimaryKey(models.Model): string_pk = models.CharField(max_length=255, primary_key=True) def __str__(self): return self.string_pk def get_absolute_url(self): return '/dummy/%s/' % self.string_pk class Color(models.Model): value = models.CharField(max_length=10) warm = models.BooleanField(default=False) def __str__(self): return self.value # we replicate Color to register with another ModelAdmin class Color2(Color): class Meta: proxy = True class Thing(models.Model): title = models.CharField(max_length=20) color = models.ForeignKey(Color, models.CASCADE, limit_choices_to={'warm': True}) pub_date = models.DateField(blank=True, null=True) def __str__(self): return self.title class Actor(models.Model): name = models.CharField(max_length=50) age = models.IntegerField() title = models.CharField(max_length=50, null=True, blank=True) def __str__(self): return self.name class Inquisition(models.Model): expected = models.BooleanField(default=False) leader = models.ForeignKey(Actor, models.CASCADE) country = models.CharField(max_length=20) def __str__(self): return "by %s from %s" % (self.leader, self.country) class Sketch(models.Model): title = models.CharField(max_length=100) inquisition = models.ForeignKey( Inquisition, models.CASCADE, limit_choices_to={ 'leader__name': 'Palin', 'leader__age': 27, 'expected': False, }, ) defendant0 = models.ForeignKey( Actor, models.CASCADE, limit_choices_to={'title__isnull': False}, related_name='as_defendant0', ) defendant1 = models.ForeignKey( Actor, models.CASCADE, limit_choices_to={'title__isnull': True}, related_name='as_defendant1', ) def __str__(self): return self.title def today_callable_dict(): return {"last_action__gte": datetime.datetime.today()} def today_callable_q(): return models.Q(last_action__gte=datetime.datetime.today()) class Character(models.Model): username = models.CharField(max_length=100) last_action = models.DateTimeField() def __str__(self): return self.username class StumpJoke(models.Model): variation = models.CharField(max_length=100) most_recently_fooled = models.ForeignKey( Character, models.CASCADE, limit_choices_to=today_callable_dict, related_name="+", ) has_fooled_today = models.ManyToManyField(Character, limit_choices_to=today_callable_q, related_name="+") def __str__(self): return self.variation class Fabric(models.Model): NG_CHOICES = ( ('Textured', ( ('x', 'Horizontal'), ('y', 'Vertical'), )), ('plain', 'Smooth'), ) surface = models.CharField(max_length=20, choices=NG_CHOICES) class Person(models.Model): GENDER_CHOICES = ( (1, "Male"), (2, "Female"), ) name = models.CharField(max_length=100) gender = models.IntegerField(choices=GENDER_CHOICES) age = models.IntegerField(default=21) alive = models.BooleanField(default=True) def __str__(self): return self.name 
class Persona(models.Model): """ A simple persona associated with accounts, to test inlining of related accounts which inherit from a common accounts class. """ name = models.CharField(blank=False, max_length=80) def __str__(self): return self.name class Account(models.Model): """ A simple, generic account encapsulating the information shared by all types of accounts. """ username = models.CharField(blank=False, max_length=80) persona = models.ForeignKey(Persona, models.CASCADE, related_name="accounts") servicename = 'generic service' def __str__(self): return "%s: %s" % (self.servicename, self.username) class FooAccount(Account): """A service-specific account of type Foo.""" servicename = 'foo' class BarAccount(Account): """A service-specific account of type Bar.""" servicename = 'bar' class Subscriber(models.Model): name = models.CharField(blank=False, max_length=80) email = models.EmailField(blank=False, max_length=175) def __str__(self): return "%s (%s)" % (self.name, self.email) class ExternalSubscriber(Subscriber): pass class OldSubscriber(Subscriber): pass class Media(models.Model): name = models.CharField(max_length=60) class Podcast(Media): release_date = models.DateField() class Meta: ordering = ('release_date',) # overridden in PodcastAdmin class Vodcast(Media): media = models.OneToOneField(Media, models.CASCADE, primary_key=True, parent_link=True) released = models.BooleanField(default=False) class Parent(models.Model): name = models.CharField(max_length=128) def clean(self): if self.name == '_invalid': raise ValidationError('invalid') class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE, editable=False) name = models.CharField(max_length=30, blank=True) def clean(self): if self.name == '_invalid': raise ValidationError('invalid') class PKChild(models.Model): """ Used to check autocomplete to_field resolution when ForeignKey is PK. 
""" parent = models.ForeignKey(Parent, models.CASCADE, primary_key=True) name = models.CharField(max_length=128) class Meta: ordering = ['parent'] def __str__(self): return self.name class Toy(models.Model): child = models.ForeignKey(PKChild, models.CASCADE) class EmptyModel(models.Model): def __str__(self): return "Primary key = %s" % self.id temp_storage = FileSystemStorage(tempfile.mkdtemp()) class Gallery(models.Model): name = models.CharField(max_length=100) class Picture(models.Model): name = models.CharField(max_length=100) image = models.FileField(storage=temp_storage, upload_to='test_upload') gallery = models.ForeignKey(Gallery, models.CASCADE, related_name="pictures") class Language(models.Model): iso = models.CharField(max_length=5, primary_key=True) name = models.CharField(max_length=50) english_name = models.CharField(max_length=50) shortlist = models.BooleanField(default=False) def __str__(self): return self.iso class Meta: ordering = ('iso',) # a base class for Recommender and Recommendation class Title(models.Model): pass class TitleTranslation(models.Model): title = models.ForeignKey(Title, models.CASCADE) text = models.CharField(max_length=100) class Recommender(Title): pass class Recommendation(Title): the_recommender = models.ForeignKey(Recommender, models.CASCADE) class Collector(models.Model): name = models.CharField(max_length=100) class Widget(models.Model): owner = models.ForeignKey(Collector, models.CASCADE) name = models.CharField(max_length=100) class DooHickey(models.Model): code = models.CharField(max_length=10, primary_key=True) owner = models.ForeignKey(Collector, models.CASCADE) name = models.CharField(max_length=100) class Grommet(models.Model): code = models.AutoField(primary_key=True) owner = models.ForeignKey(Collector, models.CASCADE) name = models.CharField(max_length=100) class Whatsit(models.Model): index = models.IntegerField(primary_key=True) owner = models.ForeignKey(Collector, models.CASCADE) name = models.CharField(max_length=100) class Doodad(models.Model): name = models.CharField(max_length=100) class FancyDoodad(Doodad): owner = models.ForeignKey(Collector, models.CASCADE) expensive = models.BooleanField(default=True) class Category(models.Model): collector = models.ForeignKey(Collector, models.CASCADE) order = models.PositiveIntegerField() class Meta: ordering = ('order',) def __str__(self): return '%s:o%s' % (self.id, self.order) def link_posted_default(): return datetime.date.today() - datetime.timedelta(days=7) class Link(models.Model): posted = models.DateField(default=link_posted_default) url = models.URLField() post = models.ForeignKey("Post", models.CASCADE) readonly_link_content = models.TextField() class PrePopulatedPost(models.Model): title = models.CharField(max_length=100) published = models.BooleanField(default=False) slug = models.SlugField() class PrePopulatedSubPost(models.Model): post = models.ForeignKey(PrePopulatedPost, models.CASCADE) subtitle = models.CharField(max_length=100) subslug = models.SlugField() class Post(models.Model): title = models.CharField(max_length=100, help_text='Some help text for the title (with Unicode ŠĐĆŽćžšđ)') content = models.TextField(help_text='Some help text for the content (with Unicode ŠĐĆŽćžšđ)') readonly_content = models.TextField() posted = models.DateField( default=datetime.date.today, help_text='Some help text for the date (with Unicode ŠĐĆŽćžšđ)', ) public = models.BooleanField(null=True, blank=True) def awesomeness_level(self): return "Very awesome." 
# Proxy model to test overridden fields attrs on Post model so as not to # interfere with other tests. class FieldOverridePost(Post): class Meta: proxy = True class Gadget(models.Model): name = models.CharField(max_length=100) def __str__(self): return self.name class Villain(models.Model): name = models.CharField(max_length=100) def __str__(self): return self.name class SuperVillain(Villain): pass class FunkyTag(models.Model): "Because we all know there's only one real use case for GFKs." name = models.CharField(max_length=25) content_type = models.ForeignKey(ContentType, models.CASCADE) object_id = models.PositiveIntegerField() content_object = GenericForeignKey('content_type', 'object_id') def __str__(self): return self.name class Plot(models.Model): name = models.CharField(max_length=100) team_leader = models.ForeignKey(Villain, models.CASCADE, related_name='lead_plots') contact = models.ForeignKey(Villain, models.CASCADE, related_name='contact_plots') tags = GenericRelation(FunkyTag) def __str__(self): return self.name class PlotDetails(models.Model): details = models.CharField(max_length=100) plot = models.OneToOneField(Plot, models.CASCADE, null=True, blank=True) def __str__(self): return self.details class PlotProxy(Plot): class Meta: proxy = True class SecretHideout(models.Model): """ Secret! Not registered with the admin! """ location = models.CharField(max_length=100) villain = models.ForeignKey(Villain, models.CASCADE) def __str__(self): return self.location class SuperSecretHideout(models.Model): """ Secret! Not registered with the admin! """ location = models.CharField(max_length=100) supervillain = models.ForeignKey(SuperVillain, models.CASCADE) def __str__(self): return self.location class Bookmark(models.Model): name = models.CharField(max_length=60) tag = GenericRelation(FunkyTag, related_query_name='bookmark') def __str__(self): return self.name class CyclicOne(models.Model): name = models.CharField(max_length=25) two = models.ForeignKey('CyclicTwo', models.CASCADE) def __str__(self): return self.name class CyclicTwo(models.Model): name = models.CharField(max_length=25) one = models.ForeignKey(CyclicOne, models.CASCADE) def __str__(self): return self.name class Topping(models.Model): name = models.CharField(max_length=20) def __str__(self): return self.name class Pizza(models.Model): name = models.CharField(max_length=20) toppings = models.ManyToManyField('Topping', related_name='pizzas') # Pizza's ModelAdmin has readonly_fields = ['toppings']. # toppings is editable for this model's admin. class ReadablePizza(Pizza): class Meta: proxy = True # No default permissions are created for this model and both name and toppings # are readonly for this model's admin. class ReadOnlyPizza(Pizza): class Meta: proxy = True default_permissions = () class Album(models.Model): owner = models.ForeignKey(User, models.SET_NULL, null=True, blank=True) title = models.CharField(max_length=30) class Song(models.Model): name = models.CharField(max_length=20) album = models.ForeignKey(Album, on_delete=models.RESTRICT) def __str__(self): return self.name class Employee(Person): code = models.CharField(max_length=20) class Meta: ordering = ['name'] class WorkHour(models.Model): datum = models.DateField() employee = models.ForeignKey(Employee, models.CASCADE) class Manager(Employee): """ A multi-layer MTI child. 
""" pass class Bonus(models.Model): recipient = models.ForeignKey(Manager, on_delete=models.CASCADE) class Question(models.Model): big_id = models.BigAutoField(primary_key=True) question = models.CharField(max_length=20) posted = models.DateField(default=datetime.date.today) expires = models.DateTimeField(null=True, blank=True) related_questions = models.ManyToManyField('self') uuid = models.UUIDField(default=uuid.uuid4, unique=True) def __str__(self): return self.question class Answer(models.Model): question = models.ForeignKey(Question, models.PROTECT) question_with_to_field = models.ForeignKey( Question, models.SET_NULL, blank=True, null=True, to_field='uuid', related_name='uuid_answers', limit_choices_to=~models.Q(question__istartswith='not'), ) related_answers = models.ManyToManyField('self') answer = models.CharField(max_length=20) def __str__(self): return self.answer class Answer2(Answer): class Meta: proxy = True class Reservation(models.Model): start_date = models.DateTimeField() price = models.IntegerField() class FoodDelivery(models.Model): DRIVER_CHOICES = ( ('bill', 'Bill G'), ('steve', 'Steve J'), ) RESTAURANT_CHOICES = ( ('indian', 'A Taste of India'), ('thai', 'Thai Pography'), ('pizza', 'Pizza Mama'), ) reference = models.CharField(max_length=100) driver = models.CharField(max_length=100, choices=DRIVER_CHOICES, blank=True) restaurant = models.CharField(max_length=100, choices=RESTAURANT_CHOICES, blank=True) class Meta: unique_together = (("driver", "restaurant"),) class CoverLetter(models.Model): author = models.CharField(max_length=30) date_written = models.DateField(null=True, blank=True) def __str__(self): return self.author class Paper(models.Model): title = models.CharField(max_length=30) author = models.CharField(max_length=30, blank=True, null=True) class ShortMessage(models.Model): content = models.CharField(max_length=140) timestamp = models.DateTimeField(null=True, blank=True) class Telegram(models.Model): title = models.CharField(max_length=30) date_sent = models.DateField(null=True, blank=True) def __str__(self): return self.title class Story(models.Model): title = models.CharField(max_length=100) content = models.TextField() class OtherStory(models.Model): title = models.CharField(max_length=100) content = models.TextField() class ComplexSortedPerson(models.Model): name = models.CharField(max_length=100) age = models.PositiveIntegerField() is_employee = models.BooleanField(null=True) class PluggableSearchPerson(models.Model): name = models.CharField(max_length=100) age = models.PositiveIntegerField() class PrePopulatedPostLargeSlug(models.Model): """ Regression test for #15938: a large max_length for the slugfield must not be localized in prepopulated_fields_js.html or it might end up breaking the JavaScript (ie, using THOUSAND_SEPARATOR ends up with maxLength=1,000) """ title = models.CharField(max_length=100) published = models.BooleanField(default=False) # `db_index=False` because MySQL cannot index large CharField (#21196). 
slug = models.SlugField(max_length=1000, db_index=False) class AdminOrderedField(models.Model): order = models.IntegerField() stuff = models.CharField(max_length=200) class AdminOrderedModelMethod(models.Model): order = models.IntegerField() stuff = models.CharField(max_length=200) @admin.display(ordering='order') def some_order(self): return self.order class AdminOrderedAdminMethod(models.Model): order = models.IntegerField() stuff = models.CharField(max_length=200) class AdminOrderedCallable(models.Model): order = models.IntegerField() stuff = models.CharField(max_length=200) class Report(models.Model): title = models.CharField(max_length=100) def __str__(self): return self.title class MainPrepopulated(models.Model): name = models.CharField(max_length=100) pubdate = models.DateField() status = models.CharField( max_length=20, choices=(('option one', 'Option One'), ('option two', 'Option Two'))) slug1 = models.SlugField(blank=True) slug2 = models.SlugField(blank=True) slug3 = models.SlugField(blank=True, allow_unicode=True) class RelatedPrepopulated(models.Model): parent = models.ForeignKey(MainPrepopulated, models.CASCADE) name = models.CharField(max_length=75) fk = models.ForeignKey('self', models.CASCADE, blank=True, null=True) m2m = models.ManyToManyField('self', blank=True) pubdate = models.DateField() status = models.CharField( max_length=20, choices=(('option one', 'Option One'), ('option two', 'Option Two'))) slug1 = models.SlugField(max_length=50) slug2 = models.SlugField(max_length=60) class UnorderedObject(models.Model): """ Model without any defined `Meta.ordering`. Refs #16819. """ name = models.CharField(max_length=255) bool = models.BooleanField(default=True) class UndeletableObject(models.Model): """ Model whose show_delete in admin change_view has been disabled Refs #10057. """ name = models.CharField(max_length=255) class UnchangeableObject(models.Model): """ Model whose change_view is disabled in admin Refs #20640. """ class UserMessenger(models.Model): """ Dummy class for testing message_user functions on ModelAdmin """ class Simple(models.Model): """ Simple model with nothing on it for use in testing """ class Choice(models.Model): choice = models.IntegerField( blank=True, null=True, choices=((1, 'Yes'), (0, 'No'), (None, 'No opinion')), ) class ParentWithDependentChildren(models.Model): """ Issue #20522 Model where the validation of child foreign-key relationships depends on validation of the parent """ some_required_info = models.PositiveIntegerField() family_name = models.CharField(max_length=255, blank=False) class DependentChild(models.Model): """ Issue #20522 Model that depends on validation of the parent class for one of its fields to validate during clean """ parent = models.ForeignKey(ParentWithDependentChildren, models.CASCADE) family_name = models.CharField(max_length=255) class _Manager(models.Manager): def get_queryset(self): return super().get_queryset().filter(pk__gt=1) class FilteredManager(models.Model): def __str__(self): return "PK=%d" % self.pk pk_gt_1 = _Manager() objects = models.Manager() class EmptyModelVisible(models.Model): """ See ticket #11277. """ class EmptyModelHidden(models.Model): """ See ticket #11277. """ class EmptyModelMixin(models.Model): """ See ticket #11277. 
""" class State(models.Model): name = models.CharField(max_length=100, verbose_name='State verbose_name') class City(models.Model): state = models.ForeignKey(State, models.CASCADE) name = models.CharField(max_length=100, verbose_name='City verbose_name') def get_absolute_url(self): return '/dummy/%s/' % self.pk class Restaurant(models.Model): city = models.ForeignKey(City, models.CASCADE) name = models.CharField(max_length=100) def get_absolute_url(self): return '/dummy/%s/' % self.pk class Worker(models.Model): work_at = models.ForeignKey(Restaurant, models.CASCADE) name = models.CharField(max_length=50) surname = models.CharField(max_length=50) # Models for #23329 class ReferencedByParent(models.Model): name = models.CharField(max_length=20, unique=True) class ParentWithFK(models.Model): fk = models.ForeignKey( ReferencedByParent, models.CASCADE, to_field='name', related_name='hidden+', ) class ChildOfReferer(ParentWithFK): pass # Models for #23431 class InlineReferer(models.Model): pass class ReferencedByInline(models.Model): name = models.CharField(max_length=20, unique=True) class InlineReference(models.Model): referer = models.ForeignKey(InlineReferer, models.CASCADE) fk = models.ForeignKey( ReferencedByInline, models.CASCADE, to_field='name', related_name='hidden+', ) class Recipe(models.Model): rname = models.CharField(max_length=20, unique=True) class Ingredient(models.Model): iname = models.CharField(max_length=20, unique=True) recipes = models.ManyToManyField(Recipe, through='RecipeIngredient') class RecipeIngredient(models.Model): ingredient = models.ForeignKey(Ingredient, models.CASCADE, to_field='iname') recipe = models.ForeignKey(Recipe, models.CASCADE, to_field='rname') # Model for #23839 class NotReferenced(models.Model): # Don't point any FK at this model. pass # Models for #23934 class ExplicitlyProvidedPK(models.Model): name = models.IntegerField(primary_key=True) class ImplicitlyGeneratedPK(models.Model): name = models.IntegerField(unique=True) # Models for #25622 class ReferencedByGenRel(models.Model): content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) object_id = models.PositiveIntegerField() content_object = GenericForeignKey('content_type', 'object_id') class GenRelReference(models.Model): references = GenericRelation(ReferencedByGenRel) class ParentWithUUIDPK(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) title = models.CharField(max_length=100) def __str__(self): return str(self.id) class RelatedWithUUIDPKModel(models.Model): parent = models.ForeignKey(ParentWithUUIDPK, on_delete=models.SET_NULL, null=True, blank=True) class Author(models.Model): pass class Authorship(models.Model): book = models.ForeignKey(Book, models.CASCADE) author = models.ForeignKey(Author, models.CASCADE) class UserProxy(User): """Proxy a model with a different app_label.""" class Meta: proxy = True class ReadOnlyRelatedField(models.Model): chapter = models.ForeignKey(Chapter, models.CASCADE) language = models.ForeignKey(Language, models.CASCADE) user = models.ForeignKey(User, models.CASCADE) class Héllo(models.Model): pass class Box(models.Model): title = models.CharField(max_length=100) next_box = models.ForeignKey("self", null=True, on_delete=models.SET_NULL, blank=True) next_box = models.ForeignKey("self", null=True, on_delete=models.SET_NULL, blank=True)
c1a50b43c6acf45984ac88670501fec79acaa2ee63ff4325d76b928127e2de1d
import datetime from io import StringIO from wsgiref.util import FileWrapper from django import forms from django.contrib import admin from django.contrib.admin import BooleanFieldListFilter from django.contrib.admin.views.main import ChangeList from django.contrib.auth.admin import GroupAdmin, UserAdmin from django.contrib.auth.models import Group, User from django.core.exceptions import ValidationError from django.core.mail import EmailMessage from django.db import models from django.forms.models import BaseModelFormSet from django.http import HttpResponse, JsonResponse, StreamingHttpResponse from django.urls import path from django.utils.html import format_html from django.utils.safestring import mark_safe from django.views.decorators.common import no_append_slash from .forms import MediaActionForm from .models import ( Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField, AdminOrderedModelMethod, Album, Answer, Answer2, Article, BarAccount, Book, Bookmark, Box, Category, Chapter, ChapterXtra1, Child, ChildOfReferer, Choice, City, Collector, Color, Color2, ComplexSortedPerson, CoverLetter, CustomArticle, CyclicOne, CyclicTwo, DependentChild, DooHickey, EmptyModel, EmptyModelHidden, EmptyModelMixin, EmptyModelVisible, ExplicitlyProvidedPK, ExternalSubscriber, Fabric, FancyDoodad, FieldOverridePost, FilteredManager, FooAccount, FoodDelivery, FunkyTag, Gadget, Gallery, GenRelReference, Grommet, ImplicitlyGeneratedPK, Ingredient, InlineReference, InlineReferer, Inquisition, Language, Link, MainPrepopulated, ModelWithStringPrimaryKey, NotReferenced, OldSubscriber, OtherStory, Paper, Parent, ParentWithDependentChildren, ParentWithUUIDPK, Person, Persona, Picture, Pizza, Plot, PlotDetails, PlotProxy, PluggableSearchPerson, Podcast, Post, PrePopulatedPost, PrePopulatedPostLargeSlug, PrePopulatedSubPost, Promo, Question, ReadablePizza, ReadOnlyPizza, ReadOnlyRelatedField, Recipe, Recommendation, Recommender, ReferencedByGenRel, ReferencedByInline, ReferencedByParent, RelatedPrepopulated, RelatedWithUUIDPKModel, Report, Reservation, Restaurant, RowLevelChangePermissionModel, Section, ShortMessage, Simple, Sketch, Song, State, Story, StumpJoke, Subscriber, SuperVillain, Telegram, Thing, Topping, UnchangeableObject, UndeletableObject, UnorderedObject, UserMessenger, UserProxy, Villain, Vodcast, Whatsit, Widget, Worker, WorkHour, ) @admin.display(ordering='date') def callable_year(dt_value): try: return dt_value.year except AttributeError: return None class ArticleInline(admin.TabularInline): model = Article fk_name = 'section' prepopulated_fields = { 'title': ('content',) } fieldsets = ( ('Some fields', { 'classes': ('collapse',), 'fields': ('title', 'content') }), ('Some other fields', { 'classes': ('wide',), 'fields': ('date', 'section') }) ) class ChapterInline(admin.TabularInline): model = Chapter class ChapterXtra1Admin(admin.ModelAdmin): list_filter = ( 'chap', 'chap__title', 'chap__book', 'chap__book__name', 'chap__book__promo', 'chap__book__promo__name', 'guest_author__promo__book', ) class ArticleForm(forms.ModelForm): extra_form_field = forms.BooleanField(required=False) class Meta: fields = '__all__' model = Article class ArticleAdminWithExtraUrl(admin.ModelAdmin): def get_urls(self): urlpatterns = super().get_urls() urlpatterns.append( path('extra.json', self.admin_site.admin_view(self.extra_json), name='article_extra_json') ) return urlpatterns def extra_json(self, request): return JsonResponse({}) class ArticleAdmin(ArticleAdminWithExtraUrl): list_display = ( 
'content', 'date', callable_year, 'model_year', 'modeladmin_year', 'model_year_reversed', 'section', lambda obj: obj.title, 'order_by_expression', 'model_property_year', 'model_month', 'order_by_f_expression', 'order_by_orderby_expression', ) list_editable = ('section',) list_filter = ('date', 'section') autocomplete_fields = ('section',) view_on_site = False form = ArticleForm fieldsets = ( ('Some fields', { 'classes': ('collapse',), 'fields': ('title', 'content', 'extra_form_field'), }), ('Some other fields', { 'classes': ('wide',), 'fields': ('date', 'section', 'sub_section') }) ) # These orderings aren't particularly useful but show that expressions can # be used for admin_order_field. @admin.display(ordering=models.F('date') + datetime.timedelta(days=3)) def order_by_expression(self, obj): return obj.model_year @admin.display(ordering=models.F('date')) def order_by_f_expression(self, obj): return obj.model_year @admin.display(ordering=models.F('date').asc(nulls_last=True)) def order_by_orderby_expression(self, obj): return obj.model_year def changelist_view(self, request): return super().changelist_view(request, extra_context={'extra_var': 'Hello!'}) @admin.display(ordering='date', description=None) def modeladmin_year(self, obj): return obj.date.year def delete_model(self, request, obj): EmailMessage( 'Greetings from a deleted object', 'I hereby inform you that some user deleted me', '[email protected]', ['[email protected]'] ).send() return super().delete_model(request, obj) def save_model(self, request, obj, form, change=True): EmailMessage( 'Greetings from a created object', 'I hereby inform you that some user created me', '[email protected]', ['[email protected]'] ).send() return super().save_model(request, obj, form, change) class ArticleAdmin2(admin.ModelAdmin): def has_module_permission(self, request): return False class RowLevelChangePermissionModelAdmin(admin.ModelAdmin): def has_change_permission(self, request, obj=None): """ Only allow changing objects with even id number """ return request.user.is_staff and (obj is not None) and (obj.id % 2 == 0) def has_view_permission(self, request, obj=None): """Only allow viewing objects if id is a multiple of 3.""" return request.user.is_staff and obj is not None and obj.id % 3 == 0 class CustomArticleAdmin(admin.ModelAdmin): """ Tests various hooks for using custom templates and contexts. """ change_list_template = 'custom_admin/change_list.html' change_form_template = 'custom_admin/change_form.html' add_form_template = 'custom_admin/add_form.html' object_history_template = 'custom_admin/object_history.html' delete_confirmation_template = 'custom_admin/delete_confirmation.html' delete_selected_confirmation_template = 'custom_admin/delete_selected_confirmation.html' popup_response_template = 'custom_admin/popup_response.html' def changelist_view(self, request): return super().changelist_view(request, extra_context={'extra_var': 'Hello!'}) class ThingAdmin(admin.ModelAdmin): list_filter = ('color', 'color__warm', 'color__value', 'pub_date') class InquisitionAdmin(admin.ModelAdmin): list_display = ('leader', 'country', 'expected', 'sketch') @admin.display def sketch(self, obj): # A method with the same name as a reverse accessor. 
return 'list-display-sketch' class SketchAdmin(admin.ModelAdmin): raw_id_fields = ('inquisition', 'defendant0', 'defendant1') class FabricAdmin(admin.ModelAdmin): list_display = ('surface',) list_filter = ('surface',) class BasePersonModelFormSet(BaseModelFormSet): def clean(self): for person_dict in self.cleaned_data: person = person_dict.get('id') alive = person_dict.get('alive') if person and alive and person.name == "Grace Hopper": raise ValidationError("Grace is not a Zombie") class PersonAdmin(admin.ModelAdmin): list_display = ('name', 'gender', 'alive') list_editable = ('gender', 'alive') list_filter = ('gender',) search_fields = ('^name',) save_as = True def get_changelist_formset(self, request, **kwargs): return super().get_changelist_formset(request, formset=BasePersonModelFormSet, **kwargs) def get_queryset(self, request): # Order by a field that isn't in list display, to be able to test # whether ordering is preserved. return super().get_queryset(request).order_by('age') class FooAccountAdmin(admin.StackedInline): model = FooAccount extra = 1 class BarAccountAdmin(admin.StackedInline): model = BarAccount extra = 1 class PersonaAdmin(admin.ModelAdmin): inlines = ( FooAccountAdmin, BarAccountAdmin ) class SubscriberAdmin(admin.ModelAdmin): actions = ['mail_admin'] action_form = MediaActionForm def delete_queryset(self, request, queryset): SubscriberAdmin.overridden = True super().delete_queryset(request, queryset) @admin.action def mail_admin(self, request, selected): EmailMessage( 'Greetings from a ModelAdmin action', 'This is the test email from an admin action', '[email protected]', ['[email protected]'] ).send() @admin.action(description='External mail (Another awesome action)') def external_mail(modeladmin, request, selected): EmailMessage( 'Greetings from a function action', 'This is the test email from a function action', '[email protected]', ['[email protected]'] ).send() @admin.action(description='Redirect to (Awesome action)') def redirect_to(modeladmin, request, selected): from django.http import HttpResponseRedirect return HttpResponseRedirect('/some-where-else/') @admin.action(description='Download subscription') def download(modeladmin, request, selected): buf = StringIO('This is the content of the file') return StreamingHttpResponse(FileWrapper(buf)) @admin.action(description='No permission to run') def no_perm(modeladmin, request, selected): return HttpResponse(content='No permission to perform this action', status=403) class ExternalSubscriberAdmin(admin.ModelAdmin): actions = [redirect_to, external_mail, download, no_perm] class PodcastAdmin(admin.ModelAdmin): list_display = ('name', 'release_date') list_editable = ('release_date',) date_hierarchy = 'release_date' ordering = ('name',) class VodcastAdmin(admin.ModelAdmin): list_display = ('name', 'released') list_editable = ('released',) ordering = ('name',) class ChildInline(admin.StackedInline): model = Child class ParentAdmin(admin.ModelAdmin): model = Parent inlines = [ChildInline] save_as = True list_display = ('id', 'name',) list_display_links = ('id',) list_editable = ('name',) def save_related(self, request, form, formsets, change): super().save_related(request, form, formsets, change) first_name, last_name = form.instance.name.split() for child in form.instance.child_set.all(): if len(child.name.split()) < 2: child.name = child.name + ' ' + last_name child.save() class EmptyModelAdmin(admin.ModelAdmin): def get_queryset(self, request): return super().get_queryset(request).filter(pk__gt=1) class 
OldSubscriberAdmin(admin.ModelAdmin): actions = None class PictureInline(admin.TabularInline): model = Picture extra = 1 class GalleryAdmin(admin.ModelAdmin): inlines = [PictureInline] class PictureAdmin(admin.ModelAdmin): pass class LanguageAdmin(admin.ModelAdmin): list_display = ['iso', 'shortlist', 'english_name', 'name'] list_editable = ['shortlist'] class RecommendationAdmin(admin.ModelAdmin): show_full_result_count = False search_fields = ('=titletranslation__text', '=the_recommender__titletranslation__text',) class WidgetInline(admin.StackedInline): model = Widget class DooHickeyInline(admin.StackedInline): model = DooHickey class GrommetInline(admin.StackedInline): model = Grommet class WhatsitInline(admin.StackedInline): model = Whatsit class FancyDoodadInline(admin.StackedInline): model = FancyDoodad class CategoryAdmin(admin.ModelAdmin): list_display = ('id', 'collector', 'order') list_editable = ('order',) class CategoryInline(admin.StackedInline): model = Category class CollectorAdmin(admin.ModelAdmin): inlines = [ WidgetInline, DooHickeyInline, GrommetInline, WhatsitInline, FancyDoodadInline, CategoryInline ] class LinkInline(admin.TabularInline): model = Link extra = 1 readonly_fields = ("posted", "multiline", "readonly_link_content") @admin.display def multiline(self, instance): return "InlineMultiline\ntest\nstring" class SubPostInline(admin.TabularInline): model = PrePopulatedSubPost prepopulated_fields = { 'subslug': ('subtitle',) } def get_readonly_fields(self, request, obj=None): if obj and obj.published: return ('subslug',) return self.readonly_fields def get_prepopulated_fields(self, request, obj=None): if obj and obj.published: return {} return self.prepopulated_fields class PrePopulatedPostAdmin(admin.ModelAdmin): list_display = ['title', 'slug'] prepopulated_fields = { 'slug': ('title',) } inlines = [SubPostInline] def get_readonly_fields(self, request, obj=None): if obj and obj.published: return ('slug',) return self.readonly_fields def get_prepopulated_fields(self, request, obj=None): if obj and obj.published: return {} return self.prepopulated_fields class PrePopulatedPostReadOnlyAdmin(admin.ModelAdmin): prepopulated_fields = {'slug': ('title',)} def has_change_permission(self, *args, **kwargs): return False class PostAdmin(admin.ModelAdmin): list_display = ['title', 'public'] readonly_fields = ( 'posted', 'awesomeness_level', 'coolness', 'value', 'multiline', 'multiline_html', lambda obj: "foo", 'readonly_content', ) inlines = [ LinkInline ] @admin.display def coolness(self, instance): if instance.pk: return "%d amount of cool." % instance.pk else: return "Unknown coolness." 
@admin.display(description='Value in $US') def value(self, instance): return 1000 @admin.display def multiline(self, instance): return "Multiline\ntest\nstring" @admin.display def multiline_html(self, instance): return mark_safe("Multiline<br>\nhtml<br>\ncontent") class FieldOverridePostForm(forms.ModelForm): model = FieldOverridePost class Meta: help_texts = { 'posted': 'Overridden help text for the date', } labels = { 'public': 'Overridden public label', } class FieldOverridePostAdmin(PostAdmin): form = FieldOverridePostForm class CustomChangeList(ChangeList): def get_queryset(self, request): return self.root_queryset.order_by('pk').filter(pk=9999) # Doesn't exist class GadgetAdmin(admin.ModelAdmin): def get_changelist(self, request, **kwargs): return CustomChangeList class ToppingAdmin(admin.ModelAdmin): readonly_fields = ('pizzas',) class PizzaAdmin(admin.ModelAdmin): readonly_fields = ('toppings',) class ReadOnlyRelatedFieldAdmin(admin.ModelAdmin): readonly_fields = ('chapter', 'language', 'user') class StudentAdmin(admin.ModelAdmin): search_fields = ('name',) class ReadOnlyPizzaAdmin(admin.ModelAdmin): readonly_fields = ('name', 'toppings') def has_add_permission(self, request): return False def has_change_permission(self, request, obj=None): return True def has_delete_permission(self, request, obj=None): return True class WorkHourAdmin(admin.ModelAdmin): list_display = ('datum', 'employee') list_filter = ('employee',) class FoodDeliveryAdmin(admin.ModelAdmin): list_display = ('reference', 'driver', 'restaurant') list_editable = ('driver', 'restaurant') class CoverLetterAdmin(admin.ModelAdmin): """ A ModelAdmin with a custom get_queryset() method that uses defer(), to test verbose_name display in messages shown after adding/editing CoverLetter instances. Note that the CoverLetter model defines a __str__ method. For testing fix for ticket #14529. """ def get_queryset(self, request): return super().get_queryset(request).defer('date_written') class PaperAdmin(admin.ModelAdmin): """ A ModelAdmin with a custom get_queryset() method that uses only(), to test verbose_name display in messages shown after adding/editing Paper instances. For testing fix for ticket #14529. """ def get_queryset(self, request): return super().get_queryset(request).only('title') class ShortMessageAdmin(admin.ModelAdmin): """ A ModelAdmin with a custom get_queryset() method that uses defer(), to test verbose_name display in messages shown after adding/editing ShortMessage instances. For testing fix for ticket #14529. """ def get_queryset(self, request): return super().get_queryset(request).defer('timestamp') class TelegramAdmin(admin.ModelAdmin): """ A ModelAdmin with a custom get_queryset() method that uses only(), to test verbose_name display in messages shown after adding/editing Telegram instances. Note that the Telegram model defines a __str__ method. For testing fix for ticket #14529. 
""" def get_queryset(self, request): return super().get_queryset(request).only('title') class StoryForm(forms.ModelForm): class Meta: widgets = {'title': forms.HiddenInput} class StoryAdmin(admin.ModelAdmin): list_display = ('id', 'title', 'content') list_display_links = ('title',) # 'id' not in list_display_links list_editable = ('content',) form = StoryForm ordering = ['-id'] class OtherStoryAdmin(admin.ModelAdmin): list_display = ('id', 'title', 'content') list_display_links = ('title', 'id') # 'id' in list_display_links list_editable = ('content',) ordering = ['-id'] class ComplexSortedPersonAdmin(admin.ModelAdmin): list_display = ('name', 'age', 'is_employee', 'colored_name') ordering = ('name',) @admin.display(ordering='name') def colored_name(self, obj): return format_html('<span style="color: #ff00ff;">{}</span>', obj.name) class PluggableSearchPersonAdmin(admin.ModelAdmin): list_display = ('name', 'age') search_fields = ('name',) def get_search_results(self, request, queryset, search_term): queryset, may_have_duplicates = super().get_search_results( request, queryset, search_term, ) try: search_term_as_int = int(search_term) except ValueError: pass else: queryset |= self.model.objects.filter(age=search_term_as_int) return queryset, may_have_duplicates class AlbumAdmin(admin.ModelAdmin): list_filter = ['title'] class QuestionAdmin(admin.ModelAdmin): ordering = ['-posted'] search_fields = ['question'] autocomplete_fields = ['related_questions'] class AnswerAdmin(admin.ModelAdmin): autocomplete_fields = ['question'] class PrePopulatedPostLargeSlugAdmin(admin.ModelAdmin): prepopulated_fields = { 'slug': ('title',) } class AdminOrderedFieldAdmin(admin.ModelAdmin): ordering = ('order',) list_display = ('stuff', 'order') class AdminOrderedModelMethodAdmin(admin.ModelAdmin): ordering = ('order',) list_display = ('stuff', 'some_order') class AdminOrderedAdminMethodAdmin(admin.ModelAdmin): @admin.display(ordering='order') def some_admin_order(self, obj): return obj.order ordering = ('order',) list_display = ('stuff', 'some_admin_order') @admin.display(ordering='order') def admin_ordered_callable(obj): return obj.order class AdminOrderedCallableAdmin(admin.ModelAdmin): ordering = ('order',) list_display = ('stuff', admin_ordered_callable) class ReportAdmin(admin.ModelAdmin): def extra(self, request): return HttpResponse() def get_urls(self): # Corner case: Don't call parent implementation return [path('extra/', self.extra, name='cable_extra')] class CustomTemplateBooleanFieldListFilter(BooleanFieldListFilter): template = 'custom_filter_template.html' class CustomTemplateFilterColorAdmin(admin.ModelAdmin): list_filter = (('warm', CustomTemplateBooleanFieldListFilter),) # For Selenium Prepopulated tests ------------------------------------- class RelatedPrepopulatedInline1(admin.StackedInline): fieldsets = ( (None, { 'fields': ( ('fk', 'm2m'), ('pubdate', 'status'), ('name', 'slug1', 'slug2',), ), }), ) formfield_overrides = {models.CharField: {'strip': False}} model = RelatedPrepopulated extra = 1 autocomplete_fields = ['fk', 'm2m'] prepopulated_fields = { 'slug1': ['name', 'pubdate'], 'slug2': ['status', 'name'], } class RelatedPrepopulatedInline2(admin.TabularInline): model = RelatedPrepopulated extra = 1 autocomplete_fields = ['fk', 'm2m'] prepopulated_fields = { 'slug1': ['name', 'pubdate'], 'slug2': ['status', 'name'], } class RelatedPrepopulatedInline3(admin.TabularInline): model = RelatedPrepopulated extra = 0 autocomplete_fields = ['fk', 'm2m'] class 
RelatedPrepopulatedStackedInlineNoFieldsets(admin.StackedInline): model = RelatedPrepopulated extra = 1 prepopulated_fields = { 'slug1': ['name', 'pubdate'], 'slug2': ['status'], } class MainPrepopulatedAdmin(admin.ModelAdmin): inlines = [ RelatedPrepopulatedInline1, RelatedPrepopulatedInline2, RelatedPrepopulatedInline3, RelatedPrepopulatedStackedInlineNoFieldsets, ] fieldsets = ( (None, { 'fields': (('pubdate', 'status'), ('name', 'slug1', 'slug2', 'slug3')) }), ) formfield_overrides = {models.CharField: {'strip': False}} prepopulated_fields = { 'slug1': ['name', 'pubdate'], 'slug2': ['status', 'name'], 'slug3': ['name'], } class UnorderedObjectAdmin(admin.ModelAdmin): list_display = ['id', 'name'] list_display_links = ['id'] list_editable = ['name'] list_per_page = 2 class UndeletableObjectAdmin(admin.ModelAdmin): def change_view(self, *args, **kwargs): kwargs['extra_context'] = {'show_delete': False} return super().change_view(*args, **kwargs) class UnchangeableObjectAdmin(admin.ModelAdmin): def get_urls(self): # Disable change_view, but leave other urls untouched urlpatterns = super().get_urls() return [p for p in urlpatterns if p.name and not p.name.endswith("_change")] @admin.display def callable_on_unknown(obj): return obj.unknown class AttributeErrorRaisingAdmin(admin.ModelAdmin): list_display = [callable_on_unknown] class CustomManagerAdmin(admin.ModelAdmin): def get_queryset(self, request): return FilteredManager.objects class MessageTestingAdmin(admin.ModelAdmin): actions = ["message_debug", "message_info", "message_success", "message_warning", "message_error", "message_extra_tags"] @admin.action def message_debug(self, request, selected): self.message_user(request, "Test debug", level="debug") @admin.action def message_info(self, request, selected): self.message_user(request, "Test info", level="info") @admin.action def message_success(self, request, selected): self.message_user(request, "Test success", level="success") @admin.action def message_warning(self, request, selected): self.message_user(request, "Test warning", level="warning") @admin.action def message_error(self, request, selected): self.message_user(request, "Test error", level="error") @admin.action def message_extra_tags(self, request, selected): self.message_user(request, "Test tags", extra_tags="extra_tag") class ChoiceList(admin.ModelAdmin): list_display = ['choice'] readonly_fields = ['choice'] fields = ['choice'] class DependentChildAdminForm(forms.ModelForm): """ Issue #20522 Form to test child dependency on parent object's validation """ def clean(self): parent = self.cleaned_data.get('parent') if parent.family_name and parent.family_name != self.cleaned_data.get('family_name'): raise ValidationError("Children must share a family name with their parents " + "in this contrived test case") return super().clean() class DependentChildInline(admin.TabularInline): model = DependentChild form = DependentChildAdminForm class ParentWithDependentChildrenAdmin(admin.ModelAdmin): inlines = [DependentChildInline] # Tests for ticket 11277 ---------------------------------- class FormWithoutHiddenField(forms.ModelForm): first = forms.CharField() second = forms.CharField() class FormWithoutVisibleField(forms.ModelForm): first = forms.CharField(widget=forms.HiddenInput) second = forms.CharField(widget=forms.HiddenInput) class FormWithVisibleAndHiddenField(forms.ModelForm): first = forms.CharField(widget=forms.HiddenInput) second = forms.CharField() class EmptyModelVisibleAdmin(admin.ModelAdmin): form = 
FormWithoutHiddenField fieldsets = ( (None, { 'fields': (('first', 'second'),), }), ) class EmptyModelHiddenAdmin(admin.ModelAdmin): form = FormWithoutVisibleField fieldsets = EmptyModelVisibleAdmin.fieldsets class EmptyModelMixinAdmin(admin.ModelAdmin): form = FormWithVisibleAndHiddenField fieldsets = EmptyModelVisibleAdmin.fieldsets class CityInlineAdmin(admin.TabularInline): model = City view_on_site = False class StateAdminForm(forms.ModelForm): nolabel_form_field = forms.BooleanField(required=False) class Meta: model = State fields = '__all__' labels = {'name': 'State name (from form’s Meta.labels)'} @property def changed_data(self): data = super().changed_data if data: # Add arbitrary name to changed_data to test # change message construction. return data + ['not_a_form_field'] return data class StateAdmin(admin.ModelAdmin): inlines = [CityInlineAdmin] form = StateAdminForm class RestaurantInlineAdmin(admin.TabularInline): model = Restaurant view_on_site = True class CityAdmin(admin.ModelAdmin): inlines = [RestaurantInlineAdmin] view_on_site = True def get_formset_kwargs(self, request, obj, inline, prefix): return { **super().get_formset_kwargs(request, obj, inline, prefix), 'form_kwargs': {'initial': {'name': 'overridden_name'}}, } class WorkerAdmin(admin.ModelAdmin): def view_on_site(self, obj): return '/worker/%s/%s/' % (obj.surname, obj.name) class WorkerInlineAdmin(admin.TabularInline): model = Worker def view_on_site(self, obj): return '/worker_inline/%s/%s/' % (obj.surname, obj.name) class RestaurantAdmin(admin.ModelAdmin): inlines = [WorkerInlineAdmin] view_on_site = False def get_changeform_initial_data(self, request): return {'name': 'overridden_value'} class FunkyTagAdmin(admin.ModelAdmin): list_display = ('name', 'content_object') class InlineReferenceInline(admin.TabularInline): model = InlineReference class InlineRefererAdmin(admin.ModelAdmin): inlines = [InlineReferenceInline] class PlotReadonlyAdmin(admin.ModelAdmin): readonly_fields = ('plotdetails',) class GetFormsetsArgumentCheckingAdmin(admin.ModelAdmin): fields = ['name'] def add_view(self, request, *args, **kwargs): request.is_add_view = True return super().add_view(request, *args, **kwargs) def change_view(self, request, *args, **kwargs): request.is_add_view = False return super().change_view(request, *args, **kwargs) def get_formsets_with_inlines(self, request, obj=None): if request.is_add_view and obj is not None: raise Exception("'obj' passed to get_formsets_with_inlines wasn't None during add_view") if not request.is_add_view and obj is None: raise Exception("'obj' passed to get_formsets_with_inlines was None during change_view") return super().get_formsets_with_inlines(request, obj) site = admin.AdminSite(name="admin") site.site_url = '/my-site-url/' site.register(Article, ArticleAdmin) site.register(CustomArticle, CustomArticleAdmin) site.register( Section, save_as=True, inlines=[ArticleInline], readonly_fields=['name_property'], search_fields=['name'], ) site.register(ModelWithStringPrimaryKey) site.register(Color) site.register(Thing, ThingAdmin) site.register(Actor) site.register(Inquisition, InquisitionAdmin) site.register(Sketch, SketchAdmin) site.register(Person, PersonAdmin) site.register(Persona, PersonaAdmin) site.register(Subscriber, SubscriberAdmin) site.register(ExternalSubscriber, ExternalSubscriberAdmin) site.register(OldSubscriber, OldSubscriberAdmin) site.register(Podcast, PodcastAdmin) site.register(Vodcast, VodcastAdmin) site.register(Parent, ParentAdmin) site.register(EmptyModel, 
EmptyModelAdmin) site.register(Fabric, FabricAdmin) site.register(Gallery, GalleryAdmin) site.register(Picture, PictureAdmin) site.register(Language, LanguageAdmin) site.register(Recommendation, RecommendationAdmin) site.register(Recommender) site.register(Collector, CollectorAdmin) site.register(Category, CategoryAdmin) site.register(Post, PostAdmin) site.register(FieldOverridePost, FieldOverridePostAdmin) site.register(Gadget, GadgetAdmin) site.register(Villain) site.register(SuperVillain) site.register(Plot) site.register(PlotDetails) site.register(PlotProxy, PlotReadonlyAdmin) site.register(Bookmark) site.register(CyclicOne) site.register(CyclicTwo) site.register(WorkHour, WorkHourAdmin) site.register(Reservation) site.register(FoodDelivery, FoodDeliveryAdmin) site.register(RowLevelChangePermissionModel, RowLevelChangePermissionModelAdmin) site.register(Paper, PaperAdmin) site.register(CoverLetter, CoverLetterAdmin) site.register(ShortMessage, ShortMessageAdmin) site.register(Telegram, TelegramAdmin) site.register(Story, StoryAdmin) site.register(OtherStory, OtherStoryAdmin) site.register(Report, ReportAdmin) site.register(MainPrepopulated, MainPrepopulatedAdmin) site.register(UnorderedObject, UnorderedObjectAdmin) site.register(UndeletableObject, UndeletableObjectAdmin) site.register(UnchangeableObject, UnchangeableObjectAdmin) site.register(State, StateAdmin) site.register(City, CityAdmin) site.register(Restaurant, RestaurantAdmin) site.register(Worker, WorkerAdmin) site.register(FunkyTag, FunkyTagAdmin) site.register(ReferencedByParent) site.register(ChildOfReferer) site.register(ReferencedByInline) site.register(InlineReferer, InlineRefererAdmin) site.register(ReferencedByGenRel) site.register(GenRelReference) site.register(ParentWithUUIDPK) site.register(RelatedPrepopulated, search_fields=['name']) site.register(RelatedWithUUIDPKModel) site.register(ReadOnlyRelatedField, ReadOnlyRelatedFieldAdmin) # We intentionally register Promo and ChapterXtra1 but not Chapter nor ChapterXtra2. # That way we cover all four cases: # related ForeignKey object registered in admin # related ForeignKey object not registered in admin # related OneToOne object registered in admin # related OneToOne object not registered in admin # when deleting Book so as exercise all four paths through # contrib.admin.utils's get_deleted_objects function. 
site.register(Book, inlines=[ChapterInline]) site.register(Promo) site.register(ChapterXtra1, ChapterXtra1Admin) site.register(Pizza, PizzaAdmin) site.register(ReadOnlyPizza, ReadOnlyPizzaAdmin) site.register(ReadablePizza) site.register(Topping, ToppingAdmin) site.register(Album, AlbumAdmin) site.register(Song) site.register(Question, QuestionAdmin) site.register(Answer, AnswerAdmin, date_hierarchy='question__posted') site.register(Answer2, date_hierarchy='question__expires') site.register(PrePopulatedPost, PrePopulatedPostAdmin) site.register(ComplexSortedPerson, ComplexSortedPersonAdmin) site.register(FilteredManager, CustomManagerAdmin) site.register(PluggableSearchPerson, PluggableSearchPersonAdmin) site.register(PrePopulatedPostLargeSlug, PrePopulatedPostLargeSlugAdmin) site.register(AdminOrderedField, AdminOrderedFieldAdmin) site.register(AdminOrderedModelMethod, AdminOrderedModelMethodAdmin) site.register(AdminOrderedAdminMethod, AdminOrderedAdminMethodAdmin) site.register(AdminOrderedCallable, AdminOrderedCallableAdmin) site.register(Color2, CustomTemplateFilterColorAdmin) site.register(Simple, AttributeErrorRaisingAdmin) site.register(UserMessenger, MessageTestingAdmin) site.register(Choice, ChoiceList) site.register(ParentWithDependentChildren, ParentWithDependentChildrenAdmin) site.register(EmptyModelHidden, EmptyModelHiddenAdmin) site.register(EmptyModelVisible, EmptyModelVisibleAdmin) site.register(EmptyModelMixin, EmptyModelMixinAdmin) site.register(StumpJoke) site.register(Recipe) site.register(Ingredient) site.register(NotReferenced) site.register(ExplicitlyProvidedPK, GetFormsetsArgumentCheckingAdmin) site.register(ImplicitlyGeneratedPK, GetFormsetsArgumentCheckingAdmin) site.register(UserProxy) site.register(Box) # Register core models we need in our tests site.register(User, UserAdmin) site.register(Group, GroupAdmin) # Used to test URL namespaces site2 = admin.AdminSite(name="namespaced_admin") site2.register(User, UserAdmin) site2.register(Group, GroupAdmin) site2.register(ParentWithUUIDPK) site2.register( RelatedWithUUIDPKModel, list_display=['pk', 'parent'], list_editable=['parent'], raw_id_fields=['parent'], ) site2.register(Person, save_as_continue=False) site2.register(ReadOnlyRelatedField, ReadOnlyRelatedFieldAdmin) site2.register(Language) site7 = admin.AdminSite(name="admin7") site7.register(Article, ArticleAdmin2) site7.register(Section) site7.register(PrePopulatedPost, PrePopulatedPostReadOnlyAdmin) site7.register( Pizza, filter_horizontal=['toppings'], fieldsets=( ('Collapsible', { 'classes': ['collapse'], 'fields': ['toppings'], }), ), ) site7.register( Question, filter_horizontal=['related_questions'], fieldsets=( ('Not collapsible', { 'fields': ['related_questions'], }), ), ) # Used to test ModelAdmin.sortable_by and get_sortable_by(). 
class ArticleAdmin6(admin.ModelAdmin): list_display = ( 'content', 'date', callable_year, 'model_year', 'modeladmin_year', 'model_year_reversed', 'section', ) sortable_by = ('date', callable_year) @admin.display(ordering='date') def modeladmin_year(self, obj): return obj.date.year class ActorAdmin6(admin.ModelAdmin): list_display = ('name', 'age') sortable_by = ('name',) def get_sortable_by(self, request): return ('age',) class ChapterAdmin6(admin.ModelAdmin): list_display = ('title', 'book') sortable_by = () class ColorAdmin6(admin.ModelAdmin): list_display = ('value',) def get_sortable_by(self, request): return () site6 = admin.AdminSite(name='admin6') site6.register(Article, ArticleAdmin6) site6.register(Actor, ActorAdmin6) site6.register(Chapter, ChapterAdmin6) site6.register(Color, ColorAdmin6) class ArticleAdmin9(admin.ModelAdmin): def has_change_permission(self, request, obj=None): # Simulate that the user can't change a specific object. return obj is None class ActorAdmin9(admin.ModelAdmin): def get_urls(self): # Opt-out of append slash for single model. urls = super().get_urls() for pattern in urls: pattern.callback = no_append_slash(pattern.callback) return urls site9 = admin.AdminSite(name='admin9') site9.register(Article, ArticleAdmin9) site9.register(Actor, ActorAdmin9) site10 = admin.AdminSite(name='admin10') site10.final_catch_all_view = False site10.register(Article, ArticleAdminWithExtraUrl)
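
# ---------------------------------------------------------------------------
# Hedged wiring sketch (not the suite's real URLconf): the AdminSite
# instances defined above only become reachable once their `urls` properties
# are included in a urls.py. The prefixes below are illustrative assumptions,
# and the helper simply returns the entries rather than installing them.
# ---------------------------------------------------------------------------
def example_urlconf_entries():
    return [
        path('admin/', site.urls),
        path('namespaced_admin/', site2.urls),
        path('admin6/', site6.urls),
        path('admin7/', site7.urls),
        path('admin9/', site9.urls),
        path('admin10/', site10.urls),
    ]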
f226ea9ef532b261d0c14137168cd317e3a22947c753233fb28725069dee482d
import json from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME from django.contrib.admin.views.main import IS_POPUP_VAR from django.contrib.auth.models import Permission, User from django.core import mail from django.template.loader import render_to_string from django.template.response import TemplateResponse from django.test import TestCase, override_settings from django.urls import reverse from .admin import SubscriberAdmin from .forms import MediaActionForm from .models import ( Actor, Answer, Book, ExternalSubscriber, Question, Subscriber, UnchangeableObject, ) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminActionsTest(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.s1 = ExternalSubscriber.objects.create(name='John Doe', email='[email protected]') cls.s2 = Subscriber.objects.create(name='Max Mustermann', email='[email protected]') def setUp(self): self.client.force_login(self.superuser) def test_model_admin_custom_action(self): """A custom action defined in a ModelAdmin method.""" action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk], 'action': 'mail_admin', 'index': 0, } self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Greetings from a ModelAdmin action') def test_model_admin_default_delete_action(self): action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk, self.s2.pk], 'action': 'delete_selected', 'index': 0, } delete_confirmation_data = { ACTION_CHECKBOX_NAME: [self.s1.pk, self.s2.pk], 'action': 'delete_selected', 'post': 'yes', } confirmation = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data) self.assertIsInstance(confirmation, TemplateResponse) self.assertContains(confirmation, 'Are you sure you want to delete the selected subscribers?') self.assertContains(confirmation, '<h2>Summary</h2>') self.assertContains(confirmation, '<li>Subscribers: 2</li>') self.assertContains(confirmation, '<li>External subscribers: 1</li>') self.assertContains(confirmation, ACTION_CHECKBOX_NAME, count=2) self.client.post(reverse('admin:admin_views_subscriber_changelist'), delete_confirmation_data) self.assertEqual(Subscriber.objects.count(), 0) def test_default_delete_action_nonexistent_pk(self): self.assertFalse(Subscriber.objects.filter(id=9998).exists()) action_data = { ACTION_CHECKBOX_NAME: ['9998'], 'action': 'delete_selected', 'index': 0, } response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data) self.assertContains(response, 'Are you sure you want to delete the selected subscribers?') self.assertContains(response, '<ul></ul>', html=True) @override_settings(USE_THOUSAND_SEPARATOR=True, NUMBER_GROUPING=3) def test_non_localized_pk(self): """ If USE_THOUSAND_SEPARATOR is set, the ids for the objects selected for deletion are rendered without separators. 
""" s = ExternalSubscriber.objects.create(id=9999) action_data = { ACTION_CHECKBOX_NAME: [s.pk, self.s2.pk], 'action': 'delete_selected', 'index': 0, } response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data) self.assertTemplateUsed(response, 'admin/delete_selected_confirmation.html') self.assertContains(response, 'value="9999"') # Instead of 9,999 self.assertContains(response, 'value="%s"' % self.s2.pk) def test_model_admin_default_delete_action_protected(self): """ The default delete action where some related objects are protected from deletion. """ q1 = Question.objects.create(question='Why?') a1 = Answer.objects.create(question=q1, answer='Because.') a2 = Answer.objects.create(question=q1, answer='Yes.') q2 = Question.objects.create(question='Wherefore?') action_data = { ACTION_CHECKBOX_NAME: [q1.pk, q2.pk], 'action': 'delete_selected', 'index': 0, } delete_confirmation_data = action_data.copy() delete_confirmation_data['post'] = 'yes' response = self.client.post(reverse('admin:admin_views_question_changelist'), action_data) self.assertContains(response, 'would require deleting the following protected related objects') self.assertContains( response, '<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,)), html=True ) self.assertContains( response, '<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,)), html=True ) # A POST request to delete protected objects displays the page which # says the deletion is prohibited. response = self.client.post(reverse('admin:admin_views_question_changelist'), delete_confirmation_data) self.assertContains(response, 'would require deleting the following protected related objects') self.assertEqual(Question.objects.count(), 2) def test_model_admin_default_delete_action_no_change_url(self): """ The default delete action doesn't break if a ModelAdmin removes the change_view URL (#20640). """ obj = UnchangeableObject.objects.create() action_data = { ACTION_CHECKBOX_NAME: obj.pk, 'action': 'delete_selected', 'index': '0', } response = self.client.post(reverse('admin:admin_views_unchangeableobject_changelist'), action_data) # No 500 caused by NoReverseMatch. The page doesn't display a link to # the nonexistent change page. self.assertContains(response, '<li>Unchangeable object: %s</li>' % obj, 1, html=True) def test_delete_queryset_hook(self): delete_confirmation_data = { ACTION_CHECKBOX_NAME: [self.s1.pk, self.s2.pk], 'action': 'delete_selected', 'post': 'yes', 'index': 0, } SubscriberAdmin.overridden = False self.client.post(reverse('admin:admin_views_subscriber_changelist'), delete_confirmation_data) # SubscriberAdmin.delete_queryset() sets overridden to True. self.assertIs(SubscriberAdmin.overridden, True) self.assertEqual(Subscriber.objects.all().count(), 0) def test_delete_selected_uses_get_deleted_objects(self): """The delete_selected action uses ModelAdmin.get_deleted_objects().""" book = Book.objects.create(name='Test Book') data = { ACTION_CHECKBOX_NAME: [book.pk], 'action': 'delete_selected', 'index': 0, } response = self.client.post(reverse('admin2:admin_views_book_changelist'), data) # BookAdmin.get_deleted_objects() returns custom text. 
self.assertContains(response, 'a deletable object') def test_custom_function_mail_action(self): """A custom action may be defined in a function.""" action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk], 'action': 'external_mail', 'index': 0, } self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action') def test_custom_function_action_with_redirect(self): """Another custom action defined in a function.""" action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk], 'action': 'redirect_to', 'index': 0, } response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data) self.assertEqual(response.status_code, 302) def test_default_redirect(self): """ Actions which don't return an HttpResponse are redirected to the same page, retaining the querystring (which may contain changelist info). """ action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk], 'action': 'external_mail', 'index': 0, } url = reverse('admin:admin_views_externalsubscriber_changelist') + '?o=1' response = self.client.post(url, action_data) self.assertRedirects(response, url) def test_custom_function_action_streaming_response(self): """A custom action may return a StreamingHttpResponse.""" action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk], 'action': 'download', 'index': 0, } response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data) content = b''.join(response.streaming_content) self.assertEqual(content, b'This is the content of the file') self.assertEqual(response.status_code, 200) def test_custom_function_action_no_perm_response(self): """A custom action may returns an HttpResponse with a 403 code.""" action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk], 'action': 'no_perm', 'index': 0, } response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data) self.assertEqual(response.status_code, 403) self.assertEqual(response.content, b'No permission to perform this action') def test_actions_ordering(self): """Actions are ordered as expected.""" response = self.client.get(reverse('admin:admin_views_externalsubscriber_changelist')) self.assertContains(response, '''<label>Action: <select name="action" required> <option value="" selected>---------</option> <option value="delete_selected">Delete selected external subscribers</option> <option value="redirect_to">Redirect to (Awesome action)</option> <option value="external_mail">External mail (Another awesome action)</option> <option value="download">Download subscription</option> <option value="no_perm">No permission to run</option> </select>''', html=True) def test_model_without_action(self): """A ModelAdmin might not have any actions.""" response = self.client.get(reverse('admin:admin_views_oldsubscriber_changelist')) self.assertIsNone(response.context['action_form']) self.assertNotContains( response, '<input type="checkbox" class="action-select"', msg_prefix='Found an unexpected action toggle checkboxbox in response' ) self.assertNotContains(response, '<input type="checkbox" class="action-select"') def test_model_without_action_still_has_jquery(self): """ A ModelAdmin without any actions still has jQuery included on the page. 
""" response = self.client.get(reverse('admin:admin_views_oldsubscriber_changelist')) self.assertIsNone(response.context['action_form']) self.assertContains( response, 'jquery.min.js', msg_prefix='jQuery missing from admin pages for model with no admin actions' ) def test_action_column_class(self): """The checkbox column class is present in the response.""" response = self.client.get(reverse('admin:admin_views_subscriber_changelist')) self.assertIsNotNone(response.context['action_form']) self.assertContains(response, 'action-checkbox-column') def test_multiple_actions_form(self): """ Actions come from the form whose submit button was pressed (#10618). """ action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk], # Two different actions selected on the two forms... 'action': ['external_mail', 'delete_selected'], # ...but "go" was clicked on the top form. 'index': 0 } self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data) # The action sends mail rather than deletes. self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action') def test_media_from_actions_form(self): """ The action form's media is included in the changelist view's media. """ response = self.client.get(reverse('admin:admin_views_subscriber_changelist')) media_path = MediaActionForm.Media.js[0] self.assertIsInstance(response.context['action_form'], MediaActionForm) self.assertIn('media', response.context) self.assertIn(media_path, response.context['media']._js) self.assertContains(response, media_path) def test_user_message_on_none_selected(self): """ User sees a warning when 'Go' is pressed and no items are selected. """ action_data = { ACTION_CHECKBOX_NAME: [], 'action': 'delete_selected', 'index': 0, } url = reverse('admin:admin_views_subscriber_changelist') response = self.client.post(url, action_data) self.assertRedirects(response, url, fetch_redirect_response=False) response = self.client.get(response.url) msg = 'Items must be selected in order to perform actions on them. No items have been changed.' self.assertContains(response, msg) self.assertEqual(Subscriber.objects.count(), 2) def test_user_message_on_no_action(self): """ User sees a warning when 'Go' is pressed and no action is selected. """ action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk, self.s2.pk], 'action': '', 'index': 0, } url = reverse('admin:admin_views_subscriber_changelist') response = self.client.post(url, action_data) self.assertRedirects(response, url, fetch_redirect_response=False) response = self.client.get(response.url) self.assertContains(response, 'No action selected.') self.assertEqual(Subscriber.objects.count(), 2) def test_selection_counter(self): """The selection counter is there.""" response = self.client.get(reverse('admin:admin_views_subscriber_changelist')) self.assertContains(response, '0 of 2 selected') def test_popup_actions(self): """ Actions aren't shown in popups.""" changelist_url = reverse('admin:admin_views_subscriber_changelist') response = self.client.get(changelist_url) self.assertIsNotNone(response.context['action_form']) response = self.client.get(changelist_url + '?%s' % IS_POPUP_VAR) self.assertIsNone(response.context['action_form']) def test_popup_template_response_on_add(self): """ Success on popups shall be rendered from template in order to allow easy customization. 
""" response = self.client.post( reverse('admin:admin_views_actor_add') + '?%s=1' % IS_POPUP_VAR, {'name': 'Troy McClure', 'age': '55', IS_POPUP_VAR: '1'} ) self.assertEqual(response.status_code, 200) self.assertEqual(response.template_name, [ 'admin/admin_views/actor/popup_response.html', 'admin/admin_views/popup_response.html', 'admin/popup_response.html', ]) self.assertTemplateUsed(response, 'admin/popup_response.html') def test_popup_template_response_on_change(self): instance = Actor.objects.create(name='David Tennant', age=45) response = self.client.post( reverse('admin:admin_views_actor_change', args=(instance.pk,)) + '?%s=1' % IS_POPUP_VAR, {'name': 'David Tennant', 'age': '46', IS_POPUP_VAR: '1'} ) self.assertEqual(response.status_code, 200) self.assertEqual(response.template_name, [ 'admin/admin_views/actor/popup_response.html', 'admin/admin_views/popup_response.html', 'admin/popup_response.html', ]) self.assertTemplateUsed(response, 'admin/popup_response.html') def test_popup_template_response_on_delete(self): instance = Actor.objects.create(name='David Tennant', age=45) response = self.client.post( reverse('admin:admin_views_actor_delete', args=(instance.pk,)) + '?%s=1' % IS_POPUP_VAR, {IS_POPUP_VAR: '1'} ) self.assertEqual(response.status_code, 200) self.assertEqual(response.template_name, [ 'admin/admin_views/actor/popup_response.html', 'admin/admin_views/popup_response.html', 'admin/popup_response.html', ]) self.assertTemplateUsed(response, 'admin/popup_response.html') def test_popup_template_escaping(self): popup_response_data = json.dumps({ 'new_value': 'new_value\\', 'obj': 'obj\\', 'value': 'value\\', }) context = { 'popup_response_data': popup_response_data, } output = render_to_string('admin/popup_response.html', context) self.assertIn( r'&quot;value\\&quot;', output ) self.assertIn( r'&quot;new_value\\&quot;', output ) self.assertIn( r'&quot;obj\\&quot;', output ) @override_settings(ROOT_URLCONF='admin_views.urls') class AdminActionsPermissionTests(TestCase): @classmethod def setUpTestData(cls): cls.s1 = ExternalSubscriber.objects.create(name='John Doe', email='[email protected]') cls.s2 = Subscriber.objects.create(name='Max Mustermann', email='[email protected]') cls.user = User.objects.create_user( username='user', password='secret', email='[email protected]', is_staff=True, ) permission = Permission.objects.get(codename='change_subscriber') cls.user.user_permissions.add(permission) def setUp(self): self.client.force_login(self.user) def test_model_admin_no_delete_permission(self): """ Permission is denied if the user doesn't have delete permission for the model (Subscriber). """ action_data = { ACTION_CHECKBOX_NAME: [self.s1.pk], 'action': 'delete_selected', } url = reverse('admin:admin_views_subscriber_changelist') response = self.client.post(url, action_data) self.assertRedirects(response, url, fetch_redirect_response=False) response = self.client.get(response.url) self.assertContains(response, 'No action selected.') def test_model_admin_no_delete_permission_externalsubscriber(self): """ Permission is denied if the user doesn't have delete permission for a related model (ExternalSubscriber). 
""" permission = Permission.objects.get(codename='delete_subscriber') self.user.user_permissions.add(permission) delete_confirmation_data = { ACTION_CHECKBOX_NAME: [self.s1.pk, self.s2.pk], 'action': 'delete_selected', 'post': 'yes', } response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), delete_confirmation_data) self.assertEqual(response.status_code, 403)
6fe303e8b1ea33e7d3e9b9d4238a6b01205b5f95586994a88492f1dd8dd8a938
from django.contrib.admin.models import CHANGE, LogEntry from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from django.core.paginator import Paginator from django.test import TestCase, override_settings from django.urls import reverse from .models import City, State @override_settings(ROOT_URLCONF='admin_views.urls') class AdminHistoryViewTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser( username='super', password='secret', email='[email protected]', ) def setUp(self): self.client.force_login(self.superuser) def test_changed_message_uses_form_labels(self): """ Admin's model history change messages use form labels instead of field names. """ state = State.objects.create(name='My State Name') city = City.objects.create(name='My City Name', state=state) change_dict = { 'name': 'My State Name 2', 'nolabel_form_field': True, 'city_set-0-name': 'My City name 2', 'city_set-0-id': city.pk, 'city_set-TOTAL_FORMS': '3', 'city_set-INITIAL_FORMS': '1', 'city_set-MAX_NUM_FORMS': '0', } state_change_url = reverse('admin:admin_views_state_change', args=(state.pk,)) self.client.post(state_change_url, change_dict) logentry = LogEntry.objects.filter(content_type__model__iexact='state').latest('id') self.assertEqual( logentry.get_change_message(), 'Changed State name (from form’s Meta.labels), ' 'nolabel_form_field and not_a_form_field. ' 'Changed City verbose_name for city “%s”.' % city ) @override_settings(ROOT_URLCONF='admin_views.urls') class SeleniumTests(AdminSeleniumTestCase): available_apps = ['admin_views'] + AdminSeleniumTestCase.available_apps def setUp(self): self.superuser = User.objects.create_superuser( username='super', password='secret', email='[email protected]', ) content_type_pk = ContentType.objects.get_for_model(User).pk for i in range(1, 1101): LogEntry.objects.log_action( self.superuser.pk, content_type_pk, self.superuser.pk, repr(self.superuser), CHANGE, change_message=f'Changed something {i}', ) self.admin_login( username='super', password='secret', login_url=reverse('admin:index'), ) def test_pagination(self): from selenium.webdriver.common.by import By user_history_url = reverse('admin:auth_user_history', args=(self.superuser.pk,)) self.selenium.get(self.live_server_url + user_history_url) paginator = self.selenium.find_element(By.CSS_SELECTOR, '.paginator') self.assertTrue(paginator.is_displayed()) self.assertIn('%s entries' % LogEntry.objects.count(), paginator.text) self.assertIn(str(Paginator.ELLIPSIS), paginator.text) # The current page. current_page_link = self.selenium.find_element(By.CSS_SELECTOR, 'span.this-page') self.assertEqual(current_page_link.text, '1') # The last page. last_page_link = self.selenium.find_element(By.CSS_SELECTOR, '.end') self.assertTrue(last_page_link.text, '20') # Select the second page. pages = paginator.find_elements(By.TAG_NAME, 'a') second_page_link = pages[0] self.assertEqual(second_page_link.text, '2') second_page_link.click() self.assertIn('?p=2', self.selenium.current_url) rows = self.selenium.find_elements(By.CSS_SELECTOR, '#change-history tbody tr') self.assertIn('Changed something 101', rows[0].text) self.assertIn('Changed something 200', rows[-1].text)
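
# ---------------------------------------------------------------------------
# Hedged sketch (not part of the upstream suite): the pagination assertions
# above imply that the object history is ordered by ascending action time and
# sliced 100 entries per page. This helper recomputes a page's expected
# contents with Django's Paginator so they can be checked without Selenium.
# The helper name and the per_page default are assumptions.
# ---------------------------------------------------------------------------
def expected_history_page(obj, page_number, per_page=100):
    entries = LogEntry.objects.filter(
        object_id=str(obj.pk),
        content_type=ContentType.objects.get_for_model(obj),
    ).order_by('action_time')
    return Paginator(entries, per_page).page(page_number).object_list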
f0f0f84c65b39353f9248a217d33e9c02dea91c5cfdf9dacfcbd46ce9253596d
from django.contrib import admin from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.auth.models import User from django.test import TestCase, override_settings from django.urls import path, reverse from .models import Héllo class AdminSiteWithSidebar(admin.AdminSite): pass class AdminSiteWithoutSidebar(admin.AdminSite): enable_nav_sidebar = False site_with_sidebar = AdminSiteWithSidebar(name='test_with_sidebar') site_without_sidebar = AdminSiteWithoutSidebar(name='test_without_sidebar') site_with_sidebar.register(User) site_with_sidebar.register(Héllo) urlpatterns = [ path('test_sidebar/admin/', site_with_sidebar.urls), path('test_wihout_sidebar/admin/', site_without_sidebar.urls), ] @override_settings(ROOT_URLCONF='admin_views.test_nav_sidebar') class AdminSidebarTests(TestCase): @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser( username='super', password='secret', email='[email protected]', ) def setUp(self): self.client.force_login(self.superuser) def test_sidebar_not_on_index(self): response = self.client.get(reverse('test_with_sidebar:index')) self.assertContains(response, '<div class="main" id="main">') self.assertNotContains(response, '<nav class="sticky" id="nav-sidebar">') def test_sidebar_disabled(self): response = self.client.get(reverse('test_without_sidebar:index')) self.assertNotContains(response, '<nav class="sticky" id="nav-sidebar">') def test_sidebar_unauthenticated(self): self.client.logout() response = self.client.get(reverse('test_with_sidebar:login')) self.assertNotContains(response, '<nav class="sticky" id="nav-sidebar">') def test_sidebar_aria_current_page(self): url = reverse('test_with_sidebar:auth_user_changelist') response = self.client.get(url) self.assertContains(response, '<nav class="sticky" id="nav-sidebar">') self.assertContains(response, '<a href="%s" aria-current="page">Users</a>' % url) @override_settings( TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }] ) def test_sidebar_aria_current_page_missing_without_request_context_processor(self): url = reverse('test_with_sidebar:auth_user_changelist') response = self.client.get(url) self.assertContains(response, '<nav class="sticky" id="nav-sidebar">') # Does not include aria-current attribute. self.assertContains(response, '<a href="%s">Users</a>' % url) self.assertNotContains(response, 'aria-current') @override_settings(DEBUG=True) def test_included_app_list_template_context_fully_set(self): # All context variables should be set when rendering the sidebar. 
url = reverse('test_with_sidebar:auth_user_changelist') with self.assertNoLogs('django.template', 'DEBUG'): self.client.get(url) def test_sidebar_model_name_non_ascii(self): url = reverse('test_with_sidebar:admin_views_héllo_changelist') response = self.client.get(url) self.assertContains(response, '<div class="app-admin_views module current-app">') self.assertContains(response, '<tr class="model-héllo current-model">') self.assertContains( response, '<th scope="row">' '<a href="/test_sidebar/admin/admin_views/h%C3%A9llo/" aria-current="page">' 'Héllos</a></th>' ) @override_settings(ROOT_URLCONF='admin_views.test_nav_sidebar') class SeleniumTests(AdminSeleniumTestCase): available_apps = ['admin_views'] + AdminSeleniumTestCase.available_apps def setUp(self): self.superuser = User.objects.create_superuser( username='super', password='secret', email='[email protected]', ) self.admin_login(username='super', password='secret', login_url=reverse('test_with_sidebar:index')) self.selenium.execute_script("localStorage.removeItem('django.admin.navSidebarIsOpen')") def test_sidebar_starts_open(self): from selenium.webdriver.common.by import By self.selenium.get(self.live_server_url + reverse('test_with_sidebar:auth_user_changelist')) main_element = self.selenium.find_element(By.CSS_SELECTOR, '#main') self.assertIn('shifted', main_element.get_attribute('class').split()) def test_sidebar_can_be_closed(self): from selenium.webdriver.common.by import By self.selenium.get(self.live_server_url + reverse('test_with_sidebar:auth_user_changelist')) toggle_button = self.selenium.find_element(By.CSS_SELECTOR, '#toggle-nav-sidebar') self.assertEqual(toggle_button.tag_name, 'button') self.assertEqual(toggle_button.get_attribute('aria-label'), 'Toggle navigation') for link in self.selenium.find_elements(By.CSS_SELECTOR, '#nav-sidebar a'): self.assertEqual(link.get_attribute('tabIndex'), '0') toggle_button.click() # Hidden sidebar is not reachable via keyboard navigation. for link in self.selenium.find_elements(By.CSS_SELECTOR, '#nav-sidebar a'): self.assertEqual(link.get_attribute('tabIndex'), '-1') main_element = self.selenium.find_element(By.CSS_SELECTOR, '#main') self.assertNotIn('shifted', main_element.get_attribute('class').split()) def test_sidebar_state_persists(self): from selenium.webdriver.common.by import By self.selenium.get(self.live_server_url + reverse('test_with_sidebar:auth_user_changelist')) self.assertIsNone(self.selenium.execute_script("return localStorage.getItem('django.admin.navSidebarIsOpen')")) toggle_button = self.selenium.find_element(By.CSS_SELECTOR, '#toggle-nav-sidebar') toggle_button.click() self.assertEqual( self.selenium.execute_script("return localStorage.getItem('django.admin.navSidebarIsOpen')"), 'false', ) self.selenium.get(self.live_server_url + reverse('test_with_sidebar:auth_user_changelist')) main_element = self.selenium.find_element(By.CSS_SELECTOR, '#main') self.assertNotIn('shifted', main_element.get_attribute('class').split()) toggle_button = self.selenium.find_element(By.CSS_SELECTOR, '#toggle-nav-sidebar') # Hidden sidebar is not reachable via keyboard navigation. 
for link in self.selenium.find_elements(By.CSS_SELECTOR, '#nav-sidebar a'): self.assertEqual(link.get_attribute('tabIndex'), '-1') toggle_button.click() for link in self.selenium.find_elements(By.CSS_SELECTOR, '#nav-sidebar a'): self.assertEqual(link.get_attribute('tabIndex'), '0') self.assertEqual( self.selenium.execute_script("return localStorage.getItem('django.admin.navSidebarIsOpen')"), 'true', ) self.selenium.get(self.live_server_url + reverse('test_with_sidebar:auth_user_changelist')) main_element = self.selenium.find_element(By.CSS_SELECTOR, '#main') self.assertIn('shifted', main_element.get_attribute('class').split()) def test_sidebar_filter_persists(self): from selenium.webdriver.common.by import By self.selenium.get( self.live_server_url + reverse('test_with_sidebar:auth_user_changelist') ) filter_value_script = ( "return sessionStorage.getItem('django.admin.navSidebarFilterValue')" ) self.assertIsNone(self.selenium.execute_script(filter_value_script)) filter_input = self.selenium.find_element(By.CSS_SELECTOR, '#nav-filter') filter_input.send_keys('users') self.assertEqual(self.selenium.execute_script(filter_value_script), 'users')
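

# A minimal sketch (illustrative only, not part of the suite above) of a
# client-side counterpart to test_sidebar_filter_persists(): it assumes the
# quick filter input keeps the id "nav-filter" targeted by the Selenium test
# and is only rendered when the navigation sidebar itself is enabled. The
# class and test names here are hypothetical.
@override_settings(ROOT_URLCONF='admin_views.test_nav_sidebar')
class AdminSidebarFilterMarkupTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(
            username='filter_super', password='secret', email='[email protected]',
        )

    def test_filter_input_rendered_only_with_sidebar(self):
        self.client.force_login(self.superuser)
        # Sidebar enabled: the filter input is part of the changelist page.
        response = self.client.get(reverse('test_with_sidebar:auth_user_changelist'))
        self.assertContains(response, 'id="nav-filter"')
        # Sidebar disabled: no sidebar, so no filter input either.
        response = self.client.get(reverse('test_without_sidebar:index'))
        self.assertNotContains(response, 'id="nav-filter"')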
ee27248ab61f90bdf9103cb37b9ced394c6924215153fc293afdbf9ac62f8127
import datetime import json from contextlib import contextmanager from django.contrib import admin from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.admin.views.autocomplete import AutocompleteJsonView from django.contrib.auth.models import Permission, User from django.contrib.contenttypes.models import ContentType from django.core.exceptions import PermissionDenied from django.http import Http404 from django.test import RequestFactory, override_settings from django.urls import reverse, reverse_lazy from .admin import AnswerAdmin, QuestionAdmin from .models import ( Answer, Author, Authorship, Bonus, Book, Employee, Manager, Parent, PKChild, Question, Toy, WorkHour, ) from .tests import AdminViewBasicTestCase PAGINATOR_SIZE = AutocompleteJsonView.paginate_by class AuthorAdmin(admin.ModelAdmin): ordering = ['id'] search_fields = ['id'] class AuthorshipInline(admin.TabularInline): model = Authorship autocomplete_fields = ['author'] class BookAdmin(admin.ModelAdmin): inlines = [AuthorshipInline] site = admin.AdminSite(name='autocomplete_admin') site.register(Question, QuestionAdmin) site.register(Answer, AnswerAdmin) site.register(Author, AuthorAdmin) site.register(Book, BookAdmin) site.register(Employee, search_fields=['name']) site.register(WorkHour, autocomplete_fields=['employee']) site.register(Manager, search_fields=['name']) site.register(Bonus, autocomplete_fields=['recipient']) site.register(PKChild, search_fields=['name']) site.register(Toy, autocomplete_fields=['child']) @contextmanager def model_admin(model, model_admin, admin_site=site): org_admin = admin_site._registry.get(model) if org_admin: admin_site.unregister(model) admin_site.register(model, model_admin) try: yield finally: if org_admin: admin_site._registry[model] = org_admin class AutocompleteJsonViewTests(AdminViewBasicTestCase): as_view_args = {'admin_site': site} opts = { 'app_label': Answer._meta.app_label, 'model_name': Answer._meta.model_name, 'field_name': 'question' } factory = RequestFactory() url = reverse_lazy('autocomplete_admin:autocomplete') @classmethod def setUpTestData(cls): cls.user = User.objects.create_user( username='user', password='secret', email='[email protected]', is_staff=True, ) super().setUpTestData() def test_success(self): q = Question.objects.create(question='Is this a question?') request = self.factory.get(self.url, {'term': 'is', **self.opts}) request.user = self.superuser response = AutocompleteJsonView.as_view(**self.as_view_args)(request) self.assertEqual(response.status_code, 200) data = json.loads(response.content.decode('utf-8')) self.assertEqual(data, { 'results': [{'id': str(q.pk), 'text': q.question}], 'pagination': {'more': False}, }) def test_custom_to_field(self): q = Question.objects.create(question='Is this a question?') request = self.factory.get(self.url, {'term': 'is', **self.opts, 'field_name': 'question_with_to_field'}) request.user = self.superuser response = AutocompleteJsonView.as_view(**self.as_view_args)(request) self.assertEqual(response.status_code, 200) data = json.loads(response.content.decode('utf-8')) self.assertEqual(data, { 'results': [{'id': str(q.uuid), 'text': q.question}], 'pagination': {'more': False}, }) def test_custom_to_field_permission_denied(self): Question.objects.create(question='Is this a question?') request = self.factory.get(self.url, {'term': 'is', **self.opts, 'field_name': 'question_with_to_field'}) request.user = self.user with self.assertRaises(PermissionDenied): 
AutocompleteJsonView.as_view(**self.as_view_args)(request) def test_custom_to_field_custom_pk(self): q = Question.objects.create(question='Is this a question?') opts = { 'app_label': Question._meta.app_label, 'model_name': Question._meta.model_name, 'field_name': 'related_questions', } request = self.factory.get(self.url, {'term': 'is', **opts}) request.user = self.superuser response = AutocompleteJsonView.as_view(**self.as_view_args)(request) self.assertEqual(response.status_code, 200) data = json.loads(response.content.decode('utf-8')) self.assertEqual(data, { 'results': [{'id': str(q.big_id), 'text': q.question}], 'pagination': {'more': False}, }) def test_to_field_resolution_with_mti(self): """ to_field resolution should correctly resolve for target models using MTI. Tests for single and multi-level cases. """ tests = [ (Employee, WorkHour, 'employee'), (Manager, Bonus, 'recipient'), ] for Target, Remote, related_name in tests: with self.subTest(target_model=Target, remote_model=Remote, related_name=related_name): o = Target.objects.create(name="Frida Kahlo", gender=2, code="painter", alive=False) opts = { 'app_label': Remote._meta.app_label, 'model_name': Remote._meta.model_name, 'field_name': related_name, } request = self.factory.get(self.url, {'term': 'frida', **opts}) request.user = self.superuser response = AutocompleteJsonView.as_view(**self.as_view_args)(request) self.assertEqual(response.status_code, 200) data = json.loads(response.content.decode('utf-8')) self.assertEqual(data, { 'results': [{'id': str(o.pk), 'text': o.name}], 'pagination': {'more': False}, }) def test_to_field_resolution_with_fk_pk(self): p = Parent.objects.create(name="Bertie") c = PKChild.objects.create(parent=p, name="Anna") opts = { 'app_label': Toy._meta.app_label, 'model_name': Toy._meta.model_name, 'field_name': 'child', } request = self.factory.get(self.url, {'term': 'anna', **opts}) request.user = self.superuser response = AutocompleteJsonView.as_view(**self.as_view_args)(request) self.assertEqual(response.status_code, 200) data = json.loads(response.content.decode('utf-8')) self.assertEqual(data, { 'results': [{'id': str(c.pk), 'text': c.name}], 'pagination': {'more': False}, }) def test_field_does_not_exist(self): request = self.factory.get(self.url, {'term': 'is', **self.opts, 'field_name': 'does_not_exist'}) request.user = self.superuser with self.assertRaises(PermissionDenied): AutocompleteJsonView.as_view(**self.as_view_args)(request) def test_field_no_related_field(self): request = self.factory.get(self.url, {'term': 'is', **self.opts, 'field_name': 'answer'}) request.user = self.superuser with self.assertRaises(PermissionDenied): AutocompleteJsonView.as_view(**self.as_view_args)(request) def test_field_does_not_allowed(self): request = self.factory.get(self.url, {'term': 'is', **self.opts, 'field_name': 'related_questions'}) request.user = self.superuser with self.assertRaises(PermissionDenied): AutocompleteJsonView.as_view(**self.as_view_args)(request) def test_limit_choices_to(self): # Answer.question_with_to_field defines limit_choices_to to "those not # starting with 'not'". 
q = Question.objects.create(question='Is this a question?') Question.objects.create(question='Not a question.') request = self.factory.get(self.url, {'term': 'is', **self.opts, 'field_name': 'question_with_to_field'}) request.user = self.superuser response = AutocompleteJsonView.as_view(**self.as_view_args)(request) self.assertEqual(response.status_code, 200) data = json.loads(response.content.decode('utf-8')) self.assertEqual(data, { 'results': [{'id': str(q.uuid), 'text': q.question}], 'pagination': {'more': False}, }) def test_must_be_logged_in(self): response = self.client.get(self.url, {'term': '', **self.opts}) self.assertEqual(response.status_code, 200) self.client.logout() response = self.client.get(self.url, {'term': '', **self.opts}) self.assertEqual(response.status_code, 302) def test_has_view_or_change_permission_required(self): """ Users require the change permission for the related model to the autocomplete view for it. """ request = self.factory.get(self.url, {'term': 'is', **self.opts}) request.user = self.user with self.assertRaises(PermissionDenied): AutocompleteJsonView.as_view(**self.as_view_args)(request) for permission in ('view', 'change'): with self.subTest(permission=permission): self.user.user_permissions.clear() p = Permission.objects.get( content_type=ContentType.objects.get_for_model(Question), codename='%s_question' % permission, ) self.user.user_permissions.add(p) request.user = User.objects.get(pk=self.user.pk) response = AutocompleteJsonView.as_view(**self.as_view_args)(request) self.assertEqual(response.status_code, 200) def test_search_use_distinct(self): """ Searching across model relations use QuerySet.distinct() to avoid duplicates. """ q1 = Question.objects.create(question='question 1') q2 = Question.objects.create(question='question 2') q2.related_questions.add(q1) q3 = Question.objects.create(question='question 3') q3.related_questions.add(q1) request = self.factory.get(self.url, {'term': 'question', **self.opts}) request.user = self.superuser class DistinctQuestionAdmin(QuestionAdmin): search_fields = ['related_questions__question', 'question'] with model_admin(Question, DistinctQuestionAdmin): response = AutocompleteJsonView.as_view(**self.as_view_args)(request) self.assertEqual(response.status_code, 200) data = json.loads(response.content.decode('utf-8')) self.assertEqual(len(data['results']), 3) def test_missing_search_fields(self): class EmptySearchAdmin(QuestionAdmin): search_fields = [] with model_admin(Question, EmptySearchAdmin): msg = 'EmptySearchAdmin must have search_fields for the autocomplete_view.' with self.assertRaisesMessage(Http404, msg): site.autocomplete_view(self.factory.get(self.url, {'term': '', **self.opts})) def test_get_paginator(self): """Search results are paginated.""" class PKOrderingQuestionAdmin(QuestionAdmin): ordering = ['pk'] Question.objects.bulk_create(Question(question=str(i)) for i in range(PAGINATOR_SIZE + 10)) # The first page of results. request = self.factory.get(self.url, {'term': '', **self.opts}) request.user = self.superuser with model_admin(Question, PKOrderingQuestionAdmin): response = AutocompleteJsonView.as_view(**self.as_view_args)(request) self.assertEqual(response.status_code, 200) data = json.loads(response.content.decode('utf-8')) self.assertEqual(data, { 'results': [{'id': str(q.pk), 'text': q.question} for q in Question.objects.all()[:PAGINATOR_SIZE]], 'pagination': {'more': True}, }) # The second page of results. 
request = self.factory.get(self.url, {'term': '', 'page': '2', **self.opts}) request.user = self.superuser with model_admin(Question, PKOrderingQuestionAdmin): response = AutocompleteJsonView.as_view(**self.as_view_args)(request) self.assertEqual(response.status_code, 200) data = json.loads(response.content.decode('utf-8')) self.assertEqual(data, { 'results': [{'id': str(q.pk), 'text': q.question} for q in Question.objects.all()[PAGINATOR_SIZE:]], 'pagination': {'more': False}, }) def test_serialize_result(self): class AutocompleteJsonSerializeResultView(AutocompleteJsonView): def serialize_result(self, obj, to_field_name): return { **super().serialize_result(obj, to_field_name), 'posted': str(obj.posted), } Question.objects.create(question='Question 1', posted=datetime.date(2021, 8, 9)) Question.objects.create(question='Question 2', posted=datetime.date(2021, 8, 7)) request = self.factory.get(self.url, {'term': 'question', **self.opts}) request.user = self.superuser response = AutocompleteJsonSerializeResultView.as_view(**self.as_view_args)(request) self.assertEqual(response.status_code, 200) data = json.loads(response.content.decode('utf-8')) self.assertEqual(data, { 'results': [ {'id': str(q.pk), 'text': q.question, 'posted': str(q.posted)} for q in Question.objects.order_by('-posted') ], 'pagination': {'more': False}, }) @override_settings(ROOT_URLCONF='admin_views.urls') class SeleniumTests(AdminSeleniumTestCase): available_apps = ['admin_views'] + AdminSeleniumTestCase.available_apps def setUp(self): self.superuser = User.objects.create_superuser( username='super', password='secret', email='[email protected]', ) self.admin_login(username='super', password='secret', login_url=reverse('autocomplete_admin:index')) @contextmanager def select2_ajax_wait(self, timeout=10): from selenium.common.exceptions import NoSuchElementException from selenium.webdriver.common.by import By from selenium.webdriver.support import expected_conditions as ec yield with self.disable_implicit_wait(): try: loading_element = self.selenium.find_element( By.CSS_SELECTOR, 'li.select2-results__option.loading-results' ) except NoSuchElementException: pass else: self.wait_until(ec.staleness_of(loading_element), timeout=timeout) def test_select(self): from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import Select self.selenium.get(self.live_server_url + reverse('autocomplete_admin:admin_views_answer_add')) elem = self.selenium.find_element(By.CSS_SELECTOR, '.select2-selection') elem.click() # Open the autocomplete dropdown. results = self.selenium.find_element(By.CSS_SELECTOR, '.select2-results') self.assertTrue(results.is_displayed()) option = self.selenium.find_element(By.CSS_SELECTOR, '.select2-results__option') self.assertEqual(option.text, 'No results found') elem.click() # Close the autocomplete dropdown. q1 = Question.objects.create(question='Who am I?') Question.objects.bulk_create(Question(question=str(i)) for i in range(PAGINATOR_SIZE + 10)) elem.click() # Reopen the dropdown now that some objects exist. result_container = self.selenium.find_element(By.CSS_SELECTOR, '.select2-results') self.assertTrue(result_container.is_displayed()) # PAGINATOR_SIZE results and "Loading more results". 
self.assertCountSeleniumElements('.select2-results__option', PAGINATOR_SIZE + 1, root_element=result_container) search = self.selenium.find_element(By.CSS_SELECTOR, '.select2-search__field') # Load next page of results by scrolling to the bottom of the list. with self.select2_ajax_wait(): for _ in range(PAGINATOR_SIZE + 1): search.send_keys(Keys.ARROW_DOWN) # All objects are now loaded. self.assertCountSeleniumElements( '.select2-results__option', PAGINATOR_SIZE + 11, root_element=result_container, ) # Limit the results with the search field. with self.select2_ajax_wait(): search.send_keys('Who') # Ajax request is delayed. self.assertTrue(result_container.is_displayed()) self.assertCountSeleniumElements( '.select2-results__option', PAGINATOR_SIZE + 12, root_element=result_container, ) self.assertTrue(result_container.is_displayed()) self.assertCountSeleniumElements('.select2-results__option', 1, root_element=result_container) # Select the result. search.send_keys(Keys.RETURN) select = Select(self.selenium.find_element(By.ID, 'id_question')) self.assertEqual(select.first_selected_option.get_attribute('value'), str(q1.pk)) def test_select_multiple(self): from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import Select self.selenium.get(self.live_server_url + reverse('autocomplete_admin:admin_views_question_add')) elem = self.selenium.find_element(By.CSS_SELECTOR, '.select2-selection') elem.click() # Open the autocomplete dropdown. results = self.selenium.find_element(By.CSS_SELECTOR, '.select2-results') self.assertTrue(results.is_displayed()) option = self.selenium.find_element(By.CSS_SELECTOR, '.select2-results__option') self.assertEqual(option.text, 'No results found') elem.click() # Close the autocomplete dropdown. Question.objects.create(question='Who am I?') Question.objects.bulk_create(Question(question=str(i)) for i in range(PAGINATOR_SIZE + 10)) elem.click() # Reopen the dropdown now that some objects exist. result_container = self.selenium.find_element(By.CSS_SELECTOR, '.select2-results') self.assertTrue(result_container.is_displayed()) self.assertCountSeleniumElements('.select2-results__option', PAGINATOR_SIZE + 1, root_element=result_container) search = self.selenium.find_element(By.CSS_SELECTOR, '.select2-search__field') # Load next page of results by scrolling to the bottom of the list. with self.select2_ajax_wait(): for _ in range(PAGINATOR_SIZE + 1): search.send_keys(Keys.ARROW_DOWN) self.assertCountSeleniumElements('.select2-results__option', 31, root_element=result_container) # Limit the results with the search field. with self.select2_ajax_wait(): search.send_keys('Who') # Ajax request is delayed. self.assertTrue(result_container.is_displayed()) self.assertCountSeleniumElements('.select2-results__option', 32, root_element=result_container) self.assertTrue(result_container.is_displayed()) self.assertCountSeleniumElements('.select2-results__option', 1, root_element=result_container) # Select the result. search.send_keys(Keys.RETURN) # Reopen the dropdown and add the first result to the selection. 
elem.click() search.send_keys(Keys.ARROW_DOWN) search.send_keys(Keys.RETURN) select = Select(self.selenium.find_element(By.ID, 'id_related_questions')) self.assertEqual(len(select.all_selected_options), 2) def test_inline_add_another_widgets(self): from selenium.webdriver.common.by import By def assertNoResults(row): elem = row.find_element(By.CSS_SELECTOR, '.select2-selection') elem.click() # Open the autocomplete dropdown. results = self.selenium.find_element(By.CSS_SELECTOR, '.select2-results') self.assertTrue(results.is_displayed()) option = self.selenium.find_element(By.CSS_SELECTOR, '.select2-results__option') self.assertEqual(option.text, 'No results found') # Autocomplete works in rows present when the page loads. self.selenium.get(self.live_server_url + reverse('autocomplete_admin:admin_views_book_add')) rows = self.selenium.find_elements(By.CSS_SELECTOR, '.dynamic-authorship_set') self.assertEqual(len(rows), 3) assertNoResults(rows[0]) # Autocomplete works in rows added using the "Add another" button. self.selenium.find_element(By.LINK_TEXT, 'Add another Authorship').click() rows = self.selenium.find_elements(By.CSS_SELECTOR, '.dynamic-authorship_set') self.assertEqual(len(rows), 4) assertNoResults(rows[-1])
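

# A minimal sketch (illustrative only, not part of the tests above) of how the
# serialize_result() hook exercised in test_serialize_result() can be wired up
# for an admin: subclass AutocompleteJsonView and point the model admin's
# autocomplete_view() at it. The class names here are hypothetical.
class PostedDateAutocompleteJsonView(AutocompleteJsonView):
    def serialize_result(self, obj, to_field_name):
        # Extend each default {'id': ..., 'text': ...} result with an extra
        # attribute taken from the model instance.
        return {
            **super().serialize_result(obj, to_field_name),
            'posted': str(obj.posted),
        }


class QuestionAdminWithPostedDate(QuestionAdmin):
    def autocomplete_view(self, request):
        return PostedDateAutocompleteJsonView.as_view(admin_site=self.admin_site)(request)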
d617235be0bec395c301888ca3a7a45dcb7424ddf010fe4409d988ac4b32a9f7
import datetime import pickle import django from django.db import models from django.test import TestCase from .models import ( BinaryFieldModel, Container, Event, Group, Happening, M2MModel, MyEvent, ) class PickleabilityTestCase(TestCase): @classmethod def setUpTestData(cls): cls.happening = Happening.objects.create() # make sure the defaults are working (#20158) def assert_pickles(self, qs): self.assertEqual(list(pickle.loads(pickle.dumps(qs))), list(qs)) def test_binaryfield(self): BinaryFieldModel.objects.create(data=b'binary data') self.assert_pickles(BinaryFieldModel.objects.all()) def test_related_field(self): g = Group.objects.create(name="Ponies Who Own Maybachs") self.assert_pickles(Event.objects.filter(group=g.id)) def test_datetime_callable_default_all(self): self.assert_pickles(Happening.objects.all()) def test_datetime_callable_default_filter(self): self.assert_pickles(Happening.objects.filter(when=datetime.datetime.now())) def test_string_as_default(self): self.assert_pickles(Happening.objects.filter(name="test")) def test_standalone_method_as_default(self): self.assert_pickles(Happening.objects.filter(number1=1)) def test_staticmethod_as_default(self): self.assert_pickles(Happening.objects.filter(number2=1)) def test_filter_reverse_fk(self): self.assert_pickles(Group.objects.filter(event=1)) def test_doesnotexist_exception(self): # Ticket #17776 original = Event.DoesNotExist("Doesn't exist") unpickled = pickle.loads(pickle.dumps(original)) # Exceptions are not equal to equivalent instances of themselves, so # can't just use assertEqual(original, unpickled) self.assertEqual(original.__class__, unpickled.__class__) self.assertEqual(original.args, unpickled.args) def test_doesnotexist_class(self): klass = Event.DoesNotExist self.assertIs(pickle.loads(pickle.dumps(klass)), klass) def test_multipleobjectsreturned_class(self): klass = Event.MultipleObjectsReturned self.assertIs(pickle.loads(pickle.dumps(klass)), klass) def test_forward_relatedobjectdoesnotexist_class(self): # ForwardManyToOneDescriptor klass = Event.group.RelatedObjectDoesNotExist self.assertIs(pickle.loads(pickle.dumps(klass)), klass) # ForwardOneToOneDescriptor klass = Happening.event.RelatedObjectDoesNotExist self.assertIs(pickle.loads(pickle.dumps(klass)), klass) def test_reverse_one_to_one_relatedobjectdoesnotexist_class(self): klass = Event.happening.RelatedObjectDoesNotExist self.assertIs(pickle.loads(pickle.dumps(klass)), klass) def test_manager_pickle(self): pickle.loads(pickle.dumps(Happening.objects)) def test_model_pickle(self): """ A model not defined on module level is picklable. """ original = Container.SomeModel(pk=1) dumped = pickle.dumps(original) reloaded = pickle.loads(dumped) self.assertEqual(original, reloaded) # Also, deferred dynamic model works Container.SomeModel.objects.create(somefield=1) original = Container.SomeModel.objects.defer('somefield')[0] dumped = pickle.dumps(original) reloaded = pickle.loads(dumped) self.assertEqual(original, reloaded) self.assertEqual(original.somefield, reloaded.somefield) def test_model_pickle_m2m(self): """ Test intentionally the automatically created through model. 
""" m1 = M2MModel.objects.create() g1 = Group.objects.create(name='foof') m1.groups.add(g1) m2m_through = M2MModel._meta.get_field('groups').remote_field.through original = m2m_through.objects.get() dumped = pickle.dumps(original) reloaded = pickle.loads(dumped) self.assertEqual(original, reloaded) def test_model_pickle_dynamic(self): class Meta: proxy = True dynclass = type("DynamicEventSubclass", (Event,), {'Meta': Meta, '__module__': Event.__module__}) original = dynclass(pk=1) dumped = pickle.dumps(original) reloaded = pickle.loads(dumped) self.assertEqual(original, reloaded) self.assertIs(reloaded.__class__, dynclass) def test_specialized_queryset(self): self.assert_pickles(Happening.objects.values('name')) self.assert_pickles(Happening.objects.values('name').dates('when', 'year')) # With related field (#14515) self.assert_pickles( Event.objects.select_related('group').order_by('title').values_list('title', 'group__name') ) def test_pickle_prefetch_related_idempotence(self): g = Group.objects.create(name='foo') groups = Group.objects.prefetch_related('event_set') # First pickling groups = pickle.loads(pickle.dumps(groups)) self.assertSequenceEqual(groups, [g]) # Second pickling groups = pickle.loads(pickle.dumps(groups)) self.assertSequenceEqual(groups, [g]) def test_pickle_prefetch_queryset_usable_outside_of_prefetch(self): # Prefetch shouldn't affect the fetch-on-pickle behavior of the # queryset passed to it. Group.objects.create(name='foo') events = Event.objects.order_by('id') Group.objects.prefetch_related(models.Prefetch('event_set', queryset=events)) with self.assertNumQueries(1): events2 = pickle.loads(pickle.dumps(events)) with self.assertNumQueries(0): list(events2) def test_pickle_prefetch_queryset_still_usable(self): g = Group.objects.create(name='foo') groups = Group.objects.prefetch_related( models.Prefetch('event_set', queryset=Event.objects.order_by('id')) ) groups2 = pickle.loads(pickle.dumps(groups)) self.assertSequenceEqual(groups2.filter(id__gte=0), [g]) def test_pickle_prefetch_queryset_not_evaluated(self): Group.objects.create(name='foo') groups = Group.objects.prefetch_related( models.Prefetch('event_set', queryset=Event.objects.order_by('id')) ) list(groups) # evaluate QuerySet with self.assertNumQueries(0): pickle.loads(pickle.dumps(groups)) def test_pickle_prefetch_related_with_m2m_and_objects_deletion(self): """ #24831 -- Cached properties on ManyToOneRel created in QuerySet.delete() caused subsequent QuerySet pickling to fail. 
""" g = Group.objects.create(name='foo') m2m = M2MModel.objects.create() m2m.groups.add(g) Group.objects.all().delete() m2ms = M2MModel.objects.prefetch_related('groups') m2ms = pickle.loads(pickle.dumps(m2ms)) self.assertSequenceEqual(m2ms, [m2m]) def test_pickle_boolean_expression_in_Q__queryset(self): group = Group.objects.create(name='group') Event.objects.create(title='event', group=group) groups = Group.objects.filter( models.Q(models.Exists( Event.objects.filter(group_id=models.OuterRef('id')), )), ) groups2 = pickle.loads(pickle.dumps(groups)) self.assertSequenceEqual(groups2, [group]) def test_pickle_exists_queryset_still_usable(self): group = Group.objects.create(name='group') Event.objects.create(title='event', group=group) groups = Group.objects.annotate( has_event=models.Exists( Event.objects.filter(group_id=models.OuterRef('id')), ), ) groups2 = pickle.loads(pickle.dumps(groups)) self.assertSequenceEqual(groups2.filter(has_event=True), [group]) def test_pickle_exists_queryset_not_evaluated(self): group = Group.objects.create(name='group') Event.objects.create(title='event', group=group) groups = Group.objects.annotate( has_event=models.Exists( Event.objects.filter(group_id=models.OuterRef('id')), ), ) list(groups) # evaluate QuerySet. with self.assertNumQueries(0): self.assert_pickles(groups) def test_pickle_exists_kwargs_queryset_not_evaluated(self): group = Group.objects.create(name='group') Event.objects.create(title='event', group=group) groups = Group.objects.annotate( has_event=models.Exists( queryset=Event.objects.filter(group_id=models.OuterRef('id')), ), ) list(groups) # evaluate QuerySet. with self.assertNumQueries(0): self.assert_pickles(groups) def test_pickle_subquery_queryset_not_evaluated(self): group = Group.objects.create(name='group') Event.objects.create(title='event', group=group) groups = Group.objects.annotate( event_title=models.Subquery( Event.objects.filter(group_id=models.OuterRef('id')).values('title'), ), ) list(groups) # evaluate QuerySet. with self.assertNumQueries(0): self.assert_pickles(groups) def test_pickle_filteredrelation(self): group = Group.objects.create(name='group') event_1 = Event.objects.create(title='Big event', group=group) event_2 = Event.objects.create(title='Small event', group=group) Happening.objects.bulk_create([ Happening(event=event_1, number1=5), Happening(event=event_2, number1=3), ]) groups = Group.objects.annotate( big_events=models.FilteredRelation( 'event', condition=models.Q(event__title__startswith='Big'), ), ).annotate(sum_number=models.Sum('big_events__happening__number1')) groups_query = pickle.loads(pickle.dumps(groups.query)) groups = Group.objects.all() groups.query = groups_query self.assertEqual(groups.get().sum_number, 5) def test_pickle_filteredrelation_m2m(self): group = Group.objects.create(name='group') m2mmodel = M2MModel.objects.create(added=datetime.date(2020, 1, 1)) m2mmodel.groups.add(group) groups = Group.objects.annotate( first_m2mmodels=models.FilteredRelation( 'm2mmodel', condition=models.Q(m2mmodel__added__year=2020), ), ).annotate(count_groups=models.Count('first_m2mmodels__groups')) groups_query = pickle.loads(pickle.dumps(groups.query)) groups = Group.objects.all() groups.query = groups_query self.assertEqual(groups.get().count_groups, 1) def test_annotation_with_callable_default(self): # Happening.when has a callable default of datetime.datetime.now. 
qs = Happening.objects.annotate(latest_time=models.Max('when')) self.assert_pickles(qs) def test_annotation_values(self): qs = Happening.objects.values('name').annotate(latest_time=models.Max('when')) reloaded = Happening.objects.all() reloaded.query = pickle.loads(pickle.dumps(qs.query)) self.assertEqual( reloaded.get(), {'name': 'test', 'latest_time': self.happening.when}, ) def test_annotation_values_list(self): # values_list() is reloaded to values() when using a pickled query. tests = [ Happening.objects.values_list('name'), Happening.objects.values_list('name', flat=True), Happening.objects.values_list('name', named=True), ] for qs in tests: with self.subTest(qs._iterable_class.__name__): reloaded = Happening.objects.all() reloaded.query = pickle.loads(pickle.dumps(qs.query)) self.assertEqual(reloaded.get(), {'name': 'test'}) def test_filter_deferred(self): qs = Happening.objects.all() qs._defer_next_filter = True qs = qs.filter(id=0) self.assert_pickles(qs) def test_missing_django_version_unpickling(self): """ #21430 -- Verifies a warning is raised for querysets that are unpickled without a Django version """ qs = Group.missing_django_version_objects.all() msg = "Pickled queryset instance's Django version is not specified." with self.assertRaisesMessage(RuntimeWarning, msg): pickle.loads(pickle.dumps(qs)) def test_unsupported_unpickle(self): """ #21430 -- Verifies a warning is raised for querysets that are unpickled with a different Django version than the current """ qs = Group.previous_django_version_objects.all() msg = ( "Pickled queryset instance's Django version 1.0 does not match " "the current version %s." % django.__version__ ) with self.assertRaisesMessage(RuntimeWarning, msg): pickle.loads(pickle.dumps(qs)) def test_order_by_model_with_abstract_inheritance_and_meta_ordering(self): group = Group.objects.create(name='test') event = MyEvent.objects.create(title='test event', group=group) event.edition_set.create() self.assert_pickles(event.edition_set.order_by('event')) class InLookupTests(TestCase): @classmethod def setUpTestData(cls): for i in range(1, 3): group = Group.objects.create(name='Group {}'.format(i)) cls.e1 = Event.objects.create(title='Event 1', group=group) def test_in_lookup_queryset_evaluation(self): """ Neither pickling nor unpickling a QuerySet.query with an __in=inner_qs lookup should evaluate inner_qs. """ events = Event.objects.filter(group__in=Group.objects.all()) with self.assertNumQueries(0): dumped = pickle.dumps(events.query) with self.assertNumQueries(0): reloaded = pickle.loads(dumped) reloaded_events = Event.objects.none() reloaded_events.query = reloaded self.assertSequenceEqual(reloaded_events, [self.e1]) def test_in_lookup_query_evaluation(self): events = Event.objects.filter(group__in=Group.objects.values('id').query) with self.assertNumQueries(0): dumped = pickle.dumps(events.query) with self.assertNumQueries(0): reloaded = pickle.loads(dumped) reloaded_events = Event.objects.none() reloaded_events.query = reloaded self.assertSequenceEqual(reloaded_events, [self.e1])
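

# A minimal sketch (illustrative only) of the pattern several tests above rely
# on: pickling QuerySet.query rather than the QuerySet avoids pickling cached
# results, and the unpickled query can be attached to a fresh QuerySet of the
# same model. The helper name is hypothetical.
def roundtrip_query(queryset):
    """Return a new, unevaluated QuerySet rebuilt from a pickled query."""
    restored = queryset.model._default_manager.all()
    restored.query = pickle.loads(pickle.dumps(queryset.query))
    return restored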
8cb62cd624dd00015bb611a04d0758ae418d139cf0e7d4153108ac720abbbe81
import datetime from django.db import DJANGO_VERSION_PICKLE_KEY, models from django.utils.translation import gettext_lazy as _ def standalone_number(): return 1 class Numbers: @staticmethod def get_static_number(): return 2 class PreviousDjangoVersionQuerySet(models.QuerySet): def __getstate__(self): state = super().__getstate__() state[DJANGO_VERSION_PICKLE_KEY] = '1.0' return state class MissingDjangoVersionQuerySet(models.QuerySet): def __getstate__(self): state = super().__getstate__() del state[DJANGO_VERSION_PICKLE_KEY] return state class Group(models.Model): name = models.CharField(_('name'), max_length=100) objects = models.Manager() previous_django_version_objects = PreviousDjangoVersionQuerySet.as_manager() missing_django_version_objects = MissingDjangoVersionQuerySet.as_manager() class Event(models.Model): title = models.CharField(max_length=100) group = models.ForeignKey(Group, models.CASCADE, limit_choices_to=models.Q()) class Happening(models.Model): when = models.DateTimeField(blank=True, default=datetime.datetime.now) name = models.CharField(blank=True, max_length=100, default="test") number1 = models.IntegerField(blank=True, default=standalone_number) number2 = models.IntegerField(blank=True, default=Numbers.get_static_number) event = models.OneToOneField(Event, models.CASCADE, null=True) class BinaryFieldModel(models.Model): data = models.BinaryField(null=True) class Container: # To test pickling we need a class that isn't defined on module, but # is still available from app-cache. So, the Container class moves # SomeModel outside of module level class SomeModel(models.Model): somefield = models.IntegerField() class M2MModel(models.Model): added = models.DateField(default=datetime.date.today) groups = models.ManyToManyField(Group) class AbstractEvent(Event): class Meta: abstract = True ordering = ['title'] class MyEvent(AbstractEvent): pass class Edition(models.Model): event = models.ForeignKey('MyEvent', on_delete=models.CASCADE)